
Searched refs:nbio (Results 1 – 25 of 42) sorted by relevance

/linux-6.6.21/drivers/gpu/drm/amd/amdgpu/
amdgpu_nbio.c
30 if (!adev->nbio.ras) in amdgpu_nbio_ras_sw_init()
33 ras = adev->nbio.ras; in amdgpu_nbio_ras_sw_init()
43 adev->nbio.ras_if = &ras->ras_block.ras_comm; in amdgpu_nbio_ras_sw_init()
50 if (adev->nbio.funcs && adev->nbio.funcs->get_pcie_replay_count) in amdgpu_nbio_get_pcie_replay_count()
51 return adev->nbio.funcs->get_pcie_replay_count(adev); in amdgpu_nbio_get_pcie_replay_count()
59 if (adev->nbio.funcs->get_pcie_usage) in amdgpu_nbio_get_pcie_usage()
60 adev->nbio.funcs->get_pcie_usage(adev, count0, count1); in amdgpu_nbio_get_pcie_usage()
72 r = amdgpu_irq_get(adev, &adev->nbio.ras_controller_irq, 0); in amdgpu_nbio_ras_late_init()
75 r = amdgpu_irq_get(adev, &adev->nbio.ras_err_event_athub_irq, 0); in amdgpu_nbio_ras_late_init()
soc21.c
222 return adev->nbio.funcs->get_memsize(adev); in soc21_get_config_memsize()
351 u32 memsize = adev->nbio.funcs->get_memsize(adev);
436 (adev->nbio.funcs->program_aspm)) in soc21_program_aspm()
437 adev->nbio.funcs->program_aspm(adev); in soc21_program_aspm()
725 if (adev->nbio.ras && in soc21_common_late_init()
726 adev->nbio.ras_err_event_athub_irq.funcs) in soc21_common_late_init()
731 amdgpu_irq_get(adev, &adev->nbio.ras_err_event_athub_irq, 0); in soc21_common_late_init()
737 adev->nbio.funcs->enable_doorbell_selfring_aperture(adev, true); in soc21_common_late_init()
764 adev->nbio.funcs->init_registers(adev); in soc21_common_hw_init()
769 if (adev->nbio.funcs->remap_hdp_registers && !amdgpu_sriov_vf(adev)) in soc21_common_hw_init()
[all …]
soc15.c
320 return adev->nbio.funcs->get_memsize(adev); in soc15_get_config_memsize()
489 adev->nbio.funcs->enable_doorbell_interrupt(adev, false); in soc15_asic_baco_reset()
497 adev->nbio.funcs->enable_doorbell_interrupt(adev, true); in soc15_asic_baco_reset()
650 (adev->nbio.funcs->program_aspm)) in soc15_program_aspm()
651 adev->nbio.funcs->program_aspm(adev); in soc15_program_aspm()
1192 adev->nbio.funcs->enable_doorbell_selfring_aperture(adev, true); in soc15_common_late_init()
1228 adev->nbio.funcs->sdma_doorbell_range(adev, i, in soc15_sdma_doorbell_range_init()
1242 adev->nbio.funcs->init_registers(adev); in soc15_common_hw_init()
1247 if (adev->nbio.funcs->remap_hdp_registers && !amdgpu_sriov_vf(adev)) in soc15_common_hw_init()
1248 adev->nbio.funcs->remap_hdp_registers(adev); in soc15_common_hw_init()
[all …]
nv.c
309 return adev->nbio.funcs->get_memsize(adev); in nv_get_config_memsize()
431 u32 memsize = adev->nbio.funcs->get_memsize(adev); in nv_asic_mode2_reset()
520 (adev->nbio.funcs->program_aspm)) in nv_program_aspm()
521 adev->nbio.funcs->program_aspm(adev); in nv_program_aspm()
613 (adev->nbio.funcs->enable_aspm) && in nv_update_umd_stable_pstate()
615 adev->nbio.funcs->enable_aspm(adev, !enter); in nv_update_umd_stable_pstate()
978 adev->nbio.funcs->enable_doorbell_selfring_aperture(adev, true); in nv_common_late_init()
1002 if (adev->nbio.funcs->apply_lc_spc_mode_wa) in nv_common_hw_init()
1003 adev->nbio.funcs->apply_lc_spc_mode_wa(adev); in nv_common_hw_init()
1005 if (adev->nbio.funcs->apply_l1_link_width_reconfig_wa) in nv_common_hw_init()
[all …]
df_v3_6.c
51 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_get_fica()
52 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_get_fica()
74 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_set_fica()
75 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_set_fica()
102 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_perfmon_rreg()
103 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_perfmon_rreg()
124 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_perfmon_wreg()
125 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_perfmon_wreg()
143 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_perfmon_arm_with_status()
144 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_perfmon_arm_with_status()
nbio_v7_9.c
571 struct ras_manager *obj = amdgpu_ras_find_obj(adev, adev->nbio.ras_if); in nbio_v7_9_handle_ras_controller_intr_no_bifring()
602 get_ras_block_str(adev->nbio.ras_if)); in nbio_v7_9_handle_ras_controller_intr_no_bifring()
608 get_ras_block_str(adev->nbio.ras_if)); in nbio_v7_9_handle_ras_controller_intr_no_bifring()
694 adev->nbio.ras_controller_irq.funcs = in nbio_v7_9_init_ras_controller_interrupt()
696 adev->nbio.ras_controller_irq.num_types = 1; in nbio_v7_9_init_ras_controller_interrupt()
701 &adev->nbio.ras_controller_irq); in nbio_v7_9_init_ras_controller_interrupt()
712 adev->nbio.ras_err_event_athub_irq.funcs = in nbio_v7_9_init_ras_err_event_athub_interrupt()
714 adev->nbio.ras_err_event_athub_irq.num_types = 1; in nbio_v7_9_init_ras_err_event_athub_interrupt()
719 &adev->nbio.ras_err_event_athub_irq); in nbio_v7_9_init_ras_err_event_athub_interrupt()
nbio_v7_4.c
367 struct ras_manager *obj = amdgpu_ras_find_obj(adev, adev->nbio.ras_if); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
404 get_ras_block_str(adev->nbio.ras_if)); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
410 get_ras_block_str(adev->nbio.ras_if)); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
553 adev->nbio.ras_controller_irq.funcs = in nbio_v7_4_init_ras_controller_interrupt()
555 adev->nbio.ras_controller_irq.num_types = 1; in nbio_v7_4_init_ras_controller_interrupt()
560 &adev->nbio.ras_controller_irq); in nbio_v7_4_init_ras_controller_interrupt()
571 adev->nbio.ras_err_event_athub_irq.funcs = in nbio_v7_4_init_ras_err_event_athub_interrupt()
573 adev->nbio.ras_err_event_athub_irq.num_types = 1; in nbio_v7_4_init_ras_err_event_athub_interrupt()
578 &adev->nbio.ras_err_event_athub_irq); in nbio_v7_4_init_ras_err_event_athub_interrupt()
amdgpu_discovery.c
2410 adev->nbio.funcs = &nbio_v6_1_funcs; in amdgpu_discovery_set_ip_blocks()
2411 adev->nbio.hdp_flush_reg = &nbio_v6_1_hdp_flush_reg; in amdgpu_discovery_set_ip_blocks()
2416 adev->nbio.funcs = &nbio_v7_0_funcs; in amdgpu_discovery_set_ip_blocks()
2417 adev->nbio.hdp_flush_reg = &nbio_v7_0_hdp_flush_reg; in amdgpu_discovery_set_ip_blocks()
2422 adev->nbio.funcs = &nbio_v7_4_funcs; in amdgpu_discovery_set_ip_blocks()
2423 adev->nbio.hdp_flush_reg = &nbio_v7_4_hdp_flush_reg; in amdgpu_discovery_set_ip_blocks()
2426 adev->nbio.funcs = &nbio_v7_9_funcs; in amdgpu_discovery_set_ip_blocks()
2427 adev->nbio.hdp_flush_reg = &nbio_v7_9_hdp_flush_reg; in amdgpu_discovery_set_ip_blocks()
2434 adev->nbio.funcs = &nbio_v7_2_funcs; in amdgpu_discovery_set_ip_blocks()
2435 adev->nbio.hdp_flush_reg = &nbio_v7_2_hdp_flush_reg; in amdgpu_discovery_set_ip_blocks()
[all …]
amdgpu_device.c
549 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_rreg()
550 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_rreg()
573 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_rreg_ext()
574 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_rreg_ext()
575 if (adev->nbio.funcs->get_pcie_index_hi_offset) in amdgpu_device_indirect_rreg_ext()
576 pcie_index_hi = adev->nbio.funcs->get_pcie_index_hi_offset(adev); in amdgpu_device_indirect_rreg_ext()
622 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_rreg64()
623 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_rreg64()
657 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_wreg()
658 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_wreg()
[all …]
amdgpu_ras.c
1653 if (adev->nbio.ras && in amdgpu_ras_interrupt_fatal_error_handler()
1654 adev->nbio.ras->handle_ras_controller_intr_no_bifring) in amdgpu_ras_interrupt_fatal_error_handler()
1655 adev->nbio.ras->handle_ras_controller_intr_no_bifring(adev); in amdgpu_ras_interrupt_fatal_error_handler()
1657 if (adev->nbio.ras && in amdgpu_ras_interrupt_fatal_error_handler()
1658 adev->nbio.ras->handle_ras_err_event_athub_intr_no_bifring) in amdgpu_ras_interrupt_fatal_error_handler()
1659 adev->nbio.ras->handle_ras_err_event_athub_intr_no_bifring(adev); in amdgpu_ras_interrupt_fatal_error_handler()
2645 adev->nbio.ras = &nbio_v7_4_ras; in amdgpu_ras_init()
2655 adev->nbio.ras = &nbio_v4_3_ras; in amdgpu_ras_init()
2659 adev->nbio.ras = &nbio_v7_9_ras; in amdgpu_ras_init()
2672 if (adev->nbio.ras && in amdgpu_ras_init()
[all …]
amdgpu_bios.c
502 if (adev->nbio.funcs && in amdgpu_soc15_read_bios_from_rom()
503 adev->nbio.funcs->get_rom_offset) { in amdgpu_soc15_read_bios_from_rom()
504 rom_offset = adev->nbio.funcs->get_rom_offset(adev); in amdgpu_soc15_read_bios_from_rom()
nbio_v4_3.c
606 adev->nbio.ras_err_event_athub_irq.funcs = in nbio_v4_3_init_ras_err_event_athub_interrupt()
608 adev->nbio.ras_err_event_athub_irq.num_types = 1; in nbio_v4_3_init_ras_err_event_athub_interrupt()
614 &adev->nbio.ras_err_event_athub_irq); in nbio_v4_3_init_ras_err_event_athub_interrupt()
vega10_ih.c
273 adev->nbio.funcs->ih_control(adev); in vega10_ih_irq_init()
293 adev->nbio.funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell, in vega10_ih_irq_init()
ih_v6_0.c
306 adev->nbio.funcs->ih_control(adev); in ih_v6_0_irq_init()
327 adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell, in ih_v6_0_irq_init()
ih_v6_1.c
306 adev->nbio.funcs->ih_control(adev); in ih_v6_1_irq_init()
327 adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell, in ih_v6_1_irq_init()
vega20_ih.c
292 adev->nbio.funcs->ih_control(adev); in vega20_ih_irq_init()
326 adev->nbio.funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell, in vega20_ih_irq_init()
aqua_vanjaram.c
304 if (adev->nbio.funcs->get_compute_partition_mode) in aqua_vanjaram_query_partition_mode()
305 mode = adev->nbio.funcs->get_compute_partition_mode(adev); in aqua_vanjaram_query_partition_mode()
navi10_ih.c
329 adev->nbio.funcs->ih_control(adev); in navi10_ih_irq_init()
361 adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell, in navi10_ih_irq_init()
sdma_v5_2.c
293 const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg; in sdma_v5_2_ring_emit_hdp_flush()
300 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2); in sdma_v5_2_ring_emit_hdp_flush()
301 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2); in sdma_v5_2_ring_emit_hdp_flush()
566 adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell, in sdma_v5_2_gfx_resume()
sdma_v6_0.c
311 const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg; in sdma_v6_0_ring_emit_hdp_flush()
318 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2); in sdma_v6_0_ring_emit_hdp_flush()
319 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2); in sdma_v6_0_ring_emit_hdp_flush()
546 adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell, in sdma_v6_0_gfx_resume()
sdma_v5_0.c
485 const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg; in sdma_v5_0_ring_emit_hdp_flush()
495 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2); in sdma_v5_0_ring_emit_hdp_flush()
496 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2); in sdma_v5_0_ring_emit_hdp_flush()
769 adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell, in sdma_v5_0_gfx_resume()
jpeg_v3_0.c
151 adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell, in jpeg_v3_0_hw_init()
gmc_v9_0.c
1421 if (adev->nbio.funcs->get_memory_partition_mode) in gmc_v9_0_get_memory_partition()
1422 mode = adev->nbio.funcs->get_memory_partition_mode(adev, in gmc_v9_0_get_memory_partition()
1705 adev->nbio.funcs->get_memsize(adev) * 1024ULL * 1024ULL; in gmc_v9_0_mc_init()
sdma_v4_4_2.c
366 const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg; in sdma_v4_4_2_ring_emit_hdp_flush()
371 adev->nbio.funcs->get_hdp_flush_done_offset(adev), in sdma_v4_4_2_ring_emit_hdp_flush()
372 adev->nbio.funcs->get_hdp_flush_req_offset(adev), in sdma_v4_4_2_ring_emit_hdp_flush()
/linux-6.6.21/drivers/block/xen-blkback/
blkback.c
1299 int i, nbio = 0; in dispatch_rw_block_io() local
1437 biolist[nbio++] = bio; in dispatch_rw_block_io()
1452 biolist[nbio++] = bio; in dispatch_rw_block_io()
1457 atomic_set(&pending_req->pendcnt, nbio); in dispatch_rw_block_io()
1460 for (i = 0; i < nbio; i++) in dispatch_rw_block_io()
