/drivers/gpu/drm/amd/amdgpu/
D | amdgpu_nbio.c |
    35  if (!adev->nbio.ras_if) {  in amdgpu_nbio_ras_late_init()
    36  adev->nbio.ras_if = kmalloc(sizeof(struct ras_common_if), GFP_KERNEL);  in amdgpu_nbio_ras_late_init()
    37  if (!adev->nbio.ras_if)  in amdgpu_nbio_ras_late_init()
    39  adev->nbio.ras_if->block = AMDGPU_RAS_BLOCK__PCIE_BIF;  in amdgpu_nbio_ras_late_init()
    40  adev->nbio.ras_if->type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE;  in amdgpu_nbio_ras_late_init()
    41  adev->nbio.ras_if->sub_block_index = 0;  in amdgpu_nbio_ras_late_init()
    43  ih_info.head = fs_info.head = *adev->nbio.ras_if;  in amdgpu_nbio_ras_late_init()
    44  r = amdgpu_ras_late_init(adev, adev->nbio.ras_if,  in amdgpu_nbio_ras_late_init()
    49  if (amdgpu_ras_is_supported(adev, adev->nbio.ras_if->block)) {  in amdgpu_nbio_ras_late_init()
    50  r = amdgpu_irq_get(adev, &adev->nbio.ras_controller_irq, 0);  in amdgpu_nbio_ras_late_init()
    [all …]

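The amdgpu_nbio.c matches above trace the allocate-once initialization of the NBIO RAS interface: the descriptor is created on first use, populated, and reused on later calls. Below is a minimal user-space sketch of that pattern; the ras_common_if fields mirror those visible in the matches, but everything else (nbio_state, the zeroed placeholder values) is a stand-in, not the kernel API.

#include <stdlib.h>
#include <errno.h>

struct ras_common_if {
	int block;               /* e.g. AMDGPU_RAS_BLOCK__PCIE_BIF */
	int type;                /* e.g. AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE */
	unsigned int sub_block_index;
};

struct nbio_state {
	struct ras_common_if *ras_if;
};

/* Allocate the RAS descriptor on first use; later calls reuse it. */
static int nbio_ras_late_init(struct nbio_state *nbio)
{
	if (!nbio->ras_if) {
		nbio->ras_if = malloc(sizeof(*nbio->ras_if));
		if (!nbio->ras_if)
			return -ENOMEM;
		nbio->ras_if->block = 0;            /* placeholder block id */
		nbio->ras_if->type = 0;             /* placeholder error type */
		nbio->ras_if->sub_block_index = 0;
	}
	/* ...registration with the RAS core would follow here... */
	return 0;
}

int main(void)
{
	struct nbio_state nbio = { 0 };
	int r = nbio_ras_late_init(&nbio);      /* first call allocates */
	if (!r)
		r = nbio_ras_late_init(&nbio);  /* second call reuses the descriptor */
	free(nbio.ras_if);
	return r ? 1 : 0;
}
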
D | soc15.c |
    191  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in soc15_pcie_rreg()
    192  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in soc15_pcie_rreg()
    201  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in soc15_pcie_wreg()
    202  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in soc15_pcie_wreg()
    210  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in soc15_pcie_rreg64()
    211  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in soc15_pcie_rreg64()
    220  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in soc15_pcie_wreg64()
    221  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in soc15_pcie_wreg64()
    328  return adev->nbio.funcs->get_memsize(adev);  in soc15_get_config_memsize()
    532  adev->nbio.funcs->enable_doorbell_interrupt(adev, false);  in soc15_asic_baco_reset()
    [all …]

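soc15.c (and nv.c and df_v3_6.c below) repeatedly fetch a PCIE index offset and a PCIE data offset from the NBIO callbacks. That is the classic index/data indirection: writing a register number to the index register makes the data register a window onto it, letting a small MMIO aperture reach a much larger register space. In the kernel the sequence is additionally serialized with a spinlock. A self-contained model, with a plain array standing in for the MMIO aperture and offsets chosen arbitrarily for illustration:

#include <stdint.h>
#include <stdio.h>

#define PCIE_INDEX 0x38   /* assumed offsets, for the model only */
#define PCIE_DATA  0x3C

static uint32_t mmio[0x100];       /* fake directly mapped registers */
static uint32_t backing[0x10000];  /* registers reachable through the window */

static void wreg32(uint32_t off, uint32_t v)
{
	mmio[off] = v;
	if (off == PCIE_DATA)
		backing[mmio[PCIE_INDEX]] = v;  /* window write lands in backing store */
}

static uint32_t rreg32(uint32_t off)
{
	if (off == PCIE_DATA)
		return backing[mmio[PCIE_INDEX]];
	return mmio[off];
}

static uint32_t pcie_rreg(uint32_t reg)
{
	wreg32(PCIE_INDEX, reg);    /* select the target register */
	(void)rreg32(PCIE_INDEX);   /* read back to post the write */
	return rreg32(PCIE_DATA);   /* fetch its contents through the window */
}

static void pcie_wreg(uint32_t reg, uint32_t v)
{
	wreg32(PCIE_INDEX, reg);
	(void)rreg32(PCIE_INDEX);
	wreg32(PCIE_DATA, v);
	(void)rreg32(PCIE_DATA);
}

int main(void)
{
	pcie_wreg(0x1234, 0xdeadbeef);
	printf("0x%08x\n", pcie_rreg(0x1234));  /* prints 0xdeadbeef */
	return 0;
}
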
D | nv.c |
    237  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in nv_pcie_rreg()
    238  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in nv_pcie_rreg()
    247  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in nv_pcie_wreg()
    248  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in nv_pcie_wreg()
    256  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in nv_pcie_rreg64()
    257  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in nv_pcie_rreg64()
    266  address = adev->nbio.funcs->get_pcie_port_index_offset(adev);  in nv_pcie_port_rreg()
    267  data = adev->nbio.funcs->get_pcie_port_data_offset(adev);  in nv_pcie_port_rreg()
    281  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in nv_pcie_wreg64()
    282  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in nv_pcie_wreg64()
    [all …]

D | df_v3_6.c |
    51  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_get_fica()
    52  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_get_fica()
    74  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_set_fica()
    75  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_set_fica()
    102  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_perfmon_rreg()
    103  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_perfmon_rreg()
    124  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_perfmon_wreg()
    125  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_perfmon_wreg()
    143  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_perfmon_arm_with_status()
    144  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_perfmon_arm_with_status()

D | nbio_v7_4.c |
    353  struct ras_manager *obj = amdgpu_ras_find_obj(adev, adev->nbio.ras_if);  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    390  ras_block_str(adev->nbio.ras_if->block));  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    396  ras_block_str(adev->nbio.ras_if->block));  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    539  adev->nbio.ras_controller_irq.funcs =  in nbio_v7_4_init_ras_controller_interrupt()
    541  adev->nbio.ras_controller_irq.num_types = 1;  in nbio_v7_4_init_ras_controller_interrupt()
    546  &adev->nbio.ras_controller_irq);  in nbio_v7_4_init_ras_controller_interrupt()
    557  adev->nbio.ras_err_event_athub_irq.funcs =  in nbio_v7_4_init_ras_err_event_athub_interrupt()
    559  adev->nbio.ras_err_event_athub_irq.num_types = 1;  in nbio_v7_4_init_ras_err_event_athub_interrupt()
    564  &adev->nbio.ras_err_event_athub_irq);  in nbio_v7_4_init_ras_err_event_athub_interrupt()

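The matches at lines 539-564 show the interrupt-source registration idiom: an interrupt source gets its callback table and type count filled in, then is registered with the dispatcher under a source id. The sketch below models that shape with a toy id-indexed table; irq_add_id here is a hypothetical stand-in, not the real amdgpu_irq_add_id() signature.

#include <stdio.h>

struct irq_src_funcs {
	int (*process)(void *ctx);
};

struct irq_src {
	const struct irq_src_funcs *funcs;
	unsigned int num_types;
};

#define MAX_SRC 8
static struct irq_src *irq_table[MAX_SRC];

/* Hypothetical registration helper: the dispatcher later looks the
 * source up by id and invokes its process() callback. */
static int irq_add_id(unsigned int src_id, struct irq_src *src)
{
	if (src_id >= MAX_SRC || !src->funcs)
		return -1;
	irq_table[src_id] = src;
	return 0;
}

static int process_ras_controller_intr(void *ctx)
{
	(void)ctx;
	puts("ras controller interrupt");
	return 0;
}

static const struct irq_src_funcs ras_controller_irq_funcs = {
	.process = process_ras_controller_intr,
};

int main(void)
{
	static struct irq_src ras_controller_irq;

	/* same three steps as the kernel code: funcs, num_types, register */
	ras_controller_irq.funcs = &ras_controller_irq_funcs;
	ras_controller_irq.num_types = 1;
	if (irq_add_id(0, &ras_controller_irq))
		return 1;
	return irq_table[0]->funcs->process(NULL);
}
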
D | amdgpu_irq.c |
    202  if (adev->nbio.ras_funcs &&  in amdgpu_irq_handler()
    203  adev->nbio.ras_funcs->handle_ras_controller_intr_no_bifring)  in amdgpu_irq_handler()
    204  adev->nbio.ras_funcs->handle_ras_controller_intr_no_bifring(adev);  in amdgpu_irq_handler()
    206  if (adev->nbio.ras_funcs &&  in amdgpu_irq_handler()
    207  adev->nbio.ras_funcs->handle_ras_err_event_athub_intr_no_bifring)  in amdgpu_irq_handler()
    208  adev->nbio.ras_funcs->handle_ras_err_event_athub_intr_no_bifring(adev);  in amdgpu_irq_handler()

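amdgpu_irq_handler() guards each call twice, first on the ras_funcs table pointer and then on the individual hook, so ASICs without a RAS implementation fall straight through. A compact model of that NULL-guarded dispatch, with simplified stand-in types:

#include <stdio.h>

struct ras_funcs {
	void (*handle_ras_controller_intr)(void *ctx);
	void (*handle_ras_err_event_athub_intr)(void *ctx);
};

struct device_state {
	const struct ras_funcs *ras_funcs;  /* NULL on chips without RAS */
};

static void irq_handler(struct device_state *dev)
{
	/* guard the table pointer and the specific hook before calling */
	if (dev->ras_funcs && dev->ras_funcs->handle_ras_controller_intr)
		dev->ras_funcs->handle_ras_controller_intr(dev);
	if (dev->ras_funcs && dev->ras_funcs->handle_ras_err_event_athub_intr)
		dev->ras_funcs->handle_ras_err_event_athub_intr(dev);
}

static void log_controller_intr(void *ctx)
{
	(void)ctx;
	puts("ras controller interrupt handled");
}

int main(void)
{
	const struct ras_funcs funcs = {
		.handle_ras_controller_intr = log_controller_intr,
		/* athub hook intentionally left NULL: it is skipped */
	};
	struct device_state no_ras = { .ras_funcs = NULL };
	struct device_state with_ras = { .ras_funcs = &funcs };

	irq_handler(&no_ras);    /* safe: both calls skipped */
	irq_handler(&with_ras);  /* only the controller hook fires */
	return 0;
}
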
D | amdgpu_ras.c |
    861  if (adev->nbio.ras_funcs &&  in amdgpu_ras_query_error_status()
    862  adev->nbio.ras_funcs->query_ras_error_count)  in amdgpu_ras_query_error_status()
    863  adev->nbio.ras_funcs->query_ras_error_count(adev, &err_data);  in amdgpu_ras_query_error_status()
    2230  adev->nbio.ras_funcs = &nbio_v7_4_ras_funcs;  in amdgpu_ras_init()
    2237  if (adev->nbio.ras_funcs &&  in amdgpu_ras_init()
    2238  adev->nbio.ras_funcs->init_ras_controller_interrupt) {  in amdgpu_ras_init()
    2239  r = adev->nbio.ras_funcs->init_ras_controller_interrupt(adev);  in amdgpu_ras_init()
    2244  if (adev->nbio.ras_funcs &&  in amdgpu_ras_init()
    2245  adev->nbio.ras_funcs->init_ras_err_event_athub_interrupt) {  in amdgpu_ras_init()
    2246  r = adev->nbio.ras_funcs->init_ras_err_event_athub_interrupt(adev);  in amdgpu_ras_init()

D | vega10_ih.c |
    273  adev->nbio.funcs->ih_control(adev);  in vega10_ih_irq_init()
    293  adev->nbio.funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell,  in vega10_ih_irq_init()

D | navi10_ih.c |
    331  adev->nbio.funcs->ih_control(adev);  in navi10_ih_irq_init()
    366  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  in navi10_ih_irq_init()

D | vega20_ih.c |
    309  adev->nbio.funcs->ih_control(adev);  in vega20_ih_irq_init()
    344  adev->nbio.funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell,  in vega20_ih_irq_init()

D | sdma_v5_2.c |
    405  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v5_2_ring_emit_hdp_flush()
    412  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  in sdma_v5_2_ring_emit_hdp_flush()
    413  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  in sdma_v5_2_ring_emit_hdp_flush()
    680  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  in sdma_v5_2_gfx_resume()

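The HDP-flush emitters here (and in sdma_v5_0.c, sdma_v4_0.c, and gfx_v9_0.c below) encode a request/acknowledge handshake into the ring: a per-client ref_and_mask bit is written to the flush "req" register, and the packet then polls the "done" register until the hardware mirrors the bit back. A CPU-side sketch of that handshake, with simulated registers and an instant fake-hardware response standing in for the real POLL_REGMEM packet:

#include <stdint.h>
#include <stdio.h>

static uint32_t hdp_flush_req;
static uint32_t hdp_flush_done;

static void hw_service_flush(void)
{
	/* pretend hardware: acknowledge any requested flushes immediately */
	hdp_flush_done |= hdp_flush_req;
}

static void emit_hdp_flush(uint32_t ref_and_mask)
{
	hdp_flush_done &= ~ref_and_mask;  /* clear any stale acknowledge */
	hdp_flush_req = ref_and_mask;     /* request the flush */
	hw_service_flush();
	while ((hdp_flush_done & ref_and_mask) != ref_and_mask)
		;                         /* POLL_REGMEM equivalent */
	printf("flush acked for mask 0x%x\n", ref_and_mask);
}

int main(void)
{
	emit_hdp_flush(1u << 10);  /* an arbitrary per-client bit for the model */
	return 0;
}
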
D | sdma_v5_0.c |
    517  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v5_0_ring_emit_hdp_flush()
    527  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  in sdma_v5_0_ring_emit_hdp_flush()
    528  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  in sdma_v5_0_ring_emit_hdp_flush()
    805  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  in sdma_v5_0_gfx_resume()

D | jpeg_v3_0.c |
    143  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in jpeg_v3_0_hw_init()

D | amdgpu_device.c |
    4621  u32 memsize = adev->nbio.funcs->get_memsize(adev);  in amdgpu_device_mode1_reset()
    5511  adev->nbio.funcs->enable_doorbell_interrupt)  in amdgpu_device_baco_enter()
    5512  adev->nbio.funcs->enable_doorbell_interrupt(adev, false);  in amdgpu_device_baco_enter()
    5531  adev->nbio.funcs->enable_doorbell_interrupt)  in amdgpu_device_baco_exit()
    5532  adev->nbio.funcs->enable_doorbell_interrupt(adev, true);  in amdgpu_device_baco_exit()
    5535  adev->nbio.funcs->clear_doorbell_interrupt)  in amdgpu_device_baco_exit()
    5536  adev->nbio.funcs->clear_doorbell_interrupt(adev);  in amdgpu_device_baco_exit()

D | jpeg_v2_5.c |
    167  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in jpeg_v2_5_hw_init()

D | jpeg_v2_0.c |
    154  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in jpeg_v2_0_hw_init()

D | sdma_v4_0.c |
    929  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v4_0_ring_emit_hdp_flush()
    934  adev->nbio.funcs->get_hdp_flush_done_offset(adev),  in sdma_v4_0_ring_emit_hdp_flush()
    935  adev->nbio.funcs->get_hdp_flush_req_offset(adev),  in sdma_v4_0_ring_emit_hdp_flush()

D | gmc_v10_0.c |
    780  adev->nbio.funcs->get_memsize(adev) * 1024ULL * 1024ULL;  in gmc_v10_0_mc_init()

D | gmc_v9_0.c |
    1366  adev->nbio.funcs->get_memsize(adev) * 1024ULL * 1024ULL;  in gmc_v9_0_mc_init()

D | amdgpu.h |
    956  struct amdgpu_nbio nbio;  member

D | vcn_v2_0.c |
    229  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in vcn_v2_0_hw_init()

D | vcn_v2_5.c |
    293  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in vcn_v2_5_hw_init()

D | gfx_v9_0.c |
    5340  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in gfx_v9_0_ring_emit_hdp_flush()
    5360  adev->nbio.funcs->get_hdp_flush_req_offset(adev),  in gfx_v9_0_ring_emit_hdp_flush()
    5361  adev->nbio.funcs->get_hdp_flush_done_offset(adev),  in gfx_v9_0_ring_emit_hdp_flush()

D | vcn_v3_0.c |
    358  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in vcn_v3_0_hw_init()

/drivers/block/xen-blkback/
D | blkback.c |
    1194  int i, nbio = 0;  in dispatch_rw_block_io() local
    1333  biolist[nbio++] = bio;  in dispatch_rw_block_io()
    1352  biolist[nbio++] = bio;  in dispatch_rw_block_io()
    1359  atomic_set(&pending_req->pendcnt, nbio);  in dispatch_rw_block_io()
    1362  for (i = 0; i < nbio; i++)  in dispatch_rw_block_io()
    1386  for (i = 0; i < nbio; i++)  in dispatch_rw_block_io()

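Unlike the amdgpu matches, nbio here is a plain local counter: dispatch_rw_block_io() collects the bios it builds into biolist[], sets pendcnt to their count before submitting any of them, and lets the completion path finish the request when the count reaches zero. Setting the counter first means an early completion can never observe a stale value. A user-space sketch of that counting pattern using C11 atomics (stand-in types, with completions run inline rather than from a real block layer):

#include <stdatomic.h>
#include <stdio.h>

#define MAX_BIO 16

struct bio { int id; };
struct pending_req { atomic_int pendcnt; };

/* Completion callback: the last finished bio completes the request. */
static void end_block_io_op(struct pending_req *req, struct bio *bio)
{
	(void)bio;
	if (atomic_fetch_sub(&req->pendcnt, 1) == 1)
		printf("request complete\n");
}

int main(void)
{
	struct bio bios[MAX_BIO];
	struct bio *biolist[MAX_BIO];
	struct pending_req req;
	int i, nbio = 0;

	for (i = 0; i < 3; i++) {           /* build the bio list */
		bios[i].id = i;
		biolist[nbio++] = &bios[i];
	}

	/* set the counter before submitting anything, so an early
	 * completion cannot race with a not-yet-initialized count */
	atomic_store(&req.pendcnt, nbio);

	for (i = 0; i < nbio; i++)          /* "submit": complete inline */
		end_block_io_op(&req, biolist[i]);
	return 0;
}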