
Searched refs:nbio (Results 1 – 25 of 26) sorted by relevance

/drivers/gpu/drm/amd/amdgpu/
amdgpu_nbio.c
35 if (!adev->nbio.ras_if) { in amdgpu_nbio_ras_late_init()
36 adev->nbio.ras_if = kmalloc(sizeof(struct ras_common_if), GFP_KERNEL); in amdgpu_nbio_ras_late_init()
37 if (!adev->nbio.ras_if) in amdgpu_nbio_ras_late_init()
39 adev->nbio.ras_if->block = AMDGPU_RAS_BLOCK__PCIE_BIF; in amdgpu_nbio_ras_late_init()
40 adev->nbio.ras_if->type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE; in amdgpu_nbio_ras_late_init()
41 adev->nbio.ras_if->sub_block_index = 0; in amdgpu_nbio_ras_late_init()
42 strcpy(adev->nbio.ras_if->name, "pcie_bif"); in amdgpu_nbio_ras_late_init()
44 ih_info.head = fs_info.head = *adev->nbio.ras_if; in amdgpu_nbio_ras_late_init()
45 r = amdgpu_ras_late_init(adev, adev->nbio.ras_if, in amdgpu_nbio_ras_late_init()
50 if (amdgpu_ras_is_supported(adev, adev->nbio.ras_if->block)) { in amdgpu_nbio_ras_late_init()
[all …]
soc15.c
105 address = adev->nbio.funcs->get_pcie_index_offset(adev); in soc15_pcie_rreg()
106 data = adev->nbio.funcs->get_pcie_data_offset(adev); in soc15_pcie_rreg()
115 address = adev->nbio.funcs->get_pcie_index_offset(adev); in soc15_pcie_wreg()
116 data = adev->nbio.funcs->get_pcie_data_offset(adev); in soc15_pcie_wreg()
124 address = adev->nbio.funcs->get_pcie_index_offset(adev); in soc15_pcie_rreg64()
125 data = adev->nbio.funcs->get_pcie_data_offset(adev); in soc15_pcie_rreg64()
134 address = adev->nbio.funcs->get_pcie_index_offset(adev); in soc15_pcie_wreg64()
135 data = adev->nbio.funcs->get_pcie_data_offset(adev); in soc15_pcie_wreg64()
242 return adev->nbio.funcs->get_memsize(adev); in soc15_get_config_memsize()
465 u32 memsize = adev->nbio.funcs->get_memsize(adev); in soc15_asic_mode1_reset()
[all …]
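
Note: the soc15.c hits above (and the nv.c and df_v3_6.c hits below) all follow the same indirect access pattern: ask the NBIO block for its PCIE index/data register pair, then select-and-read through that pair. A minimal sketch of such a read helper, assuming the usual WREG32/RREG32 accessors and the device's pcie_idx_lock spinlock (the helper name is illustrative, not taken from the file):

static u32 pcie_indirect_rreg(struct amdgpu_device *adev, u32 reg)
{
	unsigned long flags, address, data;
	u32 r;

	/* the NBIO block reports where its PCIE index/data pair lives */
	address = adev->nbio.funcs->get_pcie_index_offset(adev);
	data = adev->nbio.funcs->get_pcie_data_offset(adev);

	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
	WREG32(address, reg);      /* select the register to access */
	(void)RREG32(address);     /* read back to post the write */
	r = RREG32(data);          /* fetch the selected register's value */
	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);

	return r;
}
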
nv.c
73 address = adev->nbio.funcs->get_pcie_index_offset(adev); in nv_pcie_rreg()
74 data = adev->nbio.funcs->get_pcie_data_offset(adev); in nv_pcie_rreg()
83 address = adev->nbio.funcs->get_pcie_index_offset(adev); in nv_pcie_wreg()
84 data = adev->nbio.funcs->get_pcie_data_offset(adev); in nv_pcie_wreg()
92 address = adev->nbio.funcs->get_pcie_index_offset(adev); in nv_pcie_rreg64()
93 data = adev->nbio.funcs->get_pcie_data_offset(adev); in nv_pcie_rreg64()
102 address = adev->nbio.funcs->get_pcie_index_offset(adev); in nv_pcie_wreg64()
103 data = adev->nbio.funcs->get_pcie_data_offset(adev); in nv_pcie_wreg64()
138 return adev->nbio.funcs->get_memsize(adev); in nv_get_config_memsize()
295 u32 memsize = adev->nbio.funcs->get_memsize(adev); in nv_asic_mode1_reset()
[all …]
nbio_v7_4.c
303 struct ras_manager *obj = amdgpu_ras_find_obj(adev, adev->nbio.ras_if); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
333 adev->nbio.ras_if->name); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
339 adev->nbio.ras_if->name); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
457 adev->nbio.ras_controller_irq.funcs = in nbio_v7_4_init_ras_controller_interrupt()
459 adev->nbio.ras_controller_irq.num_types = 1; in nbio_v7_4_init_ras_controller_interrupt()
464 &adev->nbio.ras_controller_irq); in nbio_v7_4_init_ras_controller_interrupt()
475 adev->nbio.ras_err_event_athub_irq.funcs = in nbio_v7_4_init_ras_err_event_athub_interrupt()
477 adev->nbio.ras_err_event_athub_irq.num_types = 1; in nbio_v7_4_init_ras_err_event_athub_interrupt()
482 &adev->nbio.ras_err_event_athub_irq); in nbio_v7_4_init_ras_err_event_athub_interrupt()
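
Note: the nbio_v7_4.c matches at 457-482 are the two interrupt-source registrations (RAS controller and err_event_athub). Each one fills in the source's callbacks and type count and registers it with the IRQ core; a condensed sketch, where the funcs table and the client/source IDs are placeholders rather than the real nbio_v7_4.c constants:

static int nbio_ras_controller_irq_init_sketch(struct amdgpu_device *adev)
{
	/* callbacks invoked by the IH code for this source (placeholder name) */
	adev->nbio.ras_controller_irq.funcs = &ras_controller_irq_funcs;
	adev->nbio.ras_controller_irq.num_types = 1;

	/* BIF_CLIENT_ID / RAS_CONTROLLER_SRC_ID stand in for the real
	 * client and source ID constants used in nbio_v7_4.c */
	return amdgpu_irq_add_id(adev, BIF_CLIENT_ID, RAS_CONTROLLER_SRC_ID,
				 &adev->nbio.ras_controller_irq);
}
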
df_v3_6.c
105 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_get_fica()
106 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_get_fica()
128 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_set_fica()
129 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_set_fica()
156 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_perfmon_rreg()
157 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_perfmon_rreg()
178 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_perfmon_wreg()
179 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_perfmon_wreg()
197 address = adev->nbio.funcs->get_pcie_index_offset(adev); in df_v3_6_perfmon_arm_with_status()
198 data = adev->nbio.funcs->get_pcie_data_offset(adev); in df_v3_6_perfmon_arm_with_status()
amdgpu_irq.c
167 if (adev->nbio.funcs && in amdgpu_irq_handler()
168 adev->nbio.funcs->handle_ras_controller_intr_no_bifring) in amdgpu_irq_handler()
169 adev->nbio.funcs->handle_ras_controller_intr_no_bifring(adev); in amdgpu_irq_handler()
171 if (adev->nbio.funcs && in amdgpu_irq_handler()
172 adev->nbio.funcs->handle_ras_err_event_athub_intr_no_bifring) in amdgpu_irq_handler()
173 adev->nbio.funcs->handle_ras_err_event_athub_intr_no_bifring(adev); in amdgpu_irq_handler()
amdgpu_ras.c
794 if (adev->nbio.funcs->query_ras_error_count) in amdgpu_ras_error_query()
795 adev->nbio.funcs->query_ras_error_count(adev, &err_data); in amdgpu_ras_error_query()
2064 if (adev->nbio.funcs->init_ras_controller_interrupt) { in amdgpu_ras_init()
2065 r = adev->nbio.funcs->init_ras_controller_interrupt(adev); in amdgpu_ras_init()
2070 if (adev->nbio.funcs->init_ras_err_event_athub_interrupt) { in amdgpu_ras_init()
2071 r = adev->nbio.funcs->init_ras_err_event_athub_interrupt(adev); in amdgpu_ras_init()
gmc_v10_0.c
283 adev->nbio.funcs->hdp_flush(adev, NULL); in gmc_v10_0_flush_gpu_tlb()
726 adev->nbio.funcs->get_memsize(adev) * 1024ULL * 1024ULL; in gmc_v10_0_mc_init()
967 adev->nbio.funcs->hdp_flush(adev, NULL); in gmc_v10_0_gart_enable()
sdma_v5_2.c
391 const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg; in sdma_v5_2_ring_emit_hdp_flush()
398 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2); in sdma_v5_2_ring_emit_hdp_flush()
399 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2); in sdma_v5_2_ring_emit_hdp_flush()
664 adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell, in sdma_v5_2_gfx_resume()
sdma_v5_0.c
454 const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg; in sdma_v5_0_ring_emit_hdp_flush()
464 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2); in sdma_v5_0_ring_emit_hdp_flush()
465 amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2); in sdma_v5_0_ring_emit_hdp_flush()
734 adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell, in sdma_v5_0_gfx_resume()
navi10_ih.c
291 adev->nbio.funcs->ih_control(adev); in navi10_ih_irq_init()
345 adev->nbio.funcs->ih_doorbell_range(adev, ih->use_doorbell, in navi10_ih_irq_init()
jpeg_v3_0.c
140 adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell, in jpeg_v3_0_hw_init()
jpeg_v2_5.c
167 adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell, in jpeg_v2_5_hw_init()
gmc_v9_0.c
1269 adev->nbio.funcs->get_memsize(adev) * 1024ULL * 1024ULL; in gmc_v9_0_mc_init()
1641 adev->nbio.funcs->hdp_flush(adev, NULL); in gmc_v9_0_hw_init()
jpeg_v2_0.c
154 adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell, in jpeg_v2_0_hw_init()
vega10_ih.c
229 adev->nbio.funcs->ih_control(adev); in vega10_ih_irq_init()
psp_v11_0.c
705 adev->nbio.funcs->hdp_flush(adev, NULL); in psp_v11_0_memory_training()
sdma_v4_0.c
901 const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg; in sdma_v4_0_ring_emit_hdp_flush()
906 adev->nbio.funcs->get_hdp_flush_done_offset(adev), in sdma_v4_0_ring_emit_hdp_flush()
907 adev->nbio.funcs->get_hdp_flush_req_offset(adev), in sdma_v4_0_ring_emit_hdp_flush()
amdgpu_device.c
4875 if (ras && ras->supported && adev->nbio.funcs->enable_doorbell_interrupt) in amdgpu_device_baco_enter()
4876 adev->nbio.funcs->enable_doorbell_interrupt(adev, false); in amdgpu_device_baco_enter()
4894 if (ras && ras->supported && adev->nbio.funcs->enable_doorbell_interrupt) in amdgpu_device_baco_exit()
4895 adev->nbio.funcs->enable_doorbell_interrupt(adev, true); in amdgpu_device_baco_exit()
amdgpu.h
881 struct amdgpu_nbio nbio; member
gfx_v10_0.c
5550 adev->nbio.funcs->hdp_flush(adev, NULL); in gfx_v10_0_cp_gfx_load_pfp_microcode()
5628 adev->nbio.funcs->hdp_flush(adev, NULL); in gfx_v10_0_cp_gfx_load_ce_microcode()
5705 adev->nbio.funcs->hdp_flush(adev, NULL); in gfx_v10_0_cp_gfx_load_me_microcode()
6066 adev->nbio.funcs->hdp_flush(adev, NULL); in gfx_v10_0_cp_compute_load_microcode()
7764 const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg; in gfx_v10_0_ring_emit_hdp_flush()
7784 adev->nbio.funcs->get_hdp_flush_req_offset(adev), in gfx_v10_0_ring_emit_hdp_flush()
7785 adev->nbio.funcs->get_hdp_flush_done_offset(adev), in gfx_v10_0_ring_emit_hdp_flush()
vcn_v2_5.c
293 adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell, in vcn_v2_5_hw_init()
vcn_v2_0.c
229 adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell, in vcn_v2_0_hw_init()
vcn_v3_0.c
314 adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell, in vcn_v3_0_hw_init()
/drivers/block/xen-blkback/
blkback.c
1194 int i, nbio = 0; in dispatch_rw_block_io() local
1333 biolist[nbio++] = bio; in dispatch_rw_block_io()
1352 biolist[nbio++] = bio; in dispatch_rw_block_io()
1359 atomic_set(&pending_req->pendcnt, nbio); in dispatch_rw_block_io()
1362 for (i = 0; i < nbio; i++) in dispatch_rw_block_io()
1386 for (i = 0; i < nbio; i++) in dispatch_rw_block_io()
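
Note: the only hit outside amdgpu is a local variable. dispatch_rw_block_io() in xen-blkback uses nbio to count the bios it builds for one ring request before submitting them; condensed from the matched lines (allocation of the bios and the biolist[] bound are omitted here):

	int i, nbio = 0;

	/* each bio built for the request (one per contiguous run of segments,
	 * or one for a cache-flush with no data) is parked in biolist[] */
	biolist[nbio++] = bio;

	/* remember how many completions must arrive before the request
	 * can be answered back to the frontend */
	atomic_set(&pending_req->pendcnt, nbio);

	for (i = 0; i < nbio; i++)
		submit_bio(biolist[i]);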
