
Searched refs: gpu_addr (Results 1 – 25 of 128), sorted by relevance

/drivers/gpu/drm/radeon/
r600_dma.c
143 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF); in r600_dma_resume()
145 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC)); in r600_dma_resume()
150 WREG32(DMA_RB_BASE, ring->gpu_addr >> 8); in r600_dma_resume()
236 u64 gpu_addr; in r600_dma_ring_test() local
243 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ring_test()
254 radeon_ring_write(ring, lower_32_bits(gpu_addr)); in r600_dma_ring_test()
255 radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff); in r600_dma_ring_test()
290 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_dma_fence_ring_emit()
317 u64 addr = semaphore->gpu_addr; in r600_dma_semaphore_ring_emit()
343 u64 gpu_addr; in r600_dma_ib_test() local
[all …]
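
Note: the r600_dma.c matches above share one pattern: a 64-bit GPU address is split into low/high 32-bit halves before being written into 32-bit registers or ring packets, and the ring base is programmed in 256-byte units (>> 8). A minimal sketch of that split; lower_32_bits()/upper_32_bits() mirror the kernel helpers, and write_reg32() is a hypothetical stand-in for WREG32().

    /* Sketch only, not the driver's code. */
    #include <stdint.h>

    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffffULL))
    #define upper_32_bits(n) ((uint32_t)((n) >> 32))

    static void write_reg32(uint32_t reg, uint32_t val)
    {
            (void)reg; (void)val;   /* stub: real code pokes an MMIO register */
    }

    static void program_dma_ring(uint32_t rb_base_reg, uint32_t rptr_hi_reg,
                                 uint32_t rptr_lo_reg, uint64_t ring_gpu_addr,
                                 uint64_t rptr_wb_gpu_addr)
    {
            /* ring base is programmed in 256-byte units, hence the >> 8 */
            write_reg32(rb_base_reg, (uint32_t)(ring_gpu_addr >> 8));
            /* read-pointer write-back address: high byte, then dword-aligned low part */
            write_reg32(rptr_hi_reg, upper_32_bits(rptr_wb_gpu_addr) & 0xFF);
            write_reg32(rptr_lo_reg, lower_32_bits(rptr_wb_gpu_addr) & 0xFFFFFFFC);
    }
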
cik_sdma.c
154 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
155 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
203 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_sdma_fence_ring_emit()
232 u64 addr = semaphore->gpu_addr; in cik_sdma_semaphore_ring_emit()
400 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
402 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
407 WREG32(SDMA0_GFX_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cik_sdma_gfx_resume()
408 WREG32(SDMA0_GFX_RB_BASE_HI + reg_offset, ring->gpu_addr >> 40); in cik_sdma_gfx_resume()
651 u64 gpu_addr; in cik_sdma_ring_test() local
658 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ring_test()
[all …]
uvd_v4_2.c
47 addr = (rdev->uvd.gpu_addr + 0x200) >> 3; in uvd_v4_2_resume()
49 addr = rdev->uvd.gpu_addr >> 3; in uvd_v4_2_resume()
67 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v4_2_resume()
71 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v4_2_resume()
uvd_v2_2.c
43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v2_2_fence_emit()
77 uint64_t addr = semaphore->gpu_addr; in uvd_v2_2_semaphore_emit()
113 addr = rdev->uvd.gpu_addr >> 3; in uvd_v2_2_resume()
130 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v2_2_resume()
134 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v2_2_resume()
uvd_v1_0.c
85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit()
121 addr = (rdev->uvd.gpu_addr >> 3) + 16; in uvd_v1_0_resume()
138 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v1_0_resume()
142 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v1_0_resume()
364 WREG32(UVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) | in uvd_v1_0_start()
374 WREG32(UVD_RBC_RB_BASE, ring->gpu_addr); in uvd_v1_0_start()
487 radeon_ring_write(ring, ib->gpu_addr); in uvd_v1_0_ib_execute()
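
Note: the uvd_v*_*.c matches above slice one address into several register fields: the VCPU cache offset in 8-byte units (>> 3), bits 31:28, and bits 39:32. A hedged sketch of that slicing (the struct and field names are illustrative, not the driver's; per-segment offsets are omitted).

    #include <stdint.h>

    struct uvd_addr_fields {            /* illustrative container only */
            uint32_t cache_offset_qw;   /* address in 8-byte (quad-word) units   */
            uint32_t addr_bits_31_28;   /* written to an "EXT" address register  */
            uint32_t addr_bits_39_32;   /* high byte of a 40-bit address space   */
    };

    static struct uvd_addr_fields uvd_split_addr(uint64_t gpu_addr)
    {
            struct uvd_addr_fields f = {
                    .cache_offset_qw = (uint32_t)(gpu_addr >> 3),
                    .addr_bits_31_28 = (uint32_t)((gpu_addr >> 28) & 0xF),
                    .addr_bits_39_32 = (uint32_t)((gpu_addr >> 32) & 0xFF),
            };
            return f;
    }
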
radeon_semaphore.c
51 (*semaphore)->gpu_addr = radeon_sa_bo_gpu_addr((*semaphore)->sa_bo); in radeon_semaphore_create()
69 ring->last_semaphore_signal_addr = semaphore->gpu_addr; in radeon_semaphore_emit_signal()
86 ring->last_semaphore_wait_addr = semaphore->gpu_addr; in radeon_semaphore_emit_wait()
vce_v1_0.c
218 uint64_t addr = rdev->vce.gpu_addr; in vce_v1_0_resume()
300 WREG32(VCE_RB_BASE_LO, ring->gpu_addr); in vce_v1_0_start()
301 WREG32(VCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
307 WREG32(VCE_RB_BASE_LO2, ring->gpu_addr); in vce_v1_0_start()
308 WREG32(VCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
radeon_trace.h
177 __field(uint64_t, gpu_addr)
183 __entry->gpu_addr = sem->gpu_addr;
187 __entry->waiters, __entry->gpu_addr)
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ih.c
69 ih->gpu_addr = dma_addr; in amdgpu_ih_ring_init()
89 &ih->ring_obj, &ih->gpu_addr, in amdgpu_ih_ring_init()
97 ih->wptr_addr = adev->wb.gpu_addr + wptr_offs * 4; in amdgpu_ih_ring_init()
99 ih->rptr_addr = adev->wb.gpu_addr + rptr_offs * 4; in amdgpu_ih_ring_init()
128 (void *)ih->ring, ih->gpu_addr); in amdgpu_ih_ring_fini()
131 amdgpu_bo_free_kernel(&ih->ring_obj, &ih->gpu_addr, in amdgpu_ih_ring_fini()
133 amdgpu_device_wb_free(adev, (ih->wptr_addr - ih->gpu_addr) / 4); in amdgpu_ih_ring_fini()
134 amdgpu_device_wb_free(adev, (ih->rptr_addr - ih->gpu_addr) / 4); in amdgpu_ih_ring_fini()
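
Note: the amdgpu_ih.c matches show the IH read/write-pointer addresses being carved out of the device write-back area: each slot is one 32-bit word, so the address is wb.gpu_addr + slot * 4, and teardown recovers the slot index with the inverse arithmetic. A minimal sketch of that arithmetic, with a hypothetical struct standing in for the driver's bookkeeping.

    #include <stdint.h>

    struct wb_area {                /* hypothetical mirror, not the driver's layout */
            uint64_t gpu_addr;      /* GPU address of the write-back page */
    };

    static uint64_t wb_slot_to_addr(const struct wb_area *wb, uint32_t slot)
    {
            return wb->gpu_addr + (uint64_t)slot * 4;        /* one 32-bit word per slot */
    }

    static uint32_t wb_addr_to_slot(const struct wb_area *wb, uint64_t addr)
    {
            return (uint32_t)((addr - wb->gpu_addr) / 4);    /* inverse, used on free */
    }
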
vcn_v2_0.c
346 lower_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v2_0_mc_resume()
348 upper_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v2_0_mc_resume()
358 lower_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v2_0_mc_resume()
360 upper_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v2_0_mc_resume()
366 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_0_mc_resume()
368 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_0_mc_resume()
412 lower_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
415 upper_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
433 lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
436 upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
[all …]
vce_v4_0.c
157 uint64_t addr = table->gpu_addr; in vce_v4_0_mmsch_start()
235 lower_32_bits(ring->gpu_addr)); in vce_v4_0_sriov_start()
237 upper_32_bits(ring->gpu_addr)); in vce_v4_0_sriov_start()
263 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
266 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
273 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
276 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
279 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
282 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
345 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO), ring->gpu_addr); in vce_v4_0_start()
[all …]
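
Note: the vce_v4_0.c matches (and the SDMA RB_BASE/RB_BASE_HI pairs elsewhere in these results) show a second recurring split: the base register pair takes the address in 256-byte units, bits 39:8 in a low register and bits 47:40 in a high register. A hedged helper that computes both halves; register names are deliberately left out.

    #include <stdint.h>

    struct base_reg_pair {          /* illustrative only */
            uint32_t lo;            /* address bits 39:8, i.e. addr >> 8   */
            uint32_t hi;            /* address bits 47:40, i.e. addr >> 40 */
    };

    static struct base_reg_pair split_base_256b(uint64_t gpu_addr)
    {
            struct base_reg_pair p = {
                    .lo = (uint32_t)(gpu_addr >> 8),
                    .hi = (uint32_t)((gpu_addr >> 40) & 0xff),
            };
            return p;
    }
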
vcn_v1_0.c
316 lower_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v1_0_mc_resume_spg_mode()
318 upper_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v1_0_mc_resume_spg_mode()
328 lower_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v1_0_mc_resume_spg_mode()
330 upper_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v1_0_mc_resume_spg_mode()
336 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v1_0_mc_resume_spg_mode()
338 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v1_0_mc_resume_spg_mode()
386 lower_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
388 upper_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
398 lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
400 upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
[all …]
si_dma.c
75 amdgpu_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in si_dma_ring_emit_ib()
76 amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in si_dma_ring_emit_ib()
157 rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4); in si_dma_start()
164 WREG32(DMA_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in si_dma_start()
210 u64 gpu_addr; in si_dma_ring_test_ring() local
216 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ring()
225 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in si_dma_ring_test_ring()
226 amdgpu_ring_write(ring, upper_32_bits(gpu_addr) & 0xff); in si_dma_ring_test_ring()
261 u64 gpu_addr; in si_dma_ring_test_ib() local
268 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ib()
[all …]
vcn_v2_5.c
412 lower_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_mc_resume()
414 upper_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_mc_resume()
423 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_mc_resume()
425 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_mc_resume()
431 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_5_mc_resume()
433 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_5_mc_resume()
476 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
479 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
497 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
500 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
[all …]
sdma_v2_4.c
265 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v2_4_ring_emit_ib()
266 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v2_4_ring_emit_ib()
458 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v2_4_gfx_resume()
460 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v2_4_gfx_resume()
464 WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in sdma_v2_4_gfx_resume()
465 WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40); in sdma_v2_4_gfx_resume()
556 u64 gpu_addr; in sdma_v2_4_ring_test_ring() local
562 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v2_4_ring_test_ring()
572 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v2_4_ring_test_ring()
573 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in sdma_v2_4_ring_test_ring()
[all …]
vcn_v3_0.c
477 lower_32_bits(adev->vcn.inst[inst].gpu_addr)); in vcn_v3_0_mc_resume()
479 upper_32_bits(adev->vcn.inst[inst].gpu_addr)); in vcn_v3_0_mc_resume()
488 lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset)); in vcn_v3_0_mc_resume()
490 upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset)); in vcn_v3_0_mc_resume()
496 lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v3_0_mc_resume()
498 upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v3_0_mc_resume()
540 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
543 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
561 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
564 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
[all …]
cik_sdma.c
236 amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_emit_ib()
237 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff); in cik_sdma_ring_emit_ib()
480 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
482 ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
486 WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in cik_sdma_gfx_resume()
487 WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40); in cik_sdma_gfx_resume()
622 u64 gpu_addr; in cik_sdma_ring_test_ring() local
628 gpu_addr = adev->wb.gpu_addr + (index * 4); in cik_sdma_ring_test_ring()
637 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in cik_sdma_ring_test_ring()
638 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in cik_sdma_ring_test_ring()
[all …]
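
Note: the SDMA ring tests in these results (cik_sdma.c, sdma_v2_4.c, si_dma.c, sdma_v5_2.c) all follow one shape: pick a write-back slot, compute its GPU address, emit a write-data packet through the ring that stores a magic value to that address, then poll the slot's CPU mapping. A schematic sketch of that flow; the emit callback stands in for amdgpu_ring_write()/radeon_ring_write(), and the packet header is left to the caller because it is engine-specific.

    #include <stdint.h>
    #include <stdbool.h>

    static bool ring_test_store(void (*emit)(uint32_t),
                                uint32_t write_data_hdr,
                                uint64_t slot_gpu_addr,
                                volatile uint32_t *slot_cpu_ptr)
    {
            *slot_cpu_ptr = 0xCAFEDEAD;                         /* poison value        */
            emit(write_data_hdr);                               /* "write data" packet */
            emit((uint32_t)(slot_gpu_addr & 0xffffffff));       /* destination, low    */
            emit((uint32_t)(slot_gpu_addr >> 32));              /* destination, high   */
            emit(0xDEADBEEF);                                   /* payload to poll for */

            for (int i = 0; i < 1000000; i++)                   /* crude busy-wait     */
                    if (*slot_cpu_ptr == 0xDEADBEEF)
                            return true;
            return false;
    }
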
vce_v3_0.c
283 WREG32(mmVCE_RB_BASE_LO, ring->gpu_addr); in vce_v3_0_start()
284 WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
290 WREG32(mmVCE_RB_BASE_LO2, ring->gpu_addr); in vce_v3_0_start()
291 WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
297 WREG32(mmVCE_RB_BASE_LO3, ring->gpu_addr); in vce_v3_0_start()
298 WREG32(mmVCE_RB_BASE_HI3, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
567 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR0, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
568 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR1, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
569 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR2, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
571 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
[all …]
uvd_v7_0.c
698 lower_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_mc_resume()
700 upper_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_mc_resume()
709 lower_32_bits(adev->uvd.inst[i].gpu_addr + offset)); in uvd_v7_0_mc_resume()
711 upper_32_bits(adev->uvd.inst[i].gpu_addr + offset)); in uvd_v7_0_mc_resume()
716 lower_32_bits(adev->uvd.inst[i].gpu_addr + offset + AMDGPU_UVD_HEAP_SIZE)); in uvd_v7_0_mc_resume()
718 upper_32_bits(adev->uvd.inst[i].gpu_addr + offset + AMDGPU_UVD_HEAP_SIZE)); in uvd_v7_0_mc_resume()
738 uint64_t addr = table->gpu_addr; in uvd_v7_0_mmsch_start()
840 lower_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_sriov_start()
842 upper_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_sriov_start()
852 lower_32_bits(adev->uvd.inst[i].gpu_addr + offset)); in uvd_v7_0_sriov_start()
[all …]
sdma_v3_0.c
439 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v3_0_ring_emit_ib()
440 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v3_0_ring_emit_ib()
697 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
699 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
703 WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in sdma_v3_0_gfx_resume()
704 WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40); in sdma_v3_0_gfx_resume()
718 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v3_0_gfx_resume()
828 u64 gpu_addr; in sdma_v3_0_ring_test_ring() local
834 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ring()
844 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v3_0_ring_test_ring()
[all …]
amdgpu_fence.c
176 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit()
232 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit_polling()
431 ring->fence_drv.gpu_addr = adev->wb.gpu_addr + (ring->fence_offs * 4); in amdgpu_fence_driver_start_ring()
436 ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index; in amdgpu_fence_driver_start_ring()
445 ring->name, ring->fence_drv.gpu_addr); in amdgpu_fence_driver_start_ring()
475 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
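
Note: the amdgpu_fence.c matches show where a ring's fence address comes from: normally a 32-bit write-back slot (wb.gpu_addr + fence_offs * 4), but for UVD rings it points at an offset inside the UVD BO instead. A small sketch of that choice; the branch condition and parameter names are hypothetical.

    #include <stdint.h>
    #include <stdbool.h>

    static uint64_t fence_gpu_addr(bool uses_uvd_bo, uint64_t wb_gpu_addr,
                                   uint32_t fence_offs, uint64_t uvd_gpu_addr,
                                   uint32_t uvd_index)
    {
            if (uses_uvd_bo)
                    return uvd_gpu_addr + uvd_index;        /* byte offset into the BO */
            return wb_gpu_addr + (uint64_t)fence_offs * 4;  /* one 32-bit wb slot      */
    }
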
jpeg_v1_0.c
62 val = lower_32_bits(ring->gpu_addr); in jpeg_v1_0_decode_ring_set_patch_ring()
68 val = upper_32_bits(ring->gpu_addr); in jpeg_v1_0_decode_ring_set_patch_ring()
311 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in jpeg_v1_0_decode_ring_emit_ib()
315 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in jpeg_v1_0_decode_ring_emit_ib()
323 amdgpu_ring_write(ring, lower_32_bits(ring->gpu_addr)); in jpeg_v1_0_decode_ring_emit_ib()
327 amdgpu_ring_write(ring, upper_32_bits(ring->gpu_addr)); in jpeg_v1_0_decode_ring_emit_ib()
530 WREG32_SOC15(JPEG, 0, mmUVD_LMI_JRBC_RB_64BIT_BAR_LOW, lower_32_bits(ring->gpu_addr)); in jpeg_v1_0_start()
531 WREG32_SOC15(JPEG, 0, mmUVD_LMI_JRBC_RB_64BIT_BAR_HIGH, upper_32_bits(ring->gpu_addr)); in jpeg_v1_0_start()
sdma_v5_2.c
360 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v5_2_ring_emit_ib()
361 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v5_2_ring_emit_ib()
633 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v5_2_gfx_resume()
648 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v5_2_gfx_resume()
650 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v5_2_gfx_resume()
654 WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_BASE), ring->gpu_addr >> 8); in sdma_v5_2_gfx_resume()
655 …WREG32_SOC15_IP(GC, sdma_v5_2_get_reg_offset(adev, i, mmSDMA0_GFX_RB_BASE_HI), ring->gpu_addr >> 4… in sdma_v5_2_gfx_resume()
910 u64 gpu_addr; in sdma_v5_2_ring_test_ring() local
918 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v5_2_ring_test_ring()
931 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v5_2_ring_test_ring()
[all …]
uvd_v6_0.c
616 lower_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
618 upper_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
854 WREG32(mmUVD_RBC_RB_RPTR_ADDR, (upper_32_bits(ring->gpu_addr) >> 2)); in uvd_v6_0_start()
858 lower_32_bits(ring->gpu_addr)); in uvd_v6_0_start()
860 upper_32_bits(ring->gpu_addr)); in uvd_v6_0_start()
874 WREG32(mmUVD_RB_BASE_LO, ring->gpu_addr); in uvd_v6_0_start()
875 WREG32(mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in uvd_v6_0_start()
881 WREG32(mmUVD_RB_BASE_LO2, ring->gpu_addr); in uvd_v6_0_start()
882 WREG32(mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in uvd_v6_0_start()
1038 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in uvd_v6_0_ring_emit_ib()
[all …]
/drivers/gpu/drm/amd/amdkfd/
kfd_mqd_manager.c
57 mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr; in allocate_hiq_mqd()
82 mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr + offset; in allocate_sdma_mqd()
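
Note: the kfd_mqd_manager.c matches show MQD buffers being handed out from one pre-allocated chunk, with each descriptor's gpu_addr set to the chunk base plus a per-queue offset. A toy sketch of that carve-out; the slot-times-size offset and all names here are hypothetical, not the amdkfd layout.

    #include <stdint.h>

    struct mqd_slice {              /* illustrative only */
            uint64_t gpu_addr;
            void *cpu_ptr;
    };

    static struct mqd_slice carve_mqd(uint64_t base_gpu_addr, void *base_cpu_ptr,
                                      uint32_t slot, uint32_t mqd_size)
    {
            uint64_t offset = (uint64_t)slot * mqd_size;
            struct mqd_slice s = {
                    .gpu_addr = base_gpu_addr + offset,
                    .cpu_ptr  = (char *)base_cpu_ptr + offset,
            };
            return s;
    }
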
