/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_uvd.c |
    126  INIT_DELAYED_WORK(&adev->uvd.idle_work, amdgpu_uvd_idle_work_handler);  in amdgpu_uvd_sw_init()
    174  r = request_firmware(&adev->uvd.fw, fw_name, adev->dev);  in amdgpu_uvd_sw_init()
    181  r = amdgpu_ucode_validate(adev->uvd.fw);  in amdgpu_uvd_sw_init()
    185  release_firmware(adev->uvd.fw);  in amdgpu_uvd_sw_init()
    186  adev->uvd.fw = NULL;  in amdgpu_uvd_sw_init()
    191  adev->uvd.max_handles = AMDGPU_DEFAULT_UVD_HANDLES;  in amdgpu_uvd_sw_init()
    193  hdr = (const struct common_firmware_header *)adev->uvd.fw->data;  in amdgpu_uvd_sw_init()
    208  adev->uvd.max_handles = AMDGPU_MAX_UVD_HANDLES;  in amdgpu_uvd_sw_init()
    210  adev->uvd.fw_version = ((version_major << 24) | (version_minor << 16) |  in amdgpu_uvd_sw_init()
    215  (adev->uvd.fw_version < FW_1_66_16))  in amdgpu_uvd_sw_init()
    [all …]
|
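The amdgpu_uvd_sw_init() hits show the firmware being requested, validated, and its header parsed, with the major and minor revision packed into adev->uvd.fw_version so that version checks such as the FW_1_66_16 comparison at line 215 become plain integer comparisons. The line-210 hit is truncated, so the low bits of the packed value are not visible here; the sketch below is a minimal standalone rendition that packs only the two fields the fragment shows and leaves the rest zero.

    #include <stdint.h>
    #include <stdio.h>

    /* Pack major into bits 31..24 and minor into bits 23..16, as the
     * visible part of line 210 does; the truncated low bits stay zero. */
    static uint32_t pack_fw_version(uint32_t major, uint32_t minor)
    {
            return (major << 24) | (minor << 16);
    }

    int main(void)
    {
            uint32_t fw_1_66_16 = pack_fw_version(1, 66); /* stand-in threshold */
            uint32_t loaded = pack_fw_version(1, 64);     /* made-up loaded fw  */

            /* Ordered packing turns "older than 1.66?" into one compare,
             * mirroring the check at line 215. */
            printf("needs workaround: %d\n", loaded < fw_1_66_16);
            return 0;
    }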
D | uvd_v7_0.c |
    75   if (ring == &adev->uvd.ring_enc[0])  in uvd_v7_0_enc_ring_get_rptr()
    109  if (ring == &adev->uvd.ring_enc[0])  in uvd_v7_0_enc_ring_get_wptr()
    147  if (ring == &adev->uvd.ring_enc[0])  in uvd_v7_0_enc_ring_set_wptr()
    375  adev->uvd.num_enc_rings = 1;  in uvd_v7_0_early_init()
    377  adev->uvd.num_enc_rings = 2;  in uvd_v7_0_early_init()
    393  r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_UVD, 124, &adev->uvd.irq);  in uvd_v7_0_sw_init()
    398  for (i = 0; i < adev->uvd.num_enc_rings; ++i) {  in uvd_v7_0_sw_init()
    399  r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_UVD, i + 119, &adev->uvd.irq);  in uvd_v7_0_sw_init()
    410  hdr = (const struct common_firmware_header *)adev->uvd.fw->data;  in uvd_v7_0_sw_init()
    412  adev->firmware.ucode[AMDGPU_UCODE_ID_UVD].fw = adev->uvd.fw;  in uvd_v7_0_sw_init()
    [all …]
|
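The v7 hits show two encode rings: early init picks 1 or 2 rings (lines 375–377), and the rptr/wptr helpers select the register set by comparing the ring's address against &adev->uvd.ring_enc[0] rather than consulting an index field. A minimal userspace sketch of that pointer-identity dispatch, with stand-in types and illustrative register names:

    #include <stdio.h>

    /* Stand-in types; the real ones are struct amdgpu_ring et al. */
    struct ring { unsigned int rptr; };
    struct uvd_block { struct ring ring_enc[2]; };

    /* Same shape as uvd_v7_0_enc_ring_get_rptr(): the first encode ring
     * and any other ring map to different registers, chosen by comparing
     * the ring's address, not an index stored in the ring. */
    static const char *rptr_reg_name(struct uvd_block *uvd, struct ring *ring)
    {
            if (ring == &uvd->ring_enc[0])
                    return "RB_RPTR";   /* illustrative register names */
            return "RB_RPTR2";
    }

    int main(void)
    {
            struct uvd_block uvd = { { { 0 }, { 0 } } };

            printf("%s\n", rptr_reg_name(&uvd, &uvd.ring_enc[0]));
            printf("%s\n", rptr_reg_name(&uvd, &uvd.ring_enc[1]));
            return 0;
    }

Pointer identity is cheap and unambiguous here because each device owns exactly one statically placed ring array, so the address doubles as the ring's identity.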
D | uvd_v6_0.c |
    113  r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, 124, &adev->uvd.irq);  in uvd_v6_0_sw_init()
    125  ring = &adev->uvd.ring;  in uvd_v6_0_sw_init()
    127  r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.irq, 0);  in uvd_v6_0_sw_init()
    154  struct amdgpu_ring *ring = &adev->uvd.ring;  in uvd_v6_0_hw_init()
    213  struct amdgpu_ring *ring = &adev->uvd.ring;  in uvd_v6_0_hw_fini()
    261  lower_32_bits(adev->uvd.gpu_addr));  in uvd_v6_0_mc_resume()
    263  upper_32_bits(adev->uvd.gpu_addr));  in uvd_v6_0_mc_resume()
    266  size = AMDGPU_GPU_PAGE_ALIGN(adev->uvd.fw->size + 4);  in uvd_v6_0_mc_resume()
    277  (AMDGPU_UVD_SESSION_SIZE * adev->uvd.max_handles);  in uvd_v6_0_mc_resume()
    285  WREG32(mmUVD_GP_SCRATCH4, adev->uvd.max_handles);  in uvd_v6_0_mc_resume()
    [all …]
|
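uvd_v6_0_mc_resume() programs a 64-bit VRAM address into two 32-bit registers (lines 261–263) and rounds the firmware image up to GPU page granularity plus 4 tail bytes (line 266). A runnable sketch of both computations, assuming a 4 KiB page as AMDGPU_GPU_PAGE_ALIGN's granularity:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed 4 KiB GPU page for this sketch. */
    #define GPU_PAGE_SIZE 4096ULL
    #define GPU_PAGE_ALIGN(a) (((a) + GPU_PAGE_SIZE - 1) & ~(GPU_PAGE_SIZE - 1))

    static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
    static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

    int main(void)
    {
            uint64_t gpu_addr = 0xfdeadb000ULL;  /* made-up VRAM address  */
            uint64_t fw_size = 192 * 1024 + 100; /* made-up firmware size */

            /* Lines 261-263: one 64-bit address, two 32-bit registers. */
            printf("low  reg <- 0x%08x\n", (unsigned)lower_32_bits(gpu_addr));
            printf("high reg <- 0x%08x\n", (unsigned)upper_32_bits(gpu_addr));

            /* Line 266: whole pages for the image plus 4 tail bytes. */
            printf("fw region: %llu bytes\n",
                   (unsigned long long)GPU_PAGE_ALIGN(fw_size + 4));
            return 0;
    }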
D | uvd_v4_2.c |
    110  r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, 124, &adev->uvd.irq);  in uvd_v4_2_sw_init()
    122  ring = &adev->uvd.ring;  in uvd_v4_2_sw_init()
    124  r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.irq, 0);  in uvd_v4_2_sw_init()
    153  struct amdgpu_ring *ring = &adev->uvd.ring;  in uvd_v4_2_hw_init()
    211  struct amdgpu_ring *ring = &adev->uvd.ring;  in uvd_v4_2_hw_fini()
    254  struct amdgpu_ring *ring = &adev->uvd.ring;  in uvd_v4_2_start()
    565  addr = (adev->uvd.gpu_addr + AMDGPU_UVD_FIRMWARE_OFFSET) >> 3;  in uvd_v4_2_mc_resume()
    566  size = AMDGPU_GPU_PAGE_ALIGN(adev->uvd.fw->size + 4) >> 3;  in uvd_v4_2_mc_resume()
    577  (AMDGPU_UVD_SESSION_SIZE * adev->uvd.max_handles)) >> 3;  in uvd_v4_2_mc_resume()
    582  addr = (adev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v4_2_mc_resume()
    [all …]
|
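The v4.2 variant programs offsets in 8-byte units (the >> 3 on lines 565–577) and moves bits 31..28 of the address into a separate 4-bit config field (line 582). The arithmetic in isolation, with a made-up address:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t gpu_addr = 0x3c0042200ULL; /* made-up VRAM address */

            /* Lines 565-577: offsets and sizes are written in 8-byte units. */
            uint64_t addr_qwords = gpu_addr >> 3;

            /* Line 582: bits 31..28 feed a separate 4-bit address field. */
            uint32_t addr_cfg = (uint32_t)(gpu_addr >> 28) & 0xF;

            printf("offset in qwords:   0x%llx\n",
                   (unsigned long long)addr_qwords);
            printf("addr config nibble: 0x%x\n", addr_cfg);
            return 0;
    }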
D | uvd_v5_0.c |
    106  r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, 124, &adev->uvd.irq);  in uvd_v5_0_sw_init()
    118  ring = &adev->uvd.ring;  in uvd_v5_0_sw_init()
    120  r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.irq, 0);  in uvd_v5_0_sw_init()
    147  struct amdgpu_ring *ring = &adev->uvd.ring;  in uvd_v5_0_hw_init()
    207  struct amdgpu_ring *ring = &adev->uvd.ring;  in uvd_v5_0_hw_fini()
    256  lower_32_bits(adev->uvd.gpu_addr));  in uvd_v5_0_mc_resume()
    258  upper_32_bits(adev->uvd.gpu_addr));  in uvd_v5_0_mc_resume()
    261  size = AMDGPU_GPU_PAGE_ALIGN(adev->uvd.fw->size + 4);  in uvd_v5_0_mc_resume()
    272  (AMDGPU_UVD_SESSION_SIZE * adev->uvd.max_handles);  in uvd_v5_0_mc_resume()
    290  struct amdgpu_ring *ring = &adev->uvd.ring;  in uvd_v5_0_start()
    [all …]
|
D | amdgpu_queue_mgr.c |
    80   *out_ring = &adev->uvd.ring;  in amdgpu_identity_map()
    86   *out_ring = &adev->uvd.ring_enc[ring];  in amdgpu_identity_map()
    249  ip_num_rings = adev->uvd.num_enc_rings;  in amdgpu_queue_mgr_map()
|
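amdgpu_queue_mgr resolves a userspace (IP, ring) pair to a kernel ring; for UVD the map is the identity, with the encode index bounds-checked against num_enc_rings (line 249) before ring_enc[ring] is returned. A simplified sketch of that shape, with stand-in types:

    #include <stdio.h>

    struct ring { int id; };

    /* Stand-in for the UVD block: one decode ring, two encode rings. */
    struct uvd_block {
            struct ring ring;
            struct ring ring_enc[2];
            unsigned int num_enc_rings;
    };

    /* Identity map, as in amdgpu_identity_map(): userspace index i picks
     * kernel ring i, after a bounds check against the ring count. */
    static int map_uvd_enc_ring(struct uvd_block *uvd, unsigned int idx,
                                struct ring **out_ring)
    {
            if (idx >= uvd->num_enc_rings)
                    return -1;
            *out_ring = &uvd->ring_enc[idx];
            return 0;
    }

    int main(void)
    {
            struct uvd_block uvd = { .num_enc_rings = 2 };
            struct ring *r;

            if (!map_uvd_enc_ring(&uvd, 1, &r))
                    printf("mapped to ring_enc[%ld]\n", (long)(r - uvd.ring_enc));
            return 0;
    }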
D | amdgpu_fence.c |
    337  if (ring != &adev->uvd.ring) {  in amdgpu_fence_driver_start_ring()
    342  index = ALIGN(adev->uvd.fw->size, 8);  in amdgpu_fence_driver_start_ring()
    343  ring->fence_drv.cpu_addr = adev->uvd.cpu_addr + index;  in amdgpu_fence_driver_start_ring()
    344  ring->fence_drv.gpu_addr = adev->uvd.gpu_addr + index;  in amdgpu_fence_driver_start_ring()
|
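UVD has no dedicated fence memory: the fence slot is carved out of the UVD BO immediately after the firmware image, with the offset rounded up to 8 bytes (line 342) and the same index applied to both the CPU and GPU views of the buffer (lines 343–344). The layout arithmetic, with made-up sizes:

    #include <stdint.h>
    #include <stdio.h>

    /* Round x up to a multiple of a (a power of two), as ALIGN() is
     * used at line 342. */
    #define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((uint64_t)(a) - 1))

    int main(void)
    {
            uint64_t fw_size  = 176235;         /* made-up firmware size */
            uint64_t gpu_base = 0xf00000000ULL; /* made-up BO address    */

            /* Fence slot starts at the first 8-byte boundary past the
             * firmware image. */
            uint64_t index = ALIGN_UP(fw_size, 8);

            /* Lines 343-344 add the same index to the CPU mapping and to
             * the GPU address, so both views point at the same qword. */
            printf("fence offset:   0x%llx\n", (unsigned long long)index);
            printf("fence gpu addr: 0x%llx\n",
                   (unsigned long long)(gpu_base + index));
            return 0;
    }

Reusing the firmware BO avoids a separate allocation for what is, per ring, a single fence slot.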
D | amdgpu_kms.c |
    162  fw_info->ver = adev->uvd.fw_version;  in amdgpu_firmware_info()
    306  ring_mask = adev->uvd.ring.ready ? 1 : 0;  in amdgpu_info_ioctl()
    319  for (i = 0; i < adev->uvd.num_enc_rings; i++)  in amdgpu_info_ioctl()
    320  ring_mask |= ((adev->uvd.ring_enc[i].ready ? 1 : 0) << i);  in amdgpu_info_ioctl()
    648  handle.uvd_max_handles = adev->uvd.max_handles;  in amdgpu_info_ioctl()
|
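amdgpu_info_ioctl() reports ring availability to userspace as bitmasks: the decode query yields a single bit (line 306) while the encode query sets one bit per ready ring (lines 319–320). A standalone rendition with made-up ring states:

    #include <stdio.h>

    int main(void)
    {
            int dec_ready = 1;           /* made-up ring states */
            int enc_ready[2] = { 1, 0 };
            unsigned int num_enc_rings = 2;
            unsigned int i;

            /* Line 306: the decode query reports a single-bit mask. */
            unsigned int dec_mask = dec_ready ? 1 : 0;

            /* Lines 319-320: one bit per ready encode ring. */
            unsigned int enc_mask = 0;
            for (i = 0; i < num_enc_rings; i++)
                    enc_mask |= (enc_ready[i] ? 1 : 0) << i;

            printf("decode mask: 0x%x, encode mask: 0x%x\n",
                   dec_mask, enc_mask);
            return 0;
    }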
D | amdgpu.h | 1540 struct amdgpu_uvd uvd; member
|
/drivers/gpu/drm/radeon/ |
D | radeon_uvd.c |
    72   INIT_DELAYED_WORK(&rdev->uvd.idle_work, radeon_uvd_idle_work_handler);  in radeon_uvd_init()
    137  rdev->uvd.fw_header_present = false;  in radeon_uvd_init()
    138  rdev->uvd.max_handles = RADEON_DEFAULT_UVD_HANDLES;  in radeon_uvd_init()
    153  rdev->uvd.fw_header_present = true;  in radeon_uvd_init()
    166  rdev->uvd.max_handles = RADEON_MAX_UVD_HANDLES;  in radeon_uvd_init()
    186  RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles;  in radeon_uvd_init()
    189  NULL, &rdev->uvd.vcpu_bo);  in radeon_uvd_init()
    195  r = radeon_bo_reserve(rdev->uvd.vcpu_bo, false);  in radeon_uvd_init()
    197  radeon_bo_unref(&rdev->uvd.vcpu_bo);  in radeon_uvd_init()
    202  r = radeon_bo_pin(rdev->uvd.vcpu_bo, RADEON_GEM_DOMAIN_VRAM,  in radeon_uvd_init()
    [all …]
|
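radeon_uvd_init() sizes one VCPU buffer object to hold the firmware image plus a fixed-size session slot per handle (line 186), then reserves and pins it in VRAM (lines 195–202). The full size formula is not visible in the truncated hits, so the sketch below only models the per-handle term, with made-up constants standing in for the radeon ones:

    #include <stdint.h>
    #include <stdio.h>

    /* Made-up stand-ins; the real constants live in radeon headers. */
    #define SESSION_SIZE (64 * 1024)
    #define MAX_HANDLES  10
    #define PAGE_SZ      4096ULL
    #define PAGE_ALIGN_UP(x) (((x) + PAGE_SZ - 1) & ~(PAGE_SZ - 1))

    int main(void)
    {
            uint64_t fw_size = 200000; /* made-up firmware image size */

            /* One BO: firmware image first, then per-handle session
             * state, mirroring the term visible at line 186. Any other
             * terms in the real formula are truncated out of the hits. */
            uint64_t bo_size = PAGE_ALIGN_UP(fw_size) +
                               (uint64_t)SESSION_SIZE * MAX_HANDLES;

            printf("vcpu bo: %llu bytes for %d handles\n",
                   (unsigned long long)bo_size, MAX_HANDLES);
            return 0;
    }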
D | uvd_v4_2.c |
    46   if (rdev->uvd.fw_header_present)  in uvd_v4_2_resume()
    47   addr = (rdev->uvd.gpu_addr + 0x200) >> 3;  in uvd_v4_2_resume()
    49   addr = rdev->uvd.gpu_addr >> 3;  in uvd_v4_2_resume()
    62   (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3;  in uvd_v4_2_resume()
    67   addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v4_2_resume()
    71   addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v4_2_resume()
    74   if (rdev->uvd.fw_header_present)  in uvd_v4_2_resume()
    75   WREG32(UVD_GP_SCRATCH4, rdev->uvd.max_handles);  in uvd_v4_2_resume()
|
D | uvd_v2_2.c |
    113  addr = rdev->uvd.gpu_addr >> 3;  in uvd_v2_2_resume()
    125  (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3;  in uvd_v2_2_resume()
    130  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v2_2_resume()
    134  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v2_2_resume()
|
D | uvd_v1_0.c |
    121  addr = (rdev->uvd.gpu_addr >> 3) + 16;  in uvd_v1_0_resume()
    133  (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3;  in uvd_v1_0_resume()
    138  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v1_0_resume()
    142  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v1_0_resume()
    145  WREG32(UVD_FW_START, *((uint32_t*)rdev->uvd.cpu_addr));  in uvd_v1_0_resume()
|
D | radeon_drv.c |
    291  MODULE_PARM_DESC(uvd, "uvd enable/disable uvd support (1 = enable, 0 = disable)");
    292  module_param_named(uvd, radeon_uvd, int, 0444);
|
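The two radeon_drv.c hits are the standard module-parameter pattern: module_param_named() binds the command-line name uvd to the radeon_uvd variable with mode 0444 (readable in sysfs, not writable), and MODULE_PARM_DESC() supplies the help text shown by modinfo. A minimal out-of-tree module sketch of the same pattern (demo_uvd is a hypothetical variable, not the driver's):

    #include <linux/module.h>

    /* demo_uvd is hypothetical; in radeon_drv.c the variable is
     * radeon_uvd and the parameter name is "uvd" (lines 291-292). */
    static int demo_uvd = 1;
    module_param_named(uvd, demo_uvd, int, 0444);
    MODULE_PARM_DESC(uvd, "uvd enable/disable uvd support (1 = enable, 0 = disable)");

    static int __init demo_init(void)
    {
            pr_info("demo: uvd=%d\n", demo_uvd);
            return 0;
    }

    static void __exit demo_exit(void)
    {
    }

    module_init(demo_init);
    module_exit(demo_exit);
    MODULE_LICENSE("GPL");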
D | radeon_fence.c |
    844  rdev->fence_drv[ring].cpu_addr = rdev->uvd.cpu_addr + index;  in radeon_fence_driver_start_ring()
    845  rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index;  in radeon_fence_driver_start_ring()
|
D | radeon.h | 2387 struct radeon_uvd uvd; member
|
/drivers/gpu/drm/amd/powerplay/inc/ |
D | pp_asicblocks.h | 44 bool uvd : 1; member
|
/drivers/gpu/drm/amd/powerplay/hwmgr/ |
D | rv_hwmgr.h | 112 uint32_t uvd : 1; member
|
D | cz_hwmgr.h | 139 uint32_t uvd : 1; member
|
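All three powerplay headers track UVD as a single-bit field inside a wider status word (bool uvd : 1 in pp_asicblocks.h, uint32_t uvd : 1 in the two hwmgr headers), so a handful of per-block flags pack into one word. A small standalone example of that layout; the sibling field names here are illustrative, not taken from the headers:

    #include <stdint.h>
    #include <stdio.h>

    /* Several per-block enable flags packed into one 32-bit word, like
     * the uvd : 1 members in rv_hwmgr.h and cz_hwmgr.h. The sibling
     * field names are illustrative. */
    struct block_flags {
            uint32_t uvd      : 1;
            uint32_t vce      : 1;
            uint32_t gfx      : 1;
            uint32_t reserved : 29;
    };

    int main(void)
    {
            struct block_flags f = { .uvd = 1, .gfx = 1 };

            printf("uvd=%u vce=%u gfx=%u\n", f.uvd, f.vce, f.gfx);
            printf("flags word: %zu bytes\n", sizeof f);
            return 0;
    }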