/drivers/gpu/drm/amd/amdgpu/

amdgpu_fence.c
   100  struct amdgpu_fence_driver *drv = &ring->fence_drv;   in amdgpu_fence_write()
   116  struct amdgpu_fence_driver *drv = &ring->fence_drv;   in amdgpu_fence_read()
   160  seq = ++ring->fence_drv.sync_seq;   in amdgpu_fence_emit()
   166  &ring->fence_drv.lock,   in amdgpu_fence_emit()
   176  amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,   in amdgpu_fence_emit()
   179  ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask];   in amdgpu_fence_emit()
   225  seq = ++ring->fence_drv.sync_seq;   in amdgpu_fence_emit_polling()
   227  seq - ring->fence_drv.num_fences_mask,   in amdgpu_fence_emit_polling()
   232  amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,   in amdgpu_fence_emit_polling()
   249  mod_timer(&ring->fence_drv.fallback_timer,   in amdgpu_fence_schedule_fallback()
   [all …]

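The amdgpu_fence.c and amdgpu_debugfs.c hits sketch the per-ring bookkeeping: amdgpu_fence_emit() bumps fence_drv.sync_seq, asks the ring backend to write that value to fence_drv.gpu_addr, and parks the fence in fence_drv.fences[seq & num_fences_mask], while fence_drv.last_seq tracks what the hardware has signaled. Below is a minimal, self-contained model of that pattern; it is only an illustration, with plain integers standing in for dma_fence objects rather than the kernel structs.

/* Simplified model of the per-ring fence bookkeeping seen in the hits above.
 * Not the real amdgpu structs; "fences" here are plain sequence numbers. */
#include <stdint.h>
#include <stdio.h>

#define NUM_FENCES 16                      /* must be a power of two */

struct fence_driver {
    uint32_t sync_seq;                     /* last sequence number emitted */
    uint32_t last_seq;                     /* last sequence number signaled (atomic in the kernel) */
    uint64_t gpu_addr;                     /* where the GPU writes completed sequence numbers */
    uint32_t num_fences_mask;              /* NUM_FENCES - 1 */
    uint32_t fences[NUM_FENCES];           /* slots for in-flight fences, as in fence_drv.fences[] */
};

/* Emit: bump sync_seq and park the fence in its slot, as in amdgpu_fence_emit(). */
static uint32_t fence_emit(struct fence_driver *drv)
{
    uint32_t seq = ++drv->sync_seq;

    /* the real driver emits a packet writing 'seq' to drv->gpu_addr here */
    drv->fences[seq & drv->num_fences_mask] = seq;
    return seq;
}

/* Idle check: last_seq caught up with sync_seq, the test amdgpu_debugfs_ib_preempt() makes. */
static int ring_idle(const struct fence_driver *drv)
{
    return drv->last_seq == drv->sync_seq;
}

int main(void)
{
    struct fence_driver drv = { .num_fences_mask = NUM_FENCES - 1 };
    uint32_t seq = fence_emit(&drv);

    printf("emitted seq %u, idle=%d\n", (unsigned)seq, ring_idle(&drv));
    drv.last_seq = seq;                    /* pretend the GPU signaled it */
    printf("after signal, idle=%d\n", ring_idle(&drv));
    return 0;
}
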
mes_v10_1.c
   104  r = amdgpu_fence_wait_polling(ring, ring->fence_drv.sync_seq,   in mes_v10_1_submit_pkt_and_poll_completion()
   161  mes->ring.fence_drv.gpu_addr;   in mes_v10_1_add_hw_queue()
   163  ++mes->ring.fence_drv.sync_seq;   in mes_v10_1_add_hw_queue()
   184  mes->ring.fence_drv.gpu_addr;   in mes_v10_1_remove_hw_queue()
   186  ++mes->ring.fence_drv.sync_seq;   in mes_v10_1_remove_hw_queue()
   215  mes->ring.fence_drv.gpu_addr;   in mes_v10_1_query_sched_status()
   217  ++mes->ring.fence_drv.sync_seq;   in mes_v10_1_query_sched_status()
   258  mes->ring.fence_drv.gpu_addr;   in mes_v10_1_set_hw_resources()
   260  ++mes->ring.fence_drv.sync_seq;   in mes_v10_1_set_hw_resources()

amdgpu_debugfs.c
  1260  struct amdgpu_fence_driver *drv = &ring->fence_drv;   in amdgpu_ib_preempt_fences_swap()
  1263  last_seq = atomic_read(&ring->fence_drv.last_seq);   in amdgpu_ib_preempt_fences_swap()
  1264  sync_seq = ring->fence_drv.sync_seq;   in amdgpu_ib_preempt_fences_swap()
  1321  struct amdgpu_fence_driver *drv = &ring->fence_drv;   in amdgpu_ib_preempt_mark_partial_job()
  1374  length = ring->fence_drv.num_fences_mask + 1;   in amdgpu_debugfs_ib_preempt()
  1398  if (atomic_read(&ring->fence_drv.last_seq) !=   in amdgpu_debugfs_ib_preempt()
  1399  ring->fence_drv.sync_seq) {   in amdgpu_debugfs_ib_preempt()

amdgpu_job.c
    60  job->base.sched->name, atomic_read(&ring->fence_drv.last_seq),   in amdgpu_job_timedout()
    61  ring->fence_drv.sync_seq);   in amdgpu_job_timedout()

uvd_v6_0.c
  1097  uint32_t seq = ring->fence_drv.sync_seq;   in uvd_v6_0_ring_emit_pipeline_sync()
  1098  uint64_t addr = ring->fence_drv.gpu_addr;   in uvd_v6_0_ring_emit_pipeline_sync()
  1126  uint32_t seq = ring->fence_drv.sync_seq;   in uvd_v6_0_enc_ring_emit_pipeline_sync()
  1127  uint64_t addr = ring->fence_drv.gpu_addr;   in uvd_v6_0_enc_ring_emit_pipeline_sync()

si_dma.c
   426  uint32_t seq = ring->fence_drv.sync_seq;   in si_dma_ring_emit_pipeline_sync()
   427  uint64_t addr = ring->fence_drv.gpu_addr;   in si_dma_ring_emit_pipeline_sync()

vce_v3_0.c
   888  uint32_t seq = ring->fence_drv.sync_seq;   in vce_v3_0_emit_pipeline_sync()
   889  uint64_t addr = ring->fence_drv.gpu_addr;   in vce_v3_0_emit_pipeline_sync()

amdgpu_ring.h
   210  struct amdgpu_fence_driver fence_drv;   member

sdma_v2_4.c
   778  uint32_t seq = ring->fence_drv.sync_seq;   in sdma_v2_4_ring_emit_pipeline_sync()
   779  uint64_t addr = ring->fence_drv.gpu_addr;   in sdma_v2_4_ring_emit_pipeline_sync()

cik_sdma.c
   839  uint32_t seq = ring->fence_drv.sync_seq;   in cik_sdma_ring_emit_pipeline_sync()
   840  uint64_t addr = ring->fence_drv.gpu_addr;   in cik_sdma_ring_emit_pipeline_sync()

sdma_v3_0.c
  1049  uint32_t seq = ring->fence_drv.sync_seq;   in sdma_v3_0_ring_emit_pipeline_sync()
  1050  uint64_t addr = ring->fence_drv.gpu_addr;   in sdma_v3_0_ring_emit_pipeline_sync()

sdma_v5_2.c
  1150  uint32_t seq = ring->fence_drv.sync_seq;   in sdma_v5_2_ring_emit_pipeline_sync()
  1151  uint64_t addr = ring->fence_drv.gpu_addr;   in sdma_v5_2_ring_emit_pipeline_sync()

sdma_v5_0.c
  1228  uint32_t seq = ring->fence_drv.sync_seq;   in sdma_v5_0_ring_emit_pipeline_sync()
  1229  uint64_t addr = ring->fence_drv.gpu_addr;   in sdma_v5_0_ring_emit_pipeline_sync()

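Every *_ring_emit_pipeline_sync() hit above (uvd, vce, si_dma, cik_sdma, and the sdma v2.4 through v5.2 variants) starts identically: snapshot seq = ring->fence_drv.sync_seq and addr = ring->fence_drv.gpu_addr, then emit a packet that stalls the pipeline until the value at addr has caught up with seq. The sketch below is a hypothetical software model of the condition that wait checks; on the GPU this is a wait packet, not a CPU read, and the wraparound-safe compare is an assumption of the sketch.

/* Hypothetical model of the condition an emit_pipeline_sync waits for:
 * the fence value at 'addr' reaching 'seq'. Illustration only. */
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

struct fence_driver_view {
    uint32_t sync_seq;      /* last emitted sequence number */
    uint64_t gpu_addr;      /* where the GPU writes completed sequence numbers */
};

/* Stands in for dereferencing the fence GPU address. */
static uint32_t read_fence_value(uint64_t addr)
{
    return *(volatile uint32_t *)(uintptr_t)addr;
}

static bool pipeline_sync_satisfied(const struct fence_driver_view *drv)
{
    uint32_t seq = drv->sync_seq;          /* as in the hits above */
    uint64_t addr = drv->gpu_addr;

    /* signed compare of the difference so sequence rollover is tolerated */
    return (int32_t)(read_fence_value(addr) - seq) >= 0;
}

int main(void)
{
    uint32_t fence_mem = 0;                /* stands in for the fence buffer */
    struct fence_driver_view drv = {
        .sync_seq = 5,
        .gpu_addr = (uint64_t)(uintptr_t)&fence_mem,
    };

    printf("synced=%d\n", pipeline_sync_satisfied(&drv)); /* 0: not caught up */
    fence_mem = 5;                         /* pretend the fence was written */
    printf("synced=%d\n", pipeline_sync_satisfied(&drv)); /* 1 */
    return 0;
}
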
/drivers/gpu/drm/radeon/

radeon_fence.c
    69  struct radeon_fence_driver *drv = &rdev->fence_drv[ring];   in radeon_fence_write()
    90  struct radeon_fence_driver *drv = &rdev->fence_drv[ring];   in radeon_fence_read()
   120  &rdev->fence_drv[ring].lockup_work,   in radeon_fence_schedule_check()
   146  (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring];   in radeon_fence_emit()
   177  seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq);   in radeon_fence_check_signaled()
   231  last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq);   in radeon_fence_activity()
   233  last_emitted = rdev->fence_drv[ring].sync_seq[ring];   in radeon_fence_activity()
   258  } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq);   in radeon_fence_activity()
   276  struct radeon_fence_driver *fence_drv;   in radeon_fence_check_lockup()   local
   280  fence_drv = container_of(work, struct radeon_fence_driver,   in radeon_fence_check_lockup()
   [all …]

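On the radeon side the fence drivers hang off the device rather than the ring: every access above goes through rdev->fence_drv[ring] (or fence_drv[fence->ring]), last_seq is an atomic64, and sync_seq is itself a per-ring array, as the radeon_fence_emit() and radeon_fence_activity() hits show. A compact sketch of that layout follows, with simplified structs and an illustrative NUM_RINGS in place of the kernel definitions.

/* Simplified model of radeon's per-ring fence bookkeeping seen above.
 * NUM_RINGS and both structs are illustrative, not the kernel types. */
#include <stdint.h>
#include <stdio.h>

#define NUM_RINGS 8

struct fence_driver {
    uint64_t gpu_addr;                 /* fence write-back location for this ring */
    uint64_t sync_seq[NUM_RINGS];      /* last seq emitted, per ring (see radeon_fence_emit) */
    uint64_t last_seq;                 /* last seq seen signaled (atomic64 in the kernel) */
};

struct device_model {
    struct fence_driver fence_drv[NUM_RINGS];  /* one driver per ring, like rdev->fence_drv[ring] */
};

/* Emit on 'ring': bump that ring's own entry in its sync_seq[] array,
 * mirroring "(*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]". */
static uint64_t fence_emit(struct device_model *dev, int ring)
{
    return ++dev->fence_drv[ring].sync_seq[ring];
}

/* Activity check: anything still outstanding on 'ring'? */
static int ring_busy(const struct device_model *dev, int ring)
{
    const struct fence_driver *drv = &dev->fence_drv[ring];

    return drv->last_seq != drv->sync_seq[ring];
}

int main(void)
{
    struct device_model dev = { 0 };
    uint64_t seq = fence_emit(&dev, 3);

    printf("ring 3: emitted %llu, busy=%d\n",
           (unsigned long long)seq, ring_busy(&dev, 3));
    dev.fence_drv[3].last_seq = seq;   /* pretend the GPU signaled it */
    printf("ring 3: busy=%d\n", ring_busy(&dev, 3));
    return 0;
}
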
uvd_v2_2.c
    43  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;   in uvd_v2_2_fence_emit()

evergreen_dma.c
    44  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;   in evergreen_dma_fence_ring_emit()

uvd_v1_0.c
    85  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;   in uvd_v1_0_fence_emit()

r600_dma.c
   290  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;   in r600_dma_fence_ring_emit()

radeon_vce.c
   738  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;   in radeon_vce_fence_emit()

cik_sdma.c
   203  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;   in cik_sdma_fence_ring_emit()

/drivers/gpu/drm/virtio/

virtgpu_fence.c
    76  struct virtio_gpu_fence_driver *drv = &vgdev->fence_drv;   in virtio_gpu_fence_alloc()
    98  struct virtio_gpu_fence_driver *drv = &vgdev->fence_drv;   in virtio_gpu_fence_emit()
   116  struct virtio_gpu_fence_driver *drv = &vgdev->fence_drv;   in virtio_gpu_fence_event_process()
   121  atomic64_set(&vgdev->fence_drv.last_fence_id, fence_id);   in virtio_gpu_fence_event_process()

virtgpu_debugfs.c
    72  (u64)atomic64_read(&vgdev->fence_drv.last_fence_id),   in virtio_gpu_debugfs_irq_info()
    73  vgdev->fence_drv.current_fence_id);   in virtio_gpu_debugfs_irq_info()

virtgpu_kms.c
   131  vgdev->fence_drv.context = dma_fence_context_alloc(1);   in virtio_gpu_init()
   132  spin_lock_init(&vgdev->fence_drv.lock);   in virtio_gpu_init()
   133  INIT_LIST_HEAD(&vgdev->fence_drv.fences);   in virtio_gpu_init()

virtgpu_drv.h
   226  struct virtio_gpu_fence_driver fence_drv;   member

virtgpu_ioctl.c
   115  if (!dma_fence_match_context(in_fence, vgdev->fence_drv.context))   in virtio_gpu_execbuffer_ioctl()

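virtio-gpu keeps a single device-wide fence_drv (the virtgpu_drv.h member above): virtio_gpu_init() gives it its own dma_fence context, a lock, and a list of pending fences; the execbuffer ioctl rejects in-fences that come from a different context; and debugfs prints current_fence_id alongside last_fence_id, the latter updated when the host reports completions in virtio_gpu_fence_event_process(). The sketch below is a rough userspace model of that flow; the struct mirrors the field names in the hits, but the completion step (retire every pending fence at or below the reported id) is an assumption of the sketch, not code taken from the driver.

/* Rough userspace model of the virtio-gpu fence_drv flow seen above.
 * The completion logic is assumed, and a fixed array stands in for the
 * pending-fence list the real driver keeps. */
#include <stdint.h>
#include <stdio.h>

#define MAX_PENDING 64

struct fence_driver {
    uint64_t context;              /* dma_fence_context_alloc(1) in the real driver */
    uint64_t current_fence_id;     /* most recently emitted id */
    uint64_t last_fence_id;        /* most recently completed id (atomic64 in the kernel) */
    uint64_t pending[MAX_PENDING]; /* stands in for the fence_drv.fences list */
    int num_pending;
};

/* Emit: hand out the next id and remember it as pending. */
static uint64_t fence_emit(struct fence_driver *drv)
{
    uint64_t id = ++drv->current_fence_id;

    drv->pending[drv->num_pending++] = id;
    return id;
}

/* Event processing: record the highest id the host reported finished and
 * drop every pending fence at or below it (assumed behavior). */
static void fence_event_process(struct fence_driver *drv, uint64_t fence_id)
{
    int i, kept = 0;

    drv->last_fence_id = fence_id;
    for (i = 0; i < drv->num_pending; i++)
        if (drv->pending[i] > fence_id)
            drv->pending[kept++] = drv->pending[i];
    drv->num_pending = kept;
}

int main(void)
{
    struct fence_driver drv = { .context = 1 };
    uint64_t second;

    fence_emit(&drv);
    second = fence_emit(&drv);
    fence_event_process(&drv, second);
    printf("last=%llu current=%llu pending=%d\n",
           (unsigned long long)drv.last_fence_id,
           (unsigned long long)drv.current_fence_id,
           drv.num_pending);
    return 0;
}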