Lines matching refs: ring

Cross-reference listing of the identifier "ring" in drivers/gpu/drm/radeon/radeon_fence.c. Each entry gives the source line number, the matching source line, and the enclosing function; a trailing "argument" or "local" marks the line where "ring" is defined as a function parameter or local variable.
68 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) in radeon_fence_write() argument
70 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_write()
89 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) in radeon_fence_read() argument
91 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_read()
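A minimal sketch of the two accessors above, reconstructed to match these lines (the writeback/scratch-register fallback mirrors the upstream pattern; any detail not visible in the listing is an assumption):

static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring)
{
        struct radeon_fence_driver *drv = &rdev->fence_drv[ring];

        if (likely(rdev->wb.enabled || !drv->scratch_reg)) {
                /* writeback slot in system memory, if one is mapped */
                if (drv->cpu_addr)
                        *drv->cpu_addr = cpu_to_le32(seq);
        } else {
                /* fall back to a scratch register */
                WREG32(drv->scratch_reg, seq);
        }
}

static u32 radeon_fence_read(struct radeon_device *rdev, int ring)
{
        struct radeon_fence_driver *drv = &rdev->fence_drv[ring];
        u32 seq = 0;

        if (likely(rdev->wb.enabled || !drv->scratch_reg)) {
                if (drv->cpu_addr)
                        seq = le32_to_cpu(*drv->cpu_addr);
                else
                        /* no slot mapped: report the last value the CPU saw */
                        seq = lower_32_bits(atomic64_read(&drv->last_seq));
        } else {
                seq = RREG32(drv->scratch_reg);
        }
        return seq;
}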
114 static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) in radeon_fence_schedule_check() argument
121 &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
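Line 121 queues the per-ring lockup_work. A sketch of the helper, assuming the upstream RADEON_FENCE_JIFFIES_TIMEOUT re-check interval:

static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring)
{
        /*
         * Deliberately queue_delayed_work() rather than mod_delayed_work():
         * re-arming the timer on every call could postpone the lockup check
         * indefinitely while fences keep being emitted.
         */
        queue_delayed_work(system_power_efficient_wq,
                           &rdev->fence_drv[ring].lockup_work,
                           RADEON_FENCE_JIFFIES_TIMEOUT);
}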
137 int ring) in radeon_fence_emit() argument
147 (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
148 (*fence)->ring = ring; in radeon_fence_emit()
152 rdev->fence_context + ring, in radeon_fence_emit()
154 radeon_fence_ring_emit(rdev, ring, *fence); in radeon_fence_emit()
155 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); in radeon_fence_emit()
156 radeon_fence_schedule_check(rdev, ring); in radeon_fence_emit()
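Lines 137-156 are the body of radeon_fence_emit(). A hedged reconstruction filling in the elided lines (the allocation, error handling, and the radeon_fence_ops and fence_queue names follow upstream and are not themselves visible in the listing):

int radeon_fence_emit(struct radeon_device *rdev,
                      struct radeon_fence **fence,
                      int ring)
{
        u64 seq;

        /* we are protected by the ring emission mutex */
        *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL);
        if ((*fence) == NULL)
                return -ENOMEM;

        (*fence)->rdev = rdev;
        /* sync_seq[ring] is the last sequence emitted on this ring */
        (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring];
        (*fence)->ring = ring;
        dma_fence_init(&(*fence)->base, &radeon_fence_ops,
                       &rdev->fence_queue.lock,
                       rdev->fence_context + ring,
                       seq);
        radeon_fence_ring_emit(rdev, ring, *fence);
        trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq);
        radeon_fence_schedule_check(rdev, ring);
        return 0;
}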
178 seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq); in radeon_fence_check_signaled()
187 radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring); in radeon_fence_check_signaled()
205 static bool radeon_fence_activity(struct radeon_device *rdev, int ring) in radeon_fence_activity() argument
232 last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq); in radeon_fence_activity()
234 last_emitted = rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_activity()
235 seq = radeon_fence_read(rdev, ring); in radeon_fence_activity()
259 } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq); in radeon_fence_activity()
262 radeon_fence_schedule_check(rdev, ring); in radeon_fence_activity()
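radeon_fence_activity() (lines 205-262) rebuilds a 64-bit sequence number from the 32-bit value the GPU writes back, then publishes it with an atomic exchange so concurrent callers never move last_seq backwards. The core loop, sketched from the lines above (local variable names are assumptions based on upstream):

        last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq);
        do {
                last_emitted = rdev->fence_drv[ring].sync_seq[ring];
                seq = radeon_fence_read(rdev, ring);    /* low 32 bits from HW */
                seq |= last_seq & 0xffffffff00000000LL; /* borrow the upper bits */
                if (seq < last_seq) {
                        /* the 32-bit counter wrapped: take the upper bits
                         * from the last emitted sequence instead */
                        seq &= 0xffffffff;
                        seq |= last_emitted & 0xffffffff00000000LL;
                }

                if (seq <= last_seq || seq > last_emitted)
                        break;          /* nothing new, or a bogus read */

                wake = true;            /* at least one fence newly signaled */
                last_seq = seq;
        /* if the value we displaced was newer than ours we just moved
         * last_seq backwards, so loop again and correct it */
        } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq);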
279 int ring; in radeon_fence_check_lockup() local
284 ring = fence_drv - &rdev->fence_drv[0]; in radeon_fence_check_lockup()
288 radeon_fence_schedule_check(rdev, ring); in radeon_fence_check_lockup()
301 if (radeon_fence_activity(rdev, ring)) in radeon_fence_check_lockup()
304 else if (radeon_ring_is_lockup(rdev, ring, &rdev->ring[ring])) { in radeon_fence_check_lockup()
310 fence_drv->sync_seq[ring], ring); in radeon_fence_check_lockup()
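In radeon_fence_check_lockup() the ring index is recovered by pointer arithmetic (line 284): the delayed work item is embedded in one element of the fence_drv[] array, so subtracting the array base yields the index. A condensed sketch of the flow around lines 279-310 (the container_of() step and the exclusive_lock handling are assumed from the upstream delayed-work pattern; the delayed-IRQ handling is omitted):

static void radeon_fence_check_lockup(struct work_struct *work)
{
        struct radeon_fence_driver *fence_drv;
        struct radeon_device *rdev;
        int ring;

        fence_drv = container_of(work, struct radeon_fence_driver,
                                 lockup_work.work);
        rdev = fence_drv->rdev;
        ring = fence_drv - &rdev->fence_drv[0]; /* element index in the array */

        if (!down_read_trylock(&rdev->exclusive_lock)) {
                /* a GPU reset is in progress; just re-arm the timer */
                radeon_fence_schedule_check(rdev, ring);
                return;
        }

        if (radeon_fence_activity(rdev, ring))
                wake_up_all(&rdev->fence_queue);
        else if (radeon_ring_is_lockup(rdev, ring, &rdev->ring[ring])) {
                /* no progress and the ring looks stuck: report a lockup */
                dev_warn(rdev->dev, "GPU lockup (current fence id 0x%016llx last fence id 0x%016llx on ring %d)\n",
                         (uint64_t)atomic64_read(&fence_drv->last_seq),
                         fence_drv->sync_seq[ring], ring);
                rdev->needs_reset = true;
                wake_up_all(&rdev->fence_queue);
        }
        up_read(&rdev->exclusive_lock);
}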
328 void radeon_fence_process(struct radeon_device *rdev, int ring) in radeon_fence_process() argument
330 if (radeon_fence_activity(rdev, ring)) in radeon_fence_process()
349 u64 seq, unsigned ring) in radeon_fence_seq_signaled() argument
351 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_seq_signaled()
355 radeon_fence_process(rdev, ring); in radeon_fence_seq_signaled()
356 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_seq_signaled()
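radeon_fence_seq_signaled() (lines 349-356) reads the cached last_seq twice, polling the hardware in between, so a caller cannot miss a sequence that signaled just before the call; radeon_fence_is_signaled() below follows the same two-step pattern. Completed sketch:

static bool radeon_fence_seq_signaled(struct radeon_device *rdev,
                                      u64 seq, unsigned ring)
{
        if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq)
                return true;

        /* poll the hardware sequence at least once */
        radeon_fence_process(rdev, ring);
        if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq)
                return true;

        return false;
}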
366 unsigned ring = fence->ring; in radeon_fence_is_signaled() local
369 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_is_signaled()
374 radeon_fence_process(rdev, ring); in radeon_fence_is_signaled()
377 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_is_signaled()
397 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) in radeon_fence_enable_signaling()
401 radeon_irq_kms_sw_irq_get(rdev, fence->ring); in radeon_fence_enable_signaling()
403 if (radeon_fence_activity(rdev, fence->ring)) in radeon_fence_enable_signaling()
407 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) { in radeon_fence_enable_signaling()
408 radeon_irq_kms_sw_irq_put(rdev, fence->ring); in radeon_fence_enable_signaling()
416 if (radeon_irq_kms_sw_irq_get_delayed(rdev, fence->ring)) in radeon_fence_enable_signaling()
417 rdev->fence_drv[fence->ring].delayed_irq = true; in radeon_fence_enable_signaling()
418 radeon_fence_schedule_check(rdev, fence->ring); in radeon_fence_enable_signaling()
427 DMA_FENCE_TRACE(&fence->base, "armed on ring %i!\n", fence->ring); in radeon_fence_enable_signaling()
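radeon_fence_enable_signaling() (lines 397-427) is the dma_fence_ops hook that arms interrupt-driven completion: it enables the software interrupt, re-checks the sequence in case the fence signaled meanwhile (lines 407-408), and falls back to a delayed IRQ request plus the polling timer when the exclusive lock cannot be taken (lines 416-418). A condensed sketch; the wait-queue wiring at the end is assumed from the upstream dma_fence callback pattern:

static bool radeon_fence_enable_signaling(struct dma_fence *f)
{
        struct radeon_fence *fence = to_radeon_fence(f);
        struct radeon_device *rdev = fence->rdev;

        if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq)
                return false;   /* already signaled, nothing to arm */

        if (down_read_trylock(&rdev->exclusive_lock)) {
                radeon_irq_kms_sw_irq_get(rdev, fence->ring);

                if (radeon_fence_activity(rdev, fence->ring))
                        wake_up_all_locked(&rdev->fence_queue);

                /* did the fence signal while we enabled the sw irq? */
                if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) {
                        radeon_irq_kms_sw_irq_put(rdev, fence->ring);
                        up_read(&rdev->exclusive_lock);
                        return false;
                }
                up_read(&rdev->exclusive_lock);
        } else {
                /* probably a lockup: queue the irq request and keep polling */
                if (radeon_irq_kms_sw_irq_get_delayed(rdev, fence->ring))
                        rdev->fence_drv[fence->ring].delayed_irq = true;
                radeon_fence_schedule_check(rdev, fence->ring);
        }

        /* hook the fence into the per-device wait queue */
        fence->fence_wake.flags = 0;
        fence->fence_wake.private = NULL;
        fence->fence_wake.func = radeon_fence_check_signaled;
        __add_wait_queue(&rdev->fence_queue, &fence->fence_wake);
        dma_fence_get(f);
        return true;
}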
444 if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) { in radeon_fence_signaled()
565 seq[fence->ring] = fence->seq; in radeon_fence_wait_timeout()
651 int radeon_fence_wait_next(struct radeon_device *rdev, int ring) in radeon_fence_wait_next() argument
656 seq[ring] = atomic64_read(&rdev->fence_drv[ring].last_seq) + 1ULL; in radeon_fence_wait_next()
657 if (seq[ring] >= rdev->fence_drv[ring].sync_seq[ring]) { in radeon_fence_wait_next()
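radeon_fence_wait_next() (lines 651-657) waits for the sequence right after last_seq and bails out with -ENOENT when nothing beyond it has been emitted. Sketch, assuming the upstream radeon_fence_wait_seq_timeout() helper:

int radeon_fence_wait_next(struct radeon_device *rdev, int ring)
{
        uint64_t seq[RADEON_NUM_RINGS] = {};
        long r;

        seq[ring] = atomic64_read(&rdev->fence_drv[ring].last_seq) + 1ULL;
        if (seq[ring] >= rdev->fence_drv[ring].sync_seq[ring]) {
                /* nothing to wait for: last_seq is already the
                 * last emitted fence */
                return -ENOENT;
        }
        r = radeon_fence_wait_seq_timeout(rdev, seq, false,
                                          MAX_SCHEDULE_TIMEOUT);
        if (r < 0)
                return r;
        return 0;
}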
678 int radeon_fence_wait_empty(struct radeon_device *rdev, int ring) in radeon_fence_wait_empty() argument
683 seq[ring] = rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_wait_empty()
684 if (!seq[ring]) in radeon_fence_wait_empty()
693 ring, r); in radeon_fence_wait_empty()
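radeon_fence_wait_empty() (lines 678-693) waits for the last emitted sequence, i.e. for the ring to drain completely; a zero sync_seq means nothing was ever emitted (line 684). Sketch under the same assumption:

int radeon_fence_wait_empty(struct radeon_device *rdev, int ring)
{
        uint64_t seq[RADEON_NUM_RINGS] = {};
        long r;

        seq[ring] = rdev->fence_drv[ring].sync_seq[ring];
        if (!seq[ring])
                return 0;       /* this ring never emitted a fence */

        r = radeon_fence_wait_seq_timeout(rdev, seq, false,
                                          MAX_SCHEDULE_TIMEOUT);
        if (r < 0) {
                if (r == -EDEADLK)
                        return -EDEADLK;        /* caller handles GPU reset */
                dev_err(rdev->dev, "error waiting for ring[%d] to become idle (%ld)\n",
                        ring, r);
        }
        return 0;
}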
739 unsigned radeon_fence_count_emitted(struct radeon_device *rdev, int ring) in radeon_fence_count_emitted() argument
746 radeon_fence_process(rdev, ring); in radeon_fence_count_emitted()
747 emitted = rdev->fence_drv[ring].sync_seq[ring] in radeon_fence_count_emitted()
748 - atomic64_read(&rdev->fence_drv[ring].last_seq); in radeon_fence_count_emitted()
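radeon_fence_count_emitted() (lines 739-748) reports the in-flight fence count as the distance between the last emitted and the last signaled sequence; upstream additionally clamps the result against 32-bit wraparound. Completed sketch:

unsigned radeon_fence_count_emitted(struct radeon_device *rdev, int ring)
{
        uint64_t emitted;

        /* not holding the ring lock: a slightly stale count is acceptable */
        radeon_fence_process(rdev, ring);
        emitted = rdev->fence_drv[ring].sync_seq[ring]
                - atomic64_read(&rdev->fence_drv[ring].last_seq);
        /* clamp to avoid reporting garbage across a 32-bit wrap */
        if (emitted > 0x10000000)
                emitted = 0x10000000;

        return (unsigned)emitted;
}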
775 if (fence->ring == dst_ring) { in radeon_fence_need_sync()
781 if (fence->seq <= fdrv->sync_seq[fence->ring]) { in radeon_fence_need_sync()
806 if (fence->ring == dst_ring) { in radeon_fence_note_sync()
811 src = &fence->rdev->fence_drv[fence->ring]; in radeon_fence_note_sync()
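radeon_fence_need_sync() and radeon_fence_note_sync() (lines 775-811) do the inter-ring synchronization bookkeeping: a fence on the destination ring itself never needs a semaphore (lines 775/806), and once a sync is emitted the destination ring records the source ring's sequence numbers so it will not sync against them again. A paired sketch; the max()-merge loop in note_sync is assumed from upstream:

bool radeon_fence_need_sync(struct radeon_fence *fence, int dst_ring)
{
        struct radeon_fence_driver *fdrv;

        if (!fence)
                return false;

        if (fence->ring == dst_ring)
                return false;   /* same ring: ordering is implicit */

        /* we are protected by the ring mutex */
        fdrv = &fence->rdev->fence_drv[dst_ring];
        if (fence->seq <= fdrv->sync_seq[fence->ring])
                return false;   /* already synced past this fence */

        return true;
}

void radeon_fence_note_sync(struct radeon_fence *fence, int dst_ring)
{
        struct radeon_fence_driver *dst, *src;
        unsigned i;

        if (!fence)
                return;

        if (fence->ring == dst_ring)
                return;

        /* we are protected by the ring mutex */
        src = &fence->rdev->fence_drv[fence->ring];
        dst = &fence->rdev->fence_drv[dst_ring];
        for (i = 0; i < RADEON_NUM_RINGS; ++i) {
                if (i == dst_ring)
                        continue;
                /* remember the newest source sequence we synced against */
                dst->sync_seq[i] = max(dst->sync_seq[i], src->sync_seq[i]);
        }
}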
833 int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring) in radeon_fence_driver_start_ring() argument
838 radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg); in radeon_fence_driver_start_ring()
839 if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) { in radeon_fence_driver_start_ring()
840 rdev->fence_drv[ring].scratch_reg = 0; in radeon_fence_driver_start_ring()
841 if (ring != R600_RING_TYPE_UVD_INDEX) { in radeon_fence_driver_start_ring()
842 index = R600_WB_EVENT_OFFSET + ring * 4; in radeon_fence_driver_start_ring()
843 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
844 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + in radeon_fence_driver_start_ring()
850 rdev->fence_drv[ring].cpu_addr = rdev->uvd.cpu_addr + index; in radeon_fence_driver_start_ring()
851 rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index; in radeon_fence_driver_start_ring()
855 r = radeon_scratch_get(rdev, &rdev->fence_drv[ring].scratch_reg); in radeon_fence_driver_start_ring()
861 rdev->fence_drv[ring].scratch_reg - in radeon_fence_driver_start_ring()
863 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
864 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; in radeon_fence_driver_start_ring()
866 radeon_fence_write(rdev, atomic64_read(&rdev->fence_drv[ring].last_seq), ring); in radeon_fence_driver_start_ring()
867 rdev->fence_drv[ring].initialized = true; in radeon_fence_driver_start_ring()
869 ring, rdev->fence_drv[ring].gpu_addr); in radeon_fence_driver_start_ring()
883 static void radeon_fence_driver_init_ring(struct radeon_device *rdev, int ring) in radeon_fence_driver_init_ring() argument
887 rdev->fence_drv[ring].scratch_reg = -1; in radeon_fence_driver_init_ring()
888 rdev->fence_drv[ring].cpu_addr = NULL; in radeon_fence_driver_init_ring()
889 rdev->fence_drv[ring].gpu_addr = 0; in radeon_fence_driver_init_ring()
891 rdev->fence_drv[ring].sync_seq[i] = 0; in radeon_fence_driver_init_ring()
892 atomic64_set(&rdev->fence_drv[ring].last_seq, 0); in radeon_fence_driver_init_ring()
893 rdev->fence_drv[ring].initialized = false; in radeon_fence_driver_init_ring()
894 INIT_DELAYED_WORK(&rdev->fence_drv[ring].lockup_work, in radeon_fence_driver_init_ring()
896 rdev->fence_drv[ring].rdev = rdev; in radeon_fence_driver_init_ring()
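radeon_fence_driver_init_ring() (lines 883-896) only zeroes the per-ring state and wires up the lockup work; the listing already shows nearly all of it. Completed sketch:

static void radeon_fence_driver_init_ring(struct radeon_device *rdev, int ring)
{
        int i;

        rdev->fence_drv[ring].scratch_reg = -1;
        rdev->fence_drv[ring].cpu_addr = NULL;
        rdev->fence_drv[ring].gpu_addr = 0;
        for (i = 0; i < RADEON_NUM_RINGS; ++i)
                rdev->fence_drv[ring].sync_seq[i] = 0;
        atomic64_set(&rdev->fence_drv[ring].last_seq, 0);
        rdev->fence_drv[ring].initialized = false;
        INIT_DELAYED_WORK(&rdev->fence_drv[ring].lockup_work,
                          radeon_fence_check_lockup);
        rdev->fence_drv[ring].rdev = rdev;
}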
913 int ring; in radeon_fence_driver_init() local
916 for (ring = 0; ring < RADEON_NUM_RINGS; ring++) { in radeon_fence_driver_init()
917 radeon_fence_driver_init_ring(rdev, ring); in radeon_fence_driver_init()
935 int ring, r; in radeon_fence_driver_fini() local
938 for (ring = 0; ring < RADEON_NUM_RINGS; ring++) { in radeon_fence_driver_fini()
939 if (!rdev->fence_drv[ring].initialized) in radeon_fence_driver_fini()
941 r = radeon_fence_wait_empty(rdev, ring); in radeon_fence_driver_fini()
944 radeon_fence_driver_force_completion(rdev, ring); in radeon_fence_driver_fini()
946 cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work); in radeon_fence_driver_fini()
948 radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg); in radeon_fence_driver_fini()
949 rdev->fence_drv[ring].initialized = false; in radeon_fence_driver_fini()
963 void radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring) in radeon_fence_driver_force_completion() argument
965 if (rdev->fence_drv[ring].initialized) { in radeon_fence_driver_force_completion()
966 radeon_fence_write(rdev, rdev->fence_drv[ring].sync_seq[ring], ring); in radeon_fence_driver_force_completion()
967 cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work); in radeon_fence_driver_force_completion()
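radeon_fence_driver_force_completion() (lines 963-967) fakes completion after a lockup by writing the last emitted sequence straight into the fence slot, so every outstanding fence reads back as signaled. Completed sketch:

void radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring)
{
        if (rdev->fence_drv[ring].initialized) {
                /* pretend everything up to sync_seq[ring] has signaled */
                radeon_fence_write(rdev, rdev->fence_drv[ring].sync_seq[ring], ring);
                cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work);
        }
}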
1047 switch (fence->ring) { in radeon_fence_get_timeline_name()