Lines Matching full:fence
50 * for GPU/CPU synchronization. When the fence is written,
51 * it is expected that all buffers associated with that fence
59 * radeon_fence_write - write a fence value
63 * @ring: ring index the fence is associated with
65 * Writes a fence value to memory or a scratch register (all asics).
80 * radeon_fence_read - read a fence value
83 * @ring: ring index the fence is associated with
85 * Reads a fence value from memory or a scratch register (all asics).
86 * Returns the value of the fence read from memory or register.
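Both helpers pick between the memory-backed fence location and a scratch register. A minimal sketch of the write side, assuming the usual radeon_fence_driver fields (cpu_addr, scratch_reg), the rdev->wb.enabled writeback flag and the WREG32() accessor; this is an illustration, not the verbatim driver code:

    /* hedged sketch: write the fence value to memory or a scratch register */
    static void radeon_fence_write_sketch(struct radeon_device *rdev, u32 seq, int ring)
    {
            struct radeon_fence_driver *drv = &rdev->fence_drv[ring];

            if (likely(rdev->wb.enabled || !drv->scratch_reg)) {
                    if (drv->cpu_addr)
                            *drv->cpu_addr = cpu_to_le32(seq);
            } else {
                    WREG32(drv->scratch_reg, seq);
            }
    }

The read side mirrors this with le32_to_cpu()/RREG32().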
125 * radeon_fence_emit - emit a fence on the requested ring
128 * @fence: radeon fence object
129 * @ring: ring index the fence is associated with
131 * Emits a fence command on the requested ring (all asics).
135 struct radeon_fence **fence, in radeon_fence_emit() argument
141 *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL); in radeon_fence_emit()
142 if ((*fence) == NULL) { in radeon_fence_emit()
145 (*fence)->rdev = rdev; in radeon_fence_emit()
146 (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
147 (*fence)->ring = ring; in radeon_fence_emit()
148 (*fence)->is_vm_update = false; in radeon_fence_emit()
149 dma_fence_init(&(*fence)->base, &radeon_fence_ops, in radeon_fence_emit()
153 radeon_fence_ring_emit(rdev, ring, *fence); in radeon_fence_emit()
154 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); in radeon_fence_emit()
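A hedged caller-side sketch of emitting a fence after work has been queued on a ring; it assumes the radeon_fence_emit(rdev, &fence, ring) form implied above, with rdev and ring valid and the ring lock held by the caller:

    /* hedged usage sketch */
    struct radeon_fence *fence = NULL;
    int r;

    r = radeon_fence_emit(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX);
    if (r) {
            dev_err(rdev->dev, "failed to emit fence (%d)\n", r);
            return r;
    }
    /* ... wait on or hand off the fence, then drop the reference ... */
    radeon_fence_unref(&fence);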
163 * for the fence locking itself, so unlocked variants are used for
168 struct radeon_fence *fence; in radeon_fence_check_signaled() local
171 fence = container_of(wait, struct radeon_fence, fence_wake); in radeon_fence_check_signaled()
177 seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq); in radeon_fence_check_signaled()
178 if (seq >= fence->seq) { in radeon_fence_check_signaled()
179 dma_fence_signal_locked(&fence->base); in radeon_fence_check_signaled()
180 radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring); in radeon_fence_check_signaled()
181 __remove_wait_queue(&fence->rdev->fence_queue, &fence->fence_wake); in radeon_fence_check_signaled()
182 dma_fence_put(&fence->base); in radeon_fence_check_signaled()
188 * radeon_fence_activity - check for fence activity
191 * @ring: ring index the fence is associated with
193 * Checks the current fence value and calculates the last
194 * signaled fence value. Returns true if activity occurred
210 * continuously new fence signaled, i.e. radeon_fence_read needs in radeon_fence_activity()
238 * checking if a fence is signaled as it means that the in radeon_fence_activity()
245 * fact that we might have set an older fence in radeon_fence_activity()
264 * Checks for fence activity and if there is none probe
299 dev_warn(rdev->dev, "GPU lockup (current fence id " in radeon_fence_check_lockup()
300 "0x%016llx last fence id 0x%016llx on ring %d)\n", in radeon_fence_check_lockup()
312 * radeon_fence_process - process a fence
315 * @ring: ring index the fence is associated with
317 * Checks the current fence value and wakes the fence queue
327 * radeon_fence_seq_signaled - check if a fence sequence number has signaled
331 * @ring: ring index the fence is associated with
333 * Check if the last signaled fence sequence number is >= the requested
335 * Returns true if the fence has signaled (current fence value
336 * is >= requested value) or false if it has not (current fence
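The check itself is a simple comparison against the driver's atomically tracked last_seq, with one re-poll if it has not caught up yet. A minimal sketch, assuming radeon_fence_process() (documented above) updates last_seq:

    /* hedged sketch of the sequence-number check */
    static bool radeon_fence_seq_signaled_sketch(struct radeon_device *rdev,
                                                 u64 seq, unsigned ring)
    {
            if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq)
                    return true;
            /* poll the hardware fence value at least once */
            radeon_fence_process(rdev, ring);
            return atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq;
    }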
356 struct radeon_fence *fence = to_radeon_fence(f); in radeon_fence_is_signaled() local
357 struct radeon_device *rdev = fence->rdev; in radeon_fence_is_signaled()
358 unsigned ring = fence->ring; in radeon_fence_is_signaled()
359 u64 seq = fence->seq; in radeon_fence_is_signaled()
377 * radeon_fence_enable_signaling - enable signalling on fence
378 * @f: fence
381 * to fence_queue that checks if this fence is signaled, and if so it
382 * signals the fence and removes itself.
386 struct radeon_fence *fence = to_radeon_fence(f); in radeon_fence_enable_signaling() local
387 struct radeon_device *rdev = fence->rdev; in radeon_fence_enable_signaling()
389 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) in radeon_fence_enable_signaling()
393 radeon_irq_kms_sw_irq_get(rdev, fence->ring); in radeon_fence_enable_signaling()
395 if (radeon_fence_activity(rdev, fence->ring)) in radeon_fence_enable_signaling()
398 /* did fence get signaled after we enabled the sw irq? */ in radeon_fence_enable_signaling()
399 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) { in radeon_fence_enable_signaling()
400 radeon_irq_kms_sw_irq_put(rdev, fence->ring); in radeon_fence_enable_signaling()
408 if (radeon_irq_kms_sw_irq_get_delayed(rdev, fence->ring)) in radeon_fence_enable_signaling()
409 rdev->fence_drv[fence->ring].delayed_irq = true; in radeon_fence_enable_signaling()
410 radeon_fence_schedule_check(rdev, fence->ring); in radeon_fence_enable_signaling()
413 fence->fence_wake.flags = 0; in radeon_fence_enable_signaling()
414 fence->fence_wake.private = NULL; in radeon_fence_enable_signaling()
415 fence->fence_wake.func = radeon_fence_check_signaled; in radeon_fence_enable_signaling()
416 __add_wait_queue(&rdev->fence_queue, &fence->fence_wake); in radeon_fence_enable_signaling()
422 * radeon_fence_signaled - check if a fence has signaled
424 * @fence: radeon fence object
426 * Check if the requested fence has signaled (all asics).
427 * Returns true if the fence has signaled or false if it has not.
429 bool radeon_fence_signaled(struct radeon_fence *fence) in radeon_fence_signaled() argument
431 if (!fence) in radeon_fence_signaled()
434 if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) { in radeon_fence_signaled()
435 dma_fence_signal(&fence->base); in radeon_fence_signaled()
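Typical non-blocking use of radeon_fence_signaled() from driver code (usage sketch):

    /* hedged usage sketch: poll instead of sleeping */
    if (radeon_fence_signaled(fence)) {
            /* buffers protected by this fence are idle; release our reference */
            radeon_fence_unref(&fence);
    }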
447 * Check if the last signaled fence sequence number is >= the requested
524 * radeon_fence_wait_timeout - wait for a fence to signal with timeout
526 * @fence: radeon fence object
529 * Wait for the requested fence to signal (all asics).
531 * (false) sleep when waiting for the fence.
536 long radeon_fence_wait_timeout(struct radeon_fence *fence, bool intr, long timeout) in radeon_fence_wait_timeout() argument
547 if (WARN_ON_ONCE(!to_radeon_fence(&fence->base))) in radeon_fence_wait_timeout()
548 return dma_fence_wait(&fence->base, intr); in radeon_fence_wait_timeout()
550 seq[fence->ring] = fence->seq; in radeon_fence_wait_timeout()
551 r = radeon_fence_wait_seq_timeout(fence->rdev, seq, intr, timeout); in radeon_fence_wait_timeout()
556 dma_fence_signal(&fence->base); in radeon_fence_wait_timeout()
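A hedged usage sketch of the timeout variant; the timeout is in jiffies, and the return convention is assumed to follow the dma_fence style (remaining jiffies on success, 0 on timeout, negative error otherwise):

    /* hedged usage sketch */
    long r = radeon_fence_wait_timeout(fence, true, msecs_to_jiffies(500));
    if (r == 0)
            dev_warn(rdev->dev, "fence wait timed out\n");
    else if (r < 0)
            return r;       /* e.g. -ERESTARTSYS when interrupted */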
561 * radeon_fence_wait - wait for a fence to signal
563 * @fence: radeon fence object
566 * Wait for the requested fence to signal (all asics).
568 * (false) sleep when waiting for the fence.
569 * Returns 0 if the fence has passed, error for all other cases.
571 int radeon_fence_wait(struct radeon_fence *fence, bool intr) in radeon_fence_wait() argument
573 long r = radeon_fence_wait_timeout(fence, intr, MAX_SCHEDULE_TIMEOUT); in radeon_fence_wait()
582 * radeon_fence_wait_any - wait for a fence to signal on any ring
585 * @fences: radeon fence object(s)
588 * Wait for any requested fence to signal (all asics). Fence
592 * Returns 0 if any fence has passed, error for all other cases.
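A hedged sketch of waiting for whichever ring finishes first; it assumes the conventional radeon_fence_wait_any(rdev, fences, intr) form with one array slot per ring, and fence_gfx/fence_dma are hypothetical fences obtained elsewhere:

    /* hedged usage sketch */
    struct radeon_fence *fences[RADEON_NUM_RINGS] = { NULL };
    int r;

    fences[RADEON_RING_TYPE_GFX_INDEX] = fence_gfx;
    fences[R600_RING_TYPE_DMA_INDEX] = fence_dma;
    r = radeon_fence_wait_any(rdev, fences, false);
    if (r)
            return r;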
625 * radeon_fence_wait_next - wait for the next fence to signal
628 * @ring: ring index the fence is associated with
630 * Wait for the next fence on the requested ring to signal (all asics).
631 * Returns 0 if the next fence has passed, error for all other cases.
642 already the last emitted fence */ in radeon_fence_wait_next()
655 * @ring: ring index the fence is associated with
682 * radeon_fence_ref - take a ref on a fence
684 * @fence: radeon fence object
686 * Take a reference on a fence (all asics).
687 * Returns the fence.
689 struct radeon_fence *radeon_fence_ref(struct radeon_fence *fence) in radeon_fence_ref() argument
691 dma_fence_get(&fence->base); in radeon_fence_ref()
692 return fence; in radeon_fence_ref()
696 * radeon_fence_unref - remove a ref on a fence
698 * @fence: radeon fence object
700 * Remove a reference on a fence (all asics).
702 void radeon_fence_unref(struct radeon_fence **fence) in radeon_fence_unref() argument
704 struct radeon_fence *tmp = *fence; in radeon_fence_unref()
706 *fence = NULL; in radeon_fence_unref()
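Reference handling follows the usual get/put pattern; a short usage sketch:

    /* hedged usage sketch */
    struct radeon_fence *extra = radeon_fence_ref(fence);  /* +1 reference */
    /* ... hand 'extra' to another code path ... */
    radeon_fence_unref(&extra);  /* drops the reference and NULLs the pointer */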
716 * @ring: ring index the fence is associated with
727 * but it's ok to report slightly wrong fence count here. in radeon_fence_count_emitted()
742 * @fence: radeon fence object
745 * Check if the fence needs to be synced against another ring
750 bool radeon_fence_need_sync(struct radeon_fence *fence, int dst_ring) in radeon_fence_need_sync() argument
754 if (!fence) { in radeon_fence_need_sync()
758 if (fence->ring == dst_ring) { in radeon_fence_need_sync()
763 fdrv = &fence->rdev->fence_drv[dst_ring]; in radeon_fence_need_sync()
764 if (fence->seq <= fdrv->sync_seq[fence->ring]) { in radeon_fence_need_sync()
774 * @fence: radeon fence object
777 * Note the sequence number at which point the fence will
780 void radeon_fence_note_sync(struct radeon_fence *fence, int dst_ring) in radeon_fence_note_sync() argument
785 if (!fence) { in radeon_fence_note_sync()
789 if (fence->ring == dst_ring) { in radeon_fence_note_sync()
794 src = &fence->rdev->fence_drv[fence->ring]; in radeon_fence_note_sync()
795 dst = &fence->rdev->fence_drv[dst_ring]; in radeon_fence_note_sync()
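need_sync and note_sync are normally paired when work on one ring depends on a fence from another; a hedged sketch of the pattern (the wait emission in the middle is only indicative):

    /* hedged inter-ring sync sketch */
    if (radeon_fence_need_sync(fence, dst_ring)) {
            /* emit a wait on dst_ring (e.g. via a semaphore) before the new work */
            radeon_fence_note_sync(fence, dst_ring);
    }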
805 * radeon_fence_driver_start_ring - make the fence driver
809 * @ring: ring index to start the fence driver on
811 * Make the fence driver ready for processing (all asics).
813 * start the fence driver on the rings it has.
831 /* put fence directly behind firmware */ in radeon_fence_driver_start_ring()
840 dev_err(rdev->dev, "fence failed to get scratch register\n"); in radeon_fence_driver_start_ring()
851 dev_info(rdev->dev, "fence driver on ring %d use gpu addr 0x%016llx\n", in radeon_fence_driver_start_ring()
857 * radeon_fence_driver_init_ring - init the fence driver
861 * @ring: ring index to start the fence driver on
863 * Init the fence driver for the requested ring (all asics).
883 * radeon_fence_driver_init - init the fence driver
888 * Init the fence driver for all possible rings (all asics).
890 * start the fence driver on the rings it has using
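Initialization walks every possible ring; a minimal sketch, assuming the per-ring helper radeon_fence_driver_init_ring() named above and the rdev->fence_queue wait queue:

    /* hedged init sketch */
    int ring;

    init_waitqueue_head(&rdev->fence_queue);
    for (ring = 0; ring < RADEON_NUM_RINGS; ring++)
            radeon_fence_driver_init_ring(rdev, ring);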
906 * radeon_fence_driver_fini - tear down the fence driver
911 * Tear down the fence driver for all possible rings (all asics).
935 * radeon_fence_driver_force_completion - force all fence waiters to complete
940 * In case of GPU reset failure make sure no process keeps waiting on a fence
953 * Fence debugfs
968 seq_printf(m, "Last signaled fence 0x%016llx\n", in radeon_debugfs_fence_info_show()
985 * Manually trigger a gpu reset at the next fence wait.
1018 static const char *radeon_fence_get_driver_name(struct dma_fence *fence) in radeon_fence_get_driver_name() argument
1025 struct radeon_fence *fence = to_radeon_fence(f); in radeon_fence_get_timeline_name() local
1026 switch (fence->ring) { in radeon_fence_get_timeline_name()
1039 static inline bool radeon_test_signaled(struct radeon_fence *fence) in radeon_test_signaled() argument
1041 return test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->base.flags); in radeon_test_signaled()
1050 radeon_fence_wait_cb(struct dma_fence *fence, struct dma_fence_cb *cb) in radeon_fence_wait_cb() argument
1061 struct radeon_fence *fence = to_radeon_fence(f); in radeon_fence_default_wait() local
1062 struct radeon_device *rdev = fence->rdev; in radeon_fence_default_wait()
1080 if (radeon_test_signaled(fence)) in radeon_fence_default_wait()
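The default wait path typically installs a dma_fence callback that wakes the sleeping task and then loops until the fence signals or the timeout runs out. A hedged sketch of that pattern, using only standard dma_fence and scheduler calls; the wrapper struct and names are illustrative:

    /* hedged sketch: wake-up callback plus sleep loop */
    struct radeon_wait_cb_sketch {
            struct dma_fence_cb base;
            struct task_struct *task;
    };

    static void radeon_wait_cb_func(struct dma_fence *f, struct dma_fence_cb *cb)
    {
            struct radeon_wait_cb_sketch *wait =
                    container_of(cb, struct radeon_wait_cb_sketch, base);

            wake_up_process(wait->task);
    }

    /* in the wait path, with fence/f and timeout t as in the function above: */
    struct radeon_wait_cb_sketch cb = { .task = current };

    if (dma_fence_add_callback(f, &cb.base, radeon_wait_cb_func))
            return t;       /* already signaled */

    while (t > 0) {
            set_current_state(intr ? TASK_INTERRUPTIBLE : TASK_UNINTERRUPTIBLE);
            /* check after set_current_state() to avoid racing with wake_up_process() */
            if (radeon_test_signaled(fence))
                    break;
            t = schedule_timeout(t);
            if (t > 0 && intr && signal_pending(current)) {
                    t = -ERESTARTSYS;
                    break;
            }
    }
    __set_current_state(TASK_RUNNING);
    dma_fence_remove_callback(f, &cb.base);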