
Lines Matching refs:it

134   struct active_node *it, *n;  in __active_retire() local
176   rbtree_postorder_for_each_entry_safe(it, n, &root, node) {  in __active_retire()
177       GEM_BUG_ON(i915_active_fence_isset(&it->base));  in __active_retire()
178       kmem_cache_free(global.slab_cache, it);  in __active_retire()
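
The __active_retire() hits above show the standard kernel idiom for tearing down an entire rbtree in one pass: a postorder walk visits children before their parent, so each node can be freed as soon as it is visited while the "safe" cursor already holds the next entry. A minimal sketch of the pattern, with hypothetical my_node/cache names standing in for active_node and global.slab_cache:

    #include <linux/rbtree.h>
    #include <linux/slab.h>

    struct my_node {
            struct rb_node node;    /* linkage into the rb-tree */
            /* payload ... */
    };

    /* Sketch: free every node; no rebalancing is needed since the
     * whole tree is being discarded. */
    static void free_all(struct rb_root *root, struct kmem_cache *cache)
    {
            struct my_node *it, *n;

            rbtree_postorder_for_each_entry_safe(it, n, root, node)
                    kmem_cache_free(cache, it);
            *root = RB_ROOT;
    }
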
240   struct active_node *it;  in __active_lookup() local
251   it = READ_ONCE(ref->cache);  in __active_lookup()
252   if (it) {  in __active_lookup()
253       u64 cached = READ_ONCE(it->timeline);  in __active_lookup()
257       return it;  in __active_lookup()
270       if (!cached && !cmpxchg(&it->timeline, 0, idx))  in __active_lookup()
271           return it;  in __active_lookup()
275   BUILD_BUG_ON(offsetof(typeof(*it), node));  in __active_lookup()
280   it = fetch_node(ref->tree.rb_node);  in __active_lookup()
281   while (it) {  in __active_lookup()
282       if (it->timeline < idx) {  in __active_lookup()
283           it = fetch_node(it->node.rb_right);  in __active_lookup()
284       } else if (it->timeline > idx) {  in __active_lookup()
285           it = fetch_node(it->node.rb_left);  in __active_lookup()
287           WRITE_ONCE(ref->cache, it);  in __active_lookup()
293   return it;  in __active_lookup()
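
The __active_lookup() hits trace a lockless cached lookup: a last-hit cache read with READ_ONCE(), an idle slot (timeline == 0) claimed via cmpxchg() so that exactly one racing caller wins, and a fallback binary search over the rbtree whose result is published back to the cache with WRITE_ONCE(). The BUILD_BUG_ON at line 275 asserts that the rb_node is the first member of active_node, so an rb_node pointer can be cast directly to an active_node pointer. A sketch assembling these fragments; fetch_node() is assumed to be a READ_ONCE() wrapper around the rb-tree child pointers, and the struct layouts are assumptions beyond what the listing shows:

    static struct active_node *
    active_lookup_sketch(struct i915_active *ref, u64 idx)
    {
            struct active_node *it;

            /* Fast path: the node used by the previous lookup. */
            it = READ_ONCE(ref->cache);
            if (it) {
                    u64 cached = READ_ONCE(it->timeline);

                    if (cached == idx)
                            return it;

                    /* An idle slot may be claimed for this timeline;
                     * cmpxchg() picks a single winner under contention. */
                    if (!cached && !cmpxchg(&it->timeline, 0, idx))
                            return it;
            }

            /* Slow path: lockless binary search, publishing any hit. */
            it = fetch_node(ref->tree.rb_node);
            while (it) {
                    if (it->timeline < idx) {
                            it = fetch_node(it->node.rb_right);
                    } else if (it->timeline > idx) {
                            it = fetch_node(it->node.rb_left);
                    } else {
                            WRITE_ONCE(ref->cache, it);
                            break;
                    }
            }

            return it;
    }
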
493   struct active_node *it;  in __active_fence() local
495   it = __active_lookup(ref, idx);  in __active_fence()
496   if (unlikely(!it)) { /* Contention with parallel tree builders! */  in __active_fence()
498       it = __active_lookup(ref, idx);  in __active_fence()
501   GEM_BUG_ON(!it); /* slot must be preallocated */  in __active_fence()
503   return &it->base;  in __active_fence()
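
__active_fence() shows how the lockless lookup is made reliable: if the first attempt loses a race with concurrent tree builders, the lookup is simply retried, and afterwards the slot must exist because it was preallocated. A hedged sketch; the tree_lock name and the locking around the retry are assumptions, not shown in the listing:

    static struct i915_active_fence *
    active_fence_sketch(struct i915_active *ref, u64 idx)
    {
            struct active_node *it;

            it = __active_lookup(ref, idx);
            if (unlikely(!it)) { /* contention with parallel tree builders */
                    spin_lock_irq(&ref->tree_lock); /* assumed lock */
                    it = __active_lookup(ref, idx);
                    spin_unlock_irq(&ref->tree_lock);
            }
            GEM_BUG_ON(!it); /* the slot must have been preallocated */

            return &it->base;
    }
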
600   static int flush_barrier(struct active_node *it)  in flush_barrier() argument
604       if (likely(!is_barrier(&it->base)))  in flush_barrier()
607       engine = __barrier_to_engine(it);  in flush_barrier()
609       if (!is_barrier(&it->base))  in flush_barrier()
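
flush_barrier() uses a double-check: the cheap is_barrier() test is repeated after resolving the engine, because a concurrent path may convert the node between the two tests. The sketch below fills the elided lines with assumptions; the memory barrier and the final flush call are not in the listing:

    static int flush_barrier_sketch(struct active_node *it)
    {
            struct intel_engine_cs *engine;

            if (likely(!is_barrier(&it->base)))
                    return 0;   /* an ordinary, connected node */

            engine = __barrier_to_engine(it);
            smp_rmb(); /* assumed: order against concurrent conversion */
            if (!is_barrier(&it->base))
                    return 0;   /* converted while we looked */

            /* assumed: kick the engine to consume its idle barriers */
            return intel_engine_flush_barriers(engine);
    }
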
617   struct active_node *it, *n;  in flush_lazy_signals() local
621   rbtree_postorder_for_each_entry_safe(it, n, &ref->tree, node) {  in flush_lazy_signals()
622       err = flush_barrier(it); /* unconnected idle barrier? */  in flush_lazy_signals()
626       enable_signaling(&it->base);  in flush_lazy_signals()
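
flush_lazy_signals() reuses the same postorder walk as __active_retire(), this time to flush any unconnected idle barrier and then enable signalling on each recorded fence; await_active() in the next group iterates identically, applying a caller-supplied callback instead. A sketch of the loop; the error handling around the break is an assumption:

    static int flush_lazy_signals_sketch(struct i915_active *ref)
    {
            struct active_node *it, *n;
            int err = 0;

            rbtree_postorder_for_each_entry_safe(it, n, &ref->tree, node) {
                    err = flush_barrier(it); /* unconnected idle barrier? */
                    if (err)
                            break;

                    enable_signaling(&it->base);
            }

            return err;
    }
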
740   struct active_node *it, *n;  in await_active() local
742   rbtree_postorder_for_each_entry_safe(it, n, &ref->tree, node) {  in await_active()
743       err = __await_active(&it->base, fn, arg);  in await_active()
993   struct active_node *it;  in i915_active_acquire_barrier() local
997   it = rb_entry(parent, struct active_node, node);  in i915_active_acquire_barrier()
998   if (it->timeline < node->timeline)  in i915_active_acquire_barrier()
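
The i915_active_acquire_barrier() hits are the comparison step of a standard rbtree insertion keyed on timeline: rb_entry() recovers the containing active_node from the parent rb_node, and the new node descends right when the existing timeline is smaller. A sketch of the full insert loop around that comparison; everything except the listed comparison is the generic rbtree insert idiom, assumed here:

    static void insert_sketch(struct i915_active *ref, struct active_node *node)
    {
            struct rb_node **p = &ref->tree.rb_node, *parent = NULL;
            struct active_node *it;

            while (*p) {
                    parent = *p;
                    it = rb_entry(parent, struct active_node, node);
                    if (it->timeline < node->timeline)
                            p = &parent->rb_right;
                    else
                            p = &parent->rb_left;
            }

            rb_link_node(&node->node, parent, p);
            rb_insert_color(&node->node, &ref->tree);
    }
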