Lines matching refs:eb (references to the identifier eb) in drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c

301 static int eb_parse(struct i915_execbuffer *eb);
302 static struct i915_request *eb_pin_engine(struct i915_execbuffer *eb,
304 static void eb_unpin_engine(struct i915_execbuffer *eb);
306 static inline bool eb_use_cmdparser(const struct i915_execbuffer *eb) in eb_use_cmdparser() argument
308 return intel_engine_requires_cmd_parser(eb->engine) || in eb_use_cmdparser()
309 (intel_engine_using_cmd_parser(eb->engine) && in eb_use_cmdparser()
310 eb->args->batch_len); in eb_use_cmdparser()
313 static int eb_create(struct i915_execbuffer *eb) in eb_create() argument
315 if (!(eb->args->flags & I915_EXEC_HANDLE_LUT)) { in eb_create()
316 unsigned int size = 1 + ilog2(eb->buffer_count); in eb_create()
342 eb->buckets = kzalloc(sizeof(struct hlist_head) << size, in eb_create()
344 if (eb->buckets) in eb_create()
351 eb->lut_size = size; in eb_create()
353 eb->lut_size = -eb->buffer_count; in eb_create()
416 eb_pin_vma(struct i915_execbuffer *eb, in eb_pin_vma() argument
434 err = i915_vma_pin_ww(vma, &eb->ww, 0, 0, pin_flags); in eb_pin_vma()
443 err = i915_vma_pin_ww(vma, &eb->ww, in eb_pin_vma()
484 eb_validate_vma(struct i915_execbuffer *eb, in eb_validate_vma() argument
492 GRAPHICS_VER(eb->i915) >= 12 && !IS_TIGERLAKE(eb->i915)) in eb_validate_vma()
495 if (unlikely(entry->flags & eb->invalid_flags)) in eb_validate_vma()
524 if (!eb->reloc_cache.has_fence) { in eb_validate_vma()
528 eb->reloc_cache.needs_unfenced) && in eb_validate_vma()
537 eb_add_vma(struct i915_execbuffer *eb, in eb_add_vma() argument
541 struct drm_i915_gem_exec_object2 *entry = &eb->exec[i]; in eb_add_vma()
542 struct eb_vma *ev = &eb->vma[i]; in eb_add_vma()
548 if (eb->lut_size > 0) { in eb_add_vma()
551 &eb->buckets[hash_32(entry->handle, in eb_add_vma()
552 eb->lut_size)]); in eb_add_vma()
556 list_add_tail(&ev->reloc_link, &eb->relocs); in eb_add_vma()
571 if (eb->reloc_cache.has_fence) in eb_add_vma()
574 eb->batch = ev; in eb_add_vma()
595 static int eb_reserve_vma(struct i915_execbuffer *eb, in eb_reserve_vma() argument
610 err = i915_vma_pin_ww(vma, &eb->ww, in eb_reserve_vma()
618 eb->args->flags |= __EXEC_HAS_RELOC; in eb_reserve_vma()
638 static int eb_reserve(struct i915_execbuffer *eb) in eb_reserve() argument
640 const unsigned int count = eb->buffer_count; in eb_reserve()
662 list_for_each_entry(ev, &eb->unbound, bind_link) { in eb_reserve()
663 err = eb_reserve_vma(eb, ev, pin_flags); in eb_reserve()
671 INIT_LIST_HEAD(&eb->unbound); in eb_reserve()
676 ev = &eb->vma[i]; in eb_reserve()
686 list_add(&ev->bind_link, &eb->unbound); in eb_reserve()
689 list_add_tail(&ev->bind_link, &eb->unbound); in eb_reserve()
696 list_splice_tail(&last, &eb->unbound); in eb_reserve()
704 mutex_lock(&eb->context->vm->mutex); in eb_reserve()
705 err = i915_gem_evict_vm(eb->context->vm); in eb_reserve()
706 mutex_unlock(&eb->context->vm->mutex); in eb_reserve()
719 static unsigned int eb_batch_index(const struct i915_execbuffer *eb) in eb_batch_index() argument
721 if (eb->args->flags & I915_EXEC_BATCH_FIRST) in eb_batch_index()
724 return eb->buffer_count - 1; in eb_batch_index()
727 static int eb_select_context(struct i915_execbuffer *eb) in eb_select_context() argument
731 ctx = i915_gem_context_lookup(eb->file->driver_priv, eb->args->rsvd1); in eb_select_context()
735 eb->gem_context = ctx; in eb_select_context()
737 eb->invalid_flags |= EXEC_OBJECT_NEEDS_GTT; in eb_select_context()
742 static int __eb_add_lut(struct i915_execbuffer *eb, in __eb_add_lut() argument
745 struct i915_gem_context *ctx = eb->gem_context; in __eb_add_lut()
774 if (idr_find(&eb->file->object_idr, handle) == obj) { in __eb_add_lut()
796 static struct i915_vma *eb_lookup_vma(struct i915_execbuffer *eb, u32 handle) in eb_lookup_vma() argument
798 struct i915_address_space *vm = eb->context->vm; in eb_lookup_vma()
806 vma = radix_tree_lookup(&eb->gem_context->handles_vma, handle); in eb_lookup_vma()
813 obj = i915_gem_object_lookup(eb->file, handle); in eb_lookup_vma()
823 err = __eb_add_lut(eb, handle, vma); in eb_lookup_vma()
833 static int eb_lookup_vmas(struct i915_execbuffer *eb) in eb_lookup_vmas() argument
835 struct drm_i915_private *i915 = eb->i915; in eb_lookup_vmas()
836 unsigned int batch = eb_batch_index(eb); in eb_lookup_vmas()
840 INIT_LIST_HEAD(&eb->relocs); in eb_lookup_vmas()
842 for (i = 0; i < eb->buffer_count; i++) { in eb_lookup_vmas()
845 vma = eb_lookup_vma(eb, eb->exec[i].handle); in eb_lookup_vmas()
851 err = eb_validate_vma(eb, &eb->exec[i], vma); in eb_lookup_vmas()
857 eb_add_vma(eb, i, batch, vma); in eb_lookup_vmas()
862 if (i + 1 < eb->buffer_count) { in eb_lookup_vmas()
869 eb->vma[i + 1].vma = NULL; in eb_lookup_vmas()
875 eb->vma[i].flags |= __EXEC_OBJECT_USERPTR_INIT; in eb_lookup_vmas()
876 eb->args->flags |= __EXEC_USERPTR_USED; in eb_lookup_vmas()
880 if (unlikely(eb->batch->flags & EXEC_OBJECT_WRITE)) { in eb_lookup_vmas()
887 eb->batch_start_offset, eb->batch_len, in eb_lookup_vmas()
888 eb->batch->vma->size)) { in eb_lookup_vmas()
893 if (eb->batch_len == 0) in eb_lookup_vmas()
894 eb->batch_len = eb->batch->vma->size - eb->batch_start_offset; in eb_lookup_vmas()
895 if (unlikely(eb->batch_len == 0)) { /* impossible! */ in eb_lookup_vmas()
903 eb->vma[i].vma = NULL; in eb_lookup_vmas()
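
A note on the batch bounds check visible at lines 880-894: eb_lookup_vmas() rejects a batch whose [batch_start_offset, batch_start_offset + batch_len) window does not fit inside the batch VMA, and a batch_len of 0 defaults to "the rest of the object". A standalone, user-space sketch of that overflow-safe range check (illustrative only, not the kernel's range_overflows macro):

    #include <stdint.h>
    #include <stdio.h>

    /* [start, start + len) must fit inside size without the sum wrapping. */
    static int range_ok(uint64_t start, uint64_t len, uint64_t size)
    {
        return start <= size && len <= size - start;
    }

    int main(void)
    {
        printf("%d %d\n",
               range_ok(16, 32, 4096),            /* fits: 1 */
               range_ok(4096, UINT64_MAX, 4096)); /* would wrap: 0 */
        return 0;
    }
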
907 static int eb_lock_vmas(struct i915_execbuffer *eb) in eb_lock_vmas() argument
912 for (i = 0; i < eb->buffer_count; i++) { in eb_lock_vmas()
913 struct eb_vma *ev = &eb->vma[i]; in eb_lock_vmas()
916 err = i915_gem_object_lock(vma->obj, &eb->ww); in eb_lock_vmas()
924 static int eb_validate_vmas(struct i915_execbuffer *eb) in eb_validate_vmas() argument
929 INIT_LIST_HEAD(&eb->unbound); in eb_validate_vmas()
931 err = eb_lock_vmas(eb); in eb_validate_vmas()
935 for (i = 0; i < eb->buffer_count; i++) { in eb_validate_vmas()
936 struct drm_i915_gem_exec_object2 *entry = &eb->exec[i]; in eb_validate_vmas()
937 struct eb_vma *ev = &eb->vma[i]; in eb_validate_vmas()
940 err = eb_pin_vma(eb, entry, ev); in eb_validate_vmas()
947 eb->args->flags |= __EXEC_HAS_RELOC; in eb_validate_vmas()
952 list_add_tail(&ev->bind_link, &eb->unbound); in eb_validate_vmas()
967 eb_vma_misplaced(&eb->exec[i], vma, ev->flags)); in eb_validate_vmas()
970 if (!list_empty(&eb->unbound)) in eb_validate_vmas()
971 return eb_reserve(eb); in eb_validate_vmas()
977 eb_get_vma(const struct i915_execbuffer *eb, unsigned long handle) in eb_get_vma() argument
979 if (eb->lut_size < 0) { in eb_get_vma()
980 if (handle >= -eb->lut_size) in eb_get_vma()
982 return &eb->vma[handle]; in eb_get_vma()
987 head = &eb->buckets[hash_32(handle, eb->lut_size)]; in eb_get_vma()
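
The lookup above pairs with eb_create() at lines 313-353: lut_size encodes the lookup mode in its sign. Positive means "number of hash bits for the eb->buckets array"; negative means -buffer_count, i.e. userspace passed I915_EXEC_HANDLE_LUT and execbuf handles are already direct indices into eb->vma. A standalone illustration of that encoding (simplified user-space C, not the kernel code; a plain mask stands in for hash_32() and the bucket-chain walk is elided):

    #include <stdio.h>

    struct exec_lut { int lut_size; /* >0: hash bits; <0: -buffer_count */ };

    static int lut_index(const struct exec_lut *lut, unsigned long handle)
    {
        if (lut->lut_size < 0) {
            if (handle >= (unsigned long)-lut->lut_size)
                return -1;          /* out of range; kernel returns NULL */
            return (int)handle;     /* handle is already the array index */
        }
        /* hash mode: kernel hashes the handle and walks the bucket chain */
        return (int)(handle & ((1u << lut->lut_size) - 1));
    }

    int main(void)
    {
        struct exec_lut direct = { .lut_size = -4 };
        printf("%d %d\n", lut_index(&direct, 2), lut_index(&direct, 7));
        return 0;
    }
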
996 static void eb_release_vmas(struct i915_execbuffer *eb, bool final) in eb_release_vmas() argument
998 const unsigned int count = eb->buffer_count; in eb_release_vmas()
1002 struct eb_vma *ev = &eb->vma[i]; in eb_release_vmas()
1014 eb_unpin_engine(eb); in eb_release_vmas()
1017 static void eb_destroy(const struct i915_execbuffer *eb) in eb_destroy() argument
1019 if (eb->lut_size > 0) in eb_destroy()
1020 kfree(eb->buckets); in eb_destroy()
1104 static void reloc_cache_reset(struct reloc_cache *cache, struct i915_execbuffer *eb) in reloc_cache_reset() argument
1180 struct i915_execbuffer *eb, in reloc_iomap() argument
1183 struct reloc_cache *cache = &eb->reloc_cache; in reloc_iomap()
1205 vma = i915_gem_object_ggtt_pin_ww(obj, &eb->ww, NULL, 0, 0, in reloc_iomap()
1247 struct i915_execbuffer *eb, in reloc_vaddr() argument
1250 struct reloc_cache *cache = &eb->reloc_cache; in reloc_vaddr()
1258 vaddr = reloc_iomap(obj, eb, page); in reloc_vaddr()
1292 struct i915_execbuffer *eb, in relocate_entry() argument
1297 bool wide = eb->reloc_cache.use_64bit_reloc; in relocate_entry()
1301 vaddr = reloc_vaddr(vma->obj, eb, in relocate_entry()
1309 eb->reloc_cache.vaddr); in relocate_entry()
1322 eb_relocate_entry(struct i915_execbuffer *eb, in eb_relocate_entry() argument
1326 struct drm_i915_private *i915 = eb->i915; in eb_relocate_entry()
1331 target = eb_get_vma(eb, reloc->target_handle); in eb_relocate_entry()
1368 GRAPHICS_VER(eb->i915) == 6 && in eb_relocate_entry()
1372 reloc_cache_unmap(&eb->reloc_cache); in eb_relocate_entry()
1378 reloc_cache_remap(&eb->reloc_cache, ev->vma->obj); in eb_relocate_entry()
1394 ev->vma->size - (eb->reloc_cache.use_64bit_reloc ? 8 : 4))) { in eb_relocate_entry()
1421 return relocate_entry(ev->vma, reloc, eb, target->vma); in eb_relocate_entry()
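
What a relocation ultimately does, per relocate_entry() at lines 1292-1309: patch the batch so the dword (or qword, when reloc_cache.use_64bit_reloc is set) at reloc->offset holds the target VMA's GPU address plus reloc->delta. A heavily simplified, hypothetical user-space model (the real path goes through reloc_vaddr()'s kmap/iomap cache and handles writes that straddle page boundaries):

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    struct reloc { uint64_t offset; uint64_t delta; };

    static void apply_reloc(uint8_t *batch, const struct reloc *r,
                            uint64_t target_addr, int wide)
    {
        uint64_t value = target_addr + r->delta;

        if (wide) {                         /* use_64bit_reloc */
            memcpy(batch + r->offset, &value, 8);
        } else {
            uint32_t v32 = (uint32_t)value;
            memcpy(batch + r->offset, &v32, 4);
        }
    }

    int main(void)
    {
        uint8_t batch[16] = {0};
        struct reloc r = { .offset = 4, .delta = 0x10 };
        uint64_t got;

        apply_reloc(batch, &r, 0x100000, 1);
        memcpy(&got, batch + 4, 8);
        printf("0x%llx\n", (unsigned long long)got);  /* 0x100010 */
        return 0;
    }

This is also why eb_relocate_entry() checks (line 1394) that the relocation offset leaves room for 8 or 4 bytes before the end of the object.
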
1424 static int eb_relocate_vma(struct i915_execbuffer *eb, struct eb_vma *ev) in eb_relocate_vma() argument
1468 u64 offset = eb_relocate_entry(eb, ev, r); in eb_relocate_vma()
1504 reloc_cache_reset(&eb->reloc_cache, eb); in eb_relocate_vma()
1509 eb_relocate_vma_slow(struct i915_execbuffer *eb, struct eb_vma *ev) in eb_relocate_vma_slow() argument
1518 u64 offset = eb_relocate_entry(eb, ev, &relocs[i]); in eb_relocate_vma_slow()
1527 reloc_cache_reset(&eb->reloc_cache, eb); in eb_relocate_vma_slow()
1558 static int eb_copy_relocations(const struct i915_execbuffer *eb) in eb_copy_relocations() argument
1561 const unsigned int count = eb->buffer_count; in eb_copy_relocations()
1566 const unsigned int nreloc = eb->exec[i].relocation_count; in eb_copy_relocations()
1574 err = check_relocations(&eb->exec[i]); in eb_copy_relocations()
1578 urelocs = u64_to_user_ptr(eb->exec[i].relocs_ptr); in eb_copy_relocations()
1620 eb->exec[i].relocs_ptr = (uintptr_t)relocs; in eb_copy_relocations()
1632 relocs = u64_to_ptr(typeof(*relocs), eb->exec[i].relocs_ptr); in eb_copy_relocations()
1633 if (eb->exec[i].relocation_count) in eb_copy_relocations()
1639 static int eb_prefault_relocations(const struct i915_execbuffer *eb) in eb_prefault_relocations() argument
1641 const unsigned int count = eb->buffer_count; in eb_prefault_relocations()
1647 err = check_relocations(&eb->exec[i]); in eb_prefault_relocations()
1655 static int eb_reinit_userptr(struct i915_execbuffer *eb) in eb_reinit_userptr() argument
1657 const unsigned int count = eb->buffer_count; in eb_reinit_userptr()
1661 if (likely(!(eb->args->flags & __EXEC_USERPTR_USED))) in eb_reinit_userptr()
1665 struct eb_vma *ev = &eb->vma[i]; in eb_reinit_userptr()
1680 static noinline int eb_relocate_parse_slow(struct i915_execbuffer *eb, in eb_relocate_parse_slow() argument
1694 eb_release_vmas(eb, false); in eb_relocate_parse_slow()
1695 i915_gem_ww_ctx_fini(&eb->ww); in eb_relocate_parse_slow()
1726 err = eb_prefault_relocations(eb); in eb_relocate_parse_slow()
1728 err = eb_copy_relocations(eb); in eb_relocate_parse_slow()
1736 err = eb_reinit_userptr(eb); in eb_relocate_parse_slow()
1739 i915_gem_ww_ctx_init(&eb->ww, true); in eb_relocate_parse_slow()
1745 rq = eb_pin_engine(eb, false); in eb_relocate_parse_slow()
1755 err = eb_validate_vmas(eb); in eb_relocate_parse_slow()
1759 GEM_BUG_ON(!eb->batch); in eb_relocate_parse_slow()
1761 list_for_each_entry(ev, &eb->relocs, reloc_link) { in eb_relocate_parse_slow()
1763 err = eb_relocate_vma(eb, ev); in eb_relocate_parse_slow()
1767 err = eb_relocate_vma_slow(eb, ev); in eb_relocate_parse_slow()
1783 err = eb_parse(eb); in eb_relocate_parse_slow()
1796 eb_release_vmas(eb, false); in eb_relocate_parse_slow()
1797 err = i915_gem_ww_ctx_backoff(&eb->ww); in eb_relocate_parse_slow()
1807 const unsigned int count = eb->buffer_count; in eb_relocate_parse_slow()
1812 &eb->exec[i]; in eb_relocate_parse_slow()
1829 static int eb_relocate_parse(struct i915_execbuffer *eb) in eb_relocate_parse() argument
1836 rq = eb_pin_engine(eb, throttle); in eb_relocate_parse()
1847 bool nonblock = eb->file->filp->f_flags & O_NONBLOCK; in eb_relocate_parse()
1866 err = eb_validate_vmas(eb); in eb_relocate_parse()
1873 if (eb->args->flags & __EXEC_HAS_RELOC) { in eb_relocate_parse()
1876 list_for_each_entry(ev, &eb->relocs, reloc_link) { in eb_relocate_parse()
1877 err = eb_relocate_vma(eb, ev); in eb_relocate_parse()
1889 err = eb_parse(eb); in eb_relocate_parse()
1893 eb_release_vmas(eb, false); in eb_relocate_parse()
1894 err = i915_gem_ww_ctx_backoff(&eb->ww); in eb_relocate_parse()
1902 err = eb_relocate_parse_slow(eb, rq); in eb_relocate_parse()
1911 eb->args->flags &= ~__EXEC_HAS_RELOC; in eb_relocate_parse()
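
The -EDEADLK handling at lines 1893-1902 (and 1796-1797 in the slow path) is the drm ww-mutex retry idiom: if locking or pinning any object reports a deadlock, release every VMA already acquired, call i915_gem_ww_ctx_backoff(), and restart the whole acquire phase. A toy, self-contained model of that loop (the struct and helpers here are simplified stand-ins, not the kernel API):

    #include <errno.h>
    #include <stdio.h>

    struct ww_ctx { int attempts; };

    /* Pretend the first pass hits contention and gets "wounded". */
    static int lock_all_objects(struct ww_ctx *ww)
    {
        return ww->attempts++ ? 0 : -EDEADLK;
    }

    /* Drop everything held so the retry starts from a clean slate. */
    static int ww_ctx_backoff(struct ww_ctx *ww) { (void)ww; return 0; }

    int main(void)
    {
        struct ww_ctx ww = { .attempts = 0 };
        int err;

    retry:
        err = lock_all_objects(&ww);
        if (err == -EDEADLK && ww_ctx_backoff(&ww) == 0)
            goto retry;

        printf("locked after %d attempt(s), err=%d\n", ww.attempts, err);
        return 0;
    }

Backing off rather than blocking is what guarantees forward progress when two execbuf calls try to lock overlapping object sets in different orders.
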
1916 static int eb_move_to_gpu(struct i915_execbuffer *eb) in eb_move_to_gpu() argument
1918 const unsigned int count = eb->buffer_count; in eb_move_to_gpu()
1923 struct eb_vma *ev = &eb->vma[i]; in eb_move_to_gpu()
1935 capture->next = eb->request->capture_list; in eb_move_to_gpu()
1937 eb->request->capture_list = capture; in eb_move_to_gpu()
1960 (eb->request, obj, flags & EXEC_OBJECT_WRITE); in eb_move_to_gpu()
1964 err = i915_vma_move_to_active(vma, eb->request, in eb_move_to_gpu()
1969 if (!err && (eb->args->flags & __EXEC_USERPTR_USED)) { in eb_move_to_gpu()
1970 read_lock(&eb->i915->mm.notifier_lock); in eb_move_to_gpu()
1977 struct eb_vma *ev = &eb->vma[i]; in eb_move_to_gpu()
1988 read_unlock(&eb->i915->mm.notifier_lock); in eb_move_to_gpu()
1996 intel_gt_chipset_flush(eb->engine->gt); in eb_move_to_gpu()
2000 i915_request_set_error_once(eb->request, err); in eb_move_to_gpu()
2055 shadow_batch_pin(struct i915_execbuffer *eb, in shadow_batch_pin() argument
2067 err = i915_vma_pin_ww(vma, &eb->ww, 0, 0, flags); in shadow_batch_pin()
2074 static struct i915_vma *eb_dispatch_secure(struct i915_execbuffer *eb, struct i915_vma *vma) in eb_dispatch_secure() argument
2080 if (eb->batch_flags & I915_DISPATCH_SECURE) in eb_dispatch_secure()
2081 return i915_gem_object_ggtt_pin_ww(vma->obj, &eb->ww, NULL, 0, 0, 0); in eb_dispatch_secure()
2086 static int eb_parse(struct i915_execbuffer *eb) in eb_parse() argument
2088 struct drm_i915_private *i915 = eb->i915; in eb_parse()
2089 struct intel_gt_buffer_pool_node *pool = eb->batch_pool; in eb_parse()
2094 if (!eb_use_cmdparser(eb)) { in eb_parse()
2095 batch = eb_dispatch_secure(eb, eb->batch->vma); in eb_parse()
2102 len = eb->batch_len; in eb_parse()
2103 if (!CMDPARSER_USES_GGTT(eb->i915)) { in eb_parse()
2108 if (!eb->context->vm->has_read_only) { in eb_parse()
2116 if (unlikely(len < eb->batch_len)) /* last paranoid check of overflow */ in eb_parse()
2120 pool = intel_gt_get_buffer_pool(eb->engine->gt, len, in eb_parse()
2124 eb->batch_pool = pool; in eb_parse()
2127 err = i915_gem_object_lock(pool->obj, &eb->ww); in eb_parse()
2131 shadow = shadow_batch_pin(eb, pool->obj, eb->context->vm, PIN_USER); in eb_parse()
2141 if (CMDPARSER_USES_GGTT(eb->i915)) { in eb_parse()
2144 shadow = shadow_batch_pin(eb, pool->obj, in eb_parse()
2145 &eb->engine->gt->ggtt->vm, in eb_parse()
2154 eb->batch_flags |= I915_DISPATCH_SECURE; in eb_parse()
2157 batch = eb_dispatch_secure(eb, shadow); in eb_parse()
2167 err = intel_engine_cmd_parser(eb->engine, in eb_parse()
2168 eb->batch->vma, in eb_parse()
2169 eb->batch_start_offset, in eb_parse()
2170 eb->batch_len, in eb_parse()
2175 eb->batch = &eb->vma[eb->buffer_count++]; in eb_parse()
2176 eb->batch->vma = i915_vma_get(shadow); in eb_parse()
2177 eb->batch->flags = __EXEC_OBJECT_HAS_PIN; in eb_parse()
2179 eb->trampoline = trampoline; in eb_parse()
2180 eb->batch_start_offset = 0; in eb_parse()
2184 eb->batch = &eb->vma[eb->buffer_count++]; in eb_parse()
2185 eb->batch->flags = __EXEC_OBJECT_HAS_PIN; in eb_parse()
2186 eb->batch->vma = i915_vma_get(batch); in eb_parse()
2202 static int eb_submit(struct i915_execbuffer *eb, struct i915_vma *batch) in eb_submit() argument
2206 if (intel_context_nopreempt(eb->context)) in eb_submit()
2207 __set_bit(I915_FENCE_FLAG_NOPREEMPT, &eb->request->fence.flags); in eb_submit()
2209 err = eb_move_to_gpu(eb); in eb_submit()
2213 if (eb->args->flags & I915_EXEC_GEN7_SOL_RESET) { in eb_submit()
2214 err = i915_reset_gen7_sol_offsets(eb->request); in eb_submit()
2225 if (eb->engine->emit_init_breadcrumb) { in eb_submit()
2226 err = eb->engine->emit_init_breadcrumb(eb->request); in eb_submit()
2231 err = eb->engine->emit_bb_start(eb->request, in eb_submit()
2233 eb->batch_start_offset, in eb_submit()
2234 eb->batch_len, in eb_submit()
2235 eb->batch_flags); in eb_submit()
2239 if (eb->trampoline) { in eb_submit()
2240 GEM_BUG_ON(eb->batch_start_offset); in eb_submit()
2241 err = eb->engine->emit_bb_start(eb->request, in eb_submit()
2242 eb->trampoline->node.start + in eb_submit()
2243 eb->batch_len, in eb_submit()
2283 static struct i915_request *eb_throttle(struct i915_execbuffer *eb, struct intel_context *ce) in eb_throttle() argument
2317 static struct i915_request *eb_pin_engine(struct i915_execbuffer *eb, bool throttle) in eb_pin_engine() argument
2319 struct intel_context *ce = eb->context; in eb_pin_engine()
2324 GEM_BUG_ON(eb->args->flags & __EXEC_ENGINE_PINNED); in eb_pin_engine()
2334 err = intel_context_pin_ww(ce, &eb->ww); in eb_pin_engine()
2354 rq = eb_throttle(eb, ce); in eb_pin_engine()
2357 eb->args->flags |= __EXEC_ENGINE_PINNED; in eb_pin_engine()
2361 static void eb_unpin_engine(struct i915_execbuffer *eb) in eb_unpin_engine() argument
2363 struct intel_context *ce = eb->context; in eb_unpin_engine()
2366 if (!(eb->args->flags & __EXEC_ENGINE_PINNED)) in eb_unpin_engine()
2369 eb->args->flags &= ~__EXEC_ENGINE_PINNED; in eb_unpin_engine()
2379 eb_select_legacy_ring(struct i915_execbuffer *eb) in eb_select_legacy_ring() argument
2381 struct drm_i915_private *i915 = eb->i915; in eb_select_legacy_ring()
2382 struct drm_i915_gem_execbuffer2 *args = eb->args; in eb_select_legacy_ring()
2397 bsd_idx = gen8_dispatch_bsd_engine(i915, eb->file); in eb_select_legacy_ring()
2422 eb_select_engine(struct i915_execbuffer *eb) in eb_select_engine() argument
2428 if (i915_gem_context_user_engines(eb->gem_context)) in eb_select_engine()
2429 idx = eb->args->flags & I915_EXEC_RING_MASK; in eb_select_engine()
2431 idx = eb_select_legacy_ring(eb); in eb_select_engine()
2433 ce = i915_gem_context_get_engine(eb->gem_context, idx); in eb_select_engine()
2453 eb->context = ce; in eb_select_engine()
2454 eb->engine = ce->engine; in eb_select_engine()
2470 eb_put_engine(struct i915_execbuffer *eb) in eb_put_engine() argument
2472 intel_gt_pm_put(eb->engine->gt); in eb_put_engine()
2473 intel_context_put(eb->context); in eb_put_engine()
2488 add_timeline_fence_array(struct i915_execbuffer *eb, in add_timeline_fence_array() argument
2505 SIZE_MAX / sizeof(*f)) - eb->num_fences) in add_timeline_fence_array()
2516 f = krealloc(eb->fences, in add_timeline_fence_array()
2517 (eb->num_fences + nfences) * sizeof(*f), in add_timeline_fence_array()
2522 eb->fences = f; in add_timeline_fence_array()
2523 f += eb->num_fences; in add_timeline_fence_array()
2545 syncobj = drm_syncobj_find(eb->file, user_fence.handle); in add_timeline_fence_array()
2610 eb->num_fences++; in add_timeline_fence_array()
2616 static int add_fence_array(struct i915_execbuffer *eb) in add_fence_array() argument
2618 struct drm_i915_gem_execbuffer2 *args = eb->args; in add_fence_array()
2633 SIZE_MAX / sizeof(*f) - eb->num_fences)) in add_fence_array()
2640 f = krealloc(eb->fences, in add_fence_array()
2641 (eb->num_fences + num_fences) * sizeof(*f), in add_fence_array()
2646 eb->fences = f; in add_fence_array()
2647 f += eb->num_fences; in add_fence_array()
2659 syncobj = drm_syncobj_find(eb->file, user_fence.handle); in add_fence_array()
2682 eb->num_fences++; in add_fence_array()
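
Both fence paths above grow eb->fences with the same guarded-krealloc pattern: first check that (num_fences + nfences) * sizeof(*f) cannot overflow (the SIZE_MAX / sizeof(*f) comparisons at lines 2505 and 2633), then krealloc and append. A user-space analogue (illustrative; the kernel variants also populate the new slots from the user-supplied fence arrays):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct fence_slot { uint64_t handle; uint64_t value; };

    static int grow_fences(struct fence_slot **fences, size_t *num,
                           size_t extra)
    {
        struct fence_slot *f;

        /* overflow guard, mirroring the SIZE_MAX / sizeof(*f) check */
        if (extra > SIZE_MAX / sizeof(*f) - *num)
            return -1;                      /* kernel: -EINVAL */

        f = realloc(*fences, (*num + extra) * sizeof(*f));
        if (!f)
            return -1;                      /* kernel: -ENOMEM */

        memset(f + *num, 0, extra * sizeof(*f));
        *fences = f;
        *num += extra;
        return 0;
    }

    int main(void)
    {
        struct fence_slot *fences = NULL;
        size_t num = 0;

        if (!grow_fences(&fences, &num, 2) && !grow_fences(&fences, &num, 3))
            printf("%zu slots\n", num);     /* 5 slots */
        free(fences);
        return 0;
    }
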
2695 await_fence_array(struct i915_execbuffer *eb) in await_fence_array() argument
2700 for (n = 0; n < eb->num_fences; n++) { in await_fence_array()
2704 syncobj = ptr_unpack_bits(eb->fences[n].syncobj, &flags, 2); in await_fence_array()
2706 if (!eb->fences[n].dma_fence) in await_fence_array()
2709 err = i915_request_await_dma_fence(eb->request, in await_fence_array()
2710 eb->fences[n].dma_fence); in await_fence_array()
2718 static void signal_fence_array(const struct i915_execbuffer *eb) in signal_fence_array() argument
2720 struct dma_fence * const fence = &eb->request->fence; in signal_fence_array()
2723 for (n = 0; n < eb->num_fences; n++) { in signal_fence_array()
2727 syncobj = ptr_unpack_bits(eb->fences[n].syncobj, &flags, 2); in signal_fence_array()
2731 if (eb->fences[n].chain_fence) { in signal_fence_array()
2733 eb->fences[n].chain_fence, in signal_fence_array()
2735 eb->fences[n].value); in signal_fence_array()
2740 eb->fences[n].chain_fence = NULL; in signal_fence_array()
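
The ptr_unpack_bits() calls at lines 2704 and 2727 rely on pointer alignment: a drm_syncobj pointer has zero low bits, so the wait/signal flags ride in the bottom two bits of the stored pointer. A minimal re-creation of the idiom (signatures simplified relative to the kernel's ptr_pack_bits/ptr_unpack_bits macros):

    #include <stdint.h>
    #include <stdio.h>

    #define FENCE_WAIT   (1u << 0)
    #define FENCE_SIGNAL (1u << 1)

    static void *ptr_pack_bits(void *ptr, unsigned int bits)
    {
        return (void *)((uintptr_t)ptr | bits);
    }

    static void *ptr_unpack_bits(void *packed, unsigned int *bits, int nbits)
    {
        uintptr_t mask = ((uintptr_t)1 << nbits) - 1;

        *bits = (unsigned int)((uintptr_t)packed & mask);
        return (void *)((uintptr_t)packed & ~mask);
    }

    int main(void)
    {
        static int syncobj;                 /* aligned stand-in object */
        unsigned int flags;
        void *packed = ptr_pack_bits(&syncobj, FENCE_SIGNAL);
        void *ptr = ptr_unpack_bits(packed, &flags, 2);

        printf("ptr ok: %d, signal set: %d\n",
               ptr == (void *)&syncobj, !!(flags & FENCE_SIGNAL));
        return 0;
    }
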
2750 struct i915_execbuffer *eb = data; in parse_timeline_fences() local
2756 return add_timeline_fence_array(eb, &timeline_fences); in parse_timeline_fences()
2768 static int eb_request_add(struct i915_execbuffer *eb, int err) in eb_request_add() argument
2770 struct i915_request *rq = eb->request; in eb_request_add()
2783 if (likely(!intel_context_is_closed(eb->context))) { in eb_request_add()
2784 attr = eb->gem_context->sched; in eb_request_add()
2809 struct i915_execbuffer *eb) in parse_execbuf2_extensions() argument
2817 if (eb->args->flags & I915_EXEC_FENCE_ARRAY) in parse_execbuf2_extensions()
2826 eb); in parse_execbuf2_extensions()
2836 struct i915_execbuffer eb; in i915_gem_do_execbuffer() local
2847 eb.i915 = i915; in i915_gem_do_execbuffer()
2848 eb.file = file; in i915_gem_do_execbuffer()
2849 eb.args = args; in i915_gem_do_execbuffer()
2853 eb.exec = exec; in i915_gem_do_execbuffer()
2854 eb.vma = (struct eb_vma *)(exec + args->buffer_count + 1); in i915_gem_do_execbuffer()
2855 eb.vma[0].vma = NULL; in i915_gem_do_execbuffer()
2856 eb.batch_pool = NULL; in i915_gem_do_execbuffer()
2858 eb.invalid_flags = __EXEC_OBJECT_UNKNOWN_FLAGS; in i915_gem_do_execbuffer()
2859 reloc_cache_init(&eb.reloc_cache, eb.i915); in i915_gem_do_execbuffer()
2861 eb.buffer_count = args->buffer_count; in i915_gem_do_execbuffer()
2862 eb.batch_start_offset = args->batch_start_offset; in i915_gem_do_execbuffer()
2863 eb.batch_len = args->batch_len; in i915_gem_do_execbuffer()
2864 eb.trampoline = NULL; in i915_gem_do_execbuffer()
2866 eb.fences = NULL; in i915_gem_do_execbuffer()
2867 eb.num_fences = 0; in i915_gem_do_execbuffer()
2869 eb.batch_flags = 0; in i915_gem_do_execbuffer()
2881 eb.batch_flags |= I915_DISPATCH_SECURE; in i915_gem_do_execbuffer()
2884 eb.batch_flags |= I915_DISPATCH_PINNED; in i915_gem_do_execbuffer()
2886 err = parse_execbuf2_extensions(args, &eb); in i915_gem_do_execbuffer()
2890 err = add_fence_array(&eb); in i915_gem_do_execbuffer()
2915 err = eb_create(&eb); in i915_gem_do_execbuffer()
2919 GEM_BUG_ON(!eb.lut_size); in i915_gem_do_execbuffer()
2921 err = eb_select_context(&eb); in i915_gem_do_execbuffer()
2925 err = eb_select_engine(&eb); in i915_gem_do_execbuffer()
2929 err = eb_lookup_vmas(&eb); in i915_gem_do_execbuffer()
2931 eb_release_vmas(&eb, true); in i915_gem_do_execbuffer()
2935 i915_gem_ww_ctx_init(&eb.ww, true); in i915_gem_do_execbuffer()
2937 err = eb_relocate_parse(&eb); in i915_gem_do_execbuffer()
2950 ww_acquire_done(&eb.ww.ctx); in i915_gem_do_execbuffer()
2952 batch = eb.batch->vma; in i915_gem_do_execbuffer()
2955 eb.request = i915_request_create(eb.context); in i915_gem_do_execbuffer()
2956 if (IS_ERR(eb.request)) { in i915_gem_do_execbuffer()
2957 err = PTR_ERR(eb.request); in i915_gem_do_execbuffer()
2961 if (unlikely(eb.gem_context->syncobj)) { in i915_gem_do_execbuffer()
2964 fence = drm_syncobj_fence_get(eb.gem_context->syncobj); in i915_gem_do_execbuffer()
2965 err = i915_request_await_dma_fence(eb.request, fence); in i915_gem_do_execbuffer()
2973 err = i915_request_await_execution(eb.request, in i915_gem_do_execbuffer()
2976 err = i915_request_await_dma_fence(eb.request, in i915_gem_do_execbuffer()
2982 if (eb.fences) { in i915_gem_do_execbuffer()
2983 err = await_fence_array(&eb); in i915_gem_do_execbuffer()
2989 out_fence = sync_file_create(&eb.request->fence); in i915_gem_do_execbuffer()
3003 eb.request->batch = batch; in i915_gem_do_execbuffer()
3004 if (eb.batch_pool) in i915_gem_do_execbuffer()
3005 intel_gt_buffer_pool_mark_active(eb.batch_pool, eb.request); in i915_gem_do_execbuffer()
3007 trace_i915_request_queue(eb.request, eb.batch_flags); in i915_gem_do_execbuffer()
3008 err = eb_submit(&eb, batch); in i915_gem_do_execbuffer()
3011 i915_request_get(eb.request); in i915_gem_do_execbuffer()
3012 err = eb_request_add(&eb, err); in i915_gem_do_execbuffer()
3014 if (eb.fences) in i915_gem_do_execbuffer()
3015 signal_fence_array(&eb); in i915_gem_do_execbuffer()
3028 if (unlikely(eb.gem_context->syncobj)) { in i915_gem_do_execbuffer()
3029 drm_syncobj_replace_fence(eb.gem_context->syncobj, in i915_gem_do_execbuffer()
3030 &eb.request->fence); in i915_gem_do_execbuffer()
3033 i915_request_put(eb.request); in i915_gem_do_execbuffer()
3036 eb_release_vmas(&eb, true); in i915_gem_do_execbuffer()
3037 if (eb.trampoline) in i915_gem_do_execbuffer()
3038 i915_vma_unpin(eb.trampoline); in i915_gem_do_execbuffer()
3040 i915_gem_ww_ctx_fini(&eb.ww); in i915_gem_do_execbuffer()
3042 if (eb.batch_pool) in i915_gem_do_execbuffer()
3043 intel_gt_buffer_pool_put(eb.batch_pool); in i915_gem_do_execbuffer()
3045 eb_put_engine(&eb); in i915_gem_do_execbuffer()
3047 i915_gem_context_put(eb.gem_context); in i915_gem_do_execbuffer()
3049 eb_destroy(&eb); in i915_gem_do_execbuffer()
3056 put_fence_array(eb.fences, eb.num_fences); in i915_gem_do_execbuffer()