Lines Matching refs:gpuvm
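
The hits below appear to come from the kernel's GPUVM library (drivers/gpu/drm/drm_gpuvm.c). Each entry gives the source line number, the matching line, the enclosing function, and whether gpuvm is referenced there as an argument or a local. Short usage sketches follow each functional cluster; they are reconstructions from the matched lines and from include/drm/drm_gpuvm.h as far as it is visible here, not verbatim kernel code.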

758 __restore_vm_bo_list(struct drm_gpuvm *gpuvm, spinlock_t *lock,  in __restore_vm_bo_list()  argument
800 __drm_gpuvm_bo_list_add(struct drm_gpuvm *gpuvm, spinlock_t *lock, in __drm_gpuvm_bo_list_add() argument
825 __drm_gpuvm_bo_list_del(struct drm_gpuvm *gpuvm, spinlock_t *lock, in __drm_gpuvm_bo_list_del() argument
880 static int __drm_gpuva_insert(struct drm_gpuvm *gpuvm,
893 drm_gpuvm_warn_check_overflow(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_warn_check_overflow() argument
895 return drm_WARN(gpuvm->drm, drm_gpuvm_check_overflow(addr, range), in drm_gpuvm_warn_check_overflow()
900 drm_gpuvm_in_mm_range(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_in_mm_range() argument
903 u64 mm_start = gpuvm->mm_start; in drm_gpuvm_in_mm_range()
904 u64 mm_end = mm_start + gpuvm->mm_range; in drm_gpuvm_in_mm_range()
910 drm_gpuvm_in_kernel_node(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_in_kernel_node() argument
913 u64 kstart = gpuvm->kernel_alloc_node.va.addr; in drm_gpuvm_in_kernel_node()
914 u64 krange = gpuvm->kernel_alloc_node.va.range; in drm_gpuvm_in_kernel_node()
932 drm_gpuvm_range_valid(struct drm_gpuvm *gpuvm, in drm_gpuvm_range_valid() argument
936 drm_gpuvm_in_mm_range(gpuvm, addr, range) && in drm_gpuvm_range_valid()
937 !drm_gpuvm_in_kernel_node(gpuvm, addr, range); in drm_gpuvm_range_valid()
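
Lines 893-937 make up the range-validation chain: a requested span must not overflow, must lie inside the managed VA range, and must not intersect the kernel-reserved node. A sketch reconstructed from the matched lines (exact bodies may differ slightly):

    static bool
    drm_gpuvm_in_mm_range(struct drm_gpuvm *gpuvm, u64 addr, u64 range)
    {
            u64 end = addr + range;
            u64 mm_start = gpuvm->mm_start;
            u64 mm_end = mm_start + gpuvm->mm_range;

            return addr >= mm_start && end <= mm_end;
    }

    static bool
    drm_gpuvm_in_kernel_node(struct drm_gpuvm *gpuvm, u64 addr, u64 range)
    {
            u64 end = addr + range;
            u64 kstart = gpuvm->kernel_alloc_node.va.addr;
            u64 krange = gpuvm->kernel_alloc_node.va.range;

            /* Overlap with the reserved node; krange == 0 means no node. */
            return krange && addr < kstart + krange && kstart < end;
    }

    static bool
    drm_gpuvm_range_valid(struct drm_gpuvm *gpuvm, u64 addr, u64 range)
    {
            return addr + range > addr &&   /* no wrap-around */
                   drm_gpuvm_in_mm_range(gpuvm, addr, range) &&
                   !drm_gpuvm_in_kernel_node(gpuvm, addr, range);
    }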
997 drm_gpuvm_init(struct drm_gpuvm *gpuvm, const char *name, in drm_gpuvm_init() argument
1005 gpuvm->rb.tree = RB_ROOT_CACHED; in drm_gpuvm_init()
1006 INIT_LIST_HEAD(&gpuvm->rb.list); in drm_gpuvm_init()
1008 INIT_LIST_HEAD(&gpuvm->extobj.list); in drm_gpuvm_init()
1009 spin_lock_init(&gpuvm->extobj.lock); in drm_gpuvm_init()
1011 INIT_LIST_HEAD(&gpuvm->evict.list); in drm_gpuvm_init()
1012 spin_lock_init(&gpuvm->evict.lock); in drm_gpuvm_init()
1014 kref_init(&gpuvm->kref); in drm_gpuvm_init()
1016 gpuvm->name = name ? name : "unknown"; in drm_gpuvm_init()
1017 gpuvm->flags = flags; in drm_gpuvm_init()
1018 gpuvm->ops = ops; in drm_gpuvm_init()
1019 gpuvm->drm = drm; in drm_gpuvm_init()
1020 gpuvm->r_obj = r_obj; in drm_gpuvm_init()
1024 drm_gpuvm_warn_check_overflow(gpuvm, start_offset, range); in drm_gpuvm_init()
1025 gpuvm->mm_start = start_offset; in drm_gpuvm_init()
1026 gpuvm->mm_range = range; in drm_gpuvm_init()
1028 memset(&gpuvm->kernel_alloc_node, 0, sizeof(struct drm_gpuva)); in drm_gpuvm_init()
1030 gpuvm->kernel_alloc_node.va.addr = reserve_offset; in drm_gpuvm_init()
1031 gpuvm->kernel_alloc_node.va.range = reserve_range; in drm_gpuvm_init()
1033 if (likely(!drm_gpuvm_warn_check_overflow(gpuvm, reserve_offset, in drm_gpuvm_init()
1035 __drm_gpuva_insert(gpuvm, &gpuvm->kernel_alloc_node); in drm_gpuvm_init()
1041 drm_gpuvm_fini(struct drm_gpuvm *gpuvm) in drm_gpuvm_fini() argument
1043 gpuvm->name = NULL; in drm_gpuvm_fini()
1045 if (gpuvm->kernel_alloc_node.va.range) in drm_gpuvm_fini()
1046 __drm_gpuva_remove(&gpuvm->kernel_alloc_node); in drm_gpuvm_fini()
1048 drm_WARN(gpuvm->drm, !RB_EMPTY_ROOT(&gpuvm->rb.tree.rb_root), in drm_gpuvm_fini()
1051 drm_WARN(gpuvm->drm, !list_empty(&gpuvm->extobj.list), in drm_gpuvm_fini()
1053 drm_WARN(gpuvm->drm, !list_empty(&gpuvm->evict.list), in drm_gpuvm_fini()
1056 drm_gem_object_put(gpuvm->r_obj); in drm_gpuvm_fini()
1062 struct drm_gpuvm *gpuvm = container_of(kref, struct drm_gpuvm, kref); in drm_gpuvm_free() local
1064 drm_gpuvm_fini(gpuvm); in drm_gpuvm_free()
1066 if (drm_WARN_ON(gpuvm->drm, !gpuvm->ops->vm_free)) in drm_gpuvm_free()
1069 gpuvm->ops->vm_free(gpuvm); in drm_gpuvm_free()
1081 drm_gpuvm_put(struct drm_gpuvm *gpuvm) in drm_gpuvm_put() argument
1083 if (gpuvm) in drm_gpuvm_put()
1084 kref_put(&gpuvm->kref, drm_gpuvm_free); in drm_gpuvm_put()
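
Lines 997-1084 cover the VM lifecycle: drm_gpuvm_init() seeds the rb-tree, the extobj and evict lists, the kref, and (when a reserve range is given) the kernel_alloc_node; the final drm_gpuvm_put() runs drm_gpuvm_free(), which calls drm_gpuvm_fini() and then the driver's mandatory vm_free() hook. A minimal driver-side sketch; my_vm, my_vm_free and the address-space bounds are illustrative, and the drm_gpuvm_init() argument order is assumed from include/drm/drm_gpuvm.h:

    #include <drm/drm_gpuvm.h>
    #include <linux/sizes.h>
    #include <linux/slab.h>

    struct my_vm {
            struct drm_gpuvm base;
    };

    static void my_vm_free(struct drm_gpuvm *gpuvm)
    {
            kfree(container_of(gpuvm, struct my_vm, base));
    }

    static const struct drm_gpuvm_ops my_vm_ops = {
            /* Mandatory: drm_gpuvm_free() WARNs and bails if unset (line 1066). */
            .vm_free = my_vm_free,
    };

    static struct drm_gpuvm *
    my_vm_create(struct drm_device *drm, struct drm_gem_object *r_obj)
    {
            struct my_vm *vm = kzalloc(sizeof(*vm), GFP_KERNEL);

            if (!vm)
                    return ERR_PTR(-ENOMEM);

            drm_gpuvm_init(&vm->base, "my-vm", 0, drm, r_obj,
                           0, 1ULL << 48,   /* managed VA: start, range */
                           0, SZ_64K,       /* kernel-reserved node */
                           &my_vm_ops);
            return &vm->base;               /* release with drm_gpuvm_put() */
    }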
1111 drm_gpuvm_prepare_vm(struct drm_gpuvm *gpuvm, in drm_gpuvm_prepare_vm() argument
1115 return exec_prepare_obj(exec, gpuvm->r_obj, num_fences); in drm_gpuvm_prepare_vm()
1120 __drm_gpuvm_prepare_objects(struct drm_gpuvm *gpuvm, in __drm_gpuvm_prepare_objects() argument
1128 for_each_vm_bo_in_list(gpuvm, extobj, &extobjs, vm_bo) { in __drm_gpuvm_prepare_objects()
1135 restore_vm_bo_list(gpuvm, extobj); in __drm_gpuvm_prepare_objects()
1141 drm_gpuvm_prepare_objects_locked(struct drm_gpuvm *gpuvm, in drm_gpuvm_prepare_objects_locked() argument
1148 drm_gpuvm_resv_assert_held(gpuvm); in drm_gpuvm_prepare_objects_locked()
1149 list_for_each_entry(vm_bo, &gpuvm->extobj.list, list.entry.extobj) { in drm_gpuvm_prepare_objects_locked()
1185 drm_gpuvm_prepare_objects(struct drm_gpuvm *gpuvm, in drm_gpuvm_prepare_objects() argument
1189 if (drm_gpuvm_resv_protected(gpuvm)) in drm_gpuvm_prepare_objects()
1190 return drm_gpuvm_prepare_objects_locked(gpuvm, exec, in drm_gpuvm_prepare_objects()
1193 return __drm_gpuvm_prepare_objects(gpuvm, exec, num_fences); in drm_gpuvm_prepare_objects()
1212 drm_gpuvm_prepare_range(struct drm_gpuvm *gpuvm, struct drm_exec *exec, in drm_gpuvm_prepare_range() argument
1219 drm_gpuvm_for_each_va_range(va, gpuvm, addr, end) { in drm_gpuvm_prepare_range()
1248 struct drm_gpuvm *gpuvm = vm_exec->vm; in drm_gpuvm_exec_lock() local
1256 ret = drm_gpuvm_prepare_vm(gpuvm, exec, num_fences); in drm_gpuvm_exec_lock()
1261 ret = drm_gpuvm_prepare_objects(gpuvm, exec, num_fences); in drm_gpuvm_exec_lock()
1340 struct drm_gpuvm *gpuvm = vm_exec->vm; in drm_gpuvm_exec_lock_range() local
1347 ret = drm_gpuvm_prepare_range(gpuvm, exec, addr, range, in drm_gpuvm_exec_lock_range()
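
Lines 1111-1347 are the locking helpers: drm_gpuvm_prepare_vm() reserves fence slots on the VM's common resv, drm_gpuvm_prepare_objects() does the same for every external object (with a resv-protected variant that only asserts the lock), and drm_gpuvm_exec_lock()/drm_gpuvm_exec_lock_range() wrap both behind drm_exec's contention-retry loop. A sketch of the wrapper; DRM_EXEC_INTERRUPTIBLE_WAIT comes from drm_exec.h:

    static int my_lock_all(struct drm_gpuvm *gpuvm)
    {
            struct drm_gpuvm_exec vm_exec = {
                    .vm = gpuvm,
                    .flags = DRM_EXEC_INTERRUPTIBLE_WAIT,
                    .num_fences = 1,        /* slots to reserve per resv */
            };
            int ret;

            ret = drm_gpuvm_exec_lock(&vm_exec);    /* VM resv + all extobjs */
            if (ret)
                    return ret;

            /* ... everything relevant is now locked ... */

            drm_gpuvm_exec_unlock(&vm_exec);
            return 0;
    }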
1363 __drm_gpuvm_validate(struct drm_gpuvm *gpuvm, struct drm_exec *exec) in __drm_gpuvm_validate() argument
1365 const struct drm_gpuvm_ops *ops = gpuvm->ops; in __drm_gpuvm_validate()
1370 for_each_vm_bo_in_list(gpuvm, evict, &evict, vm_bo) { in __drm_gpuvm_validate()
1377 restore_vm_bo_list(gpuvm, evict); in __drm_gpuvm_validate()
1383 drm_gpuvm_validate_locked(struct drm_gpuvm *gpuvm, struct drm_exec *exec) in drm_gpuvm_validate_locked() argument
1385 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_validate_locked()
1389 drm_gpuvm_resv_assert_held(gpuvm); in drm_gpuvm_validate_locked()
1391 list_for_each_entry_safe(vm_bo, next, &gpuvm->evict.list, in drm_gpuvm_validate_locked()
1416 drm_gpuvm_validate(struct drm_gpuvm *gpuvm, struct drm_exec *exec) in drm_gpuvm_validate() argument
1418 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_validate()
1423 if (drm_gpuvm_resv_protected(gpuvm)) in drm_gpuvm_validate()
1424 return drm_gpuvm_validate_locked(gpuvm, exec); in drm_gpuvm_validate()
1426 return __drm_gpuvm_validate(gpuvm, exec); in drm_gpuvm_validate()
1440 drm_gpuvm_resv_add_fence(struct drm_gpuvm *gpuvm, in drm_gpuvm_resv_add_fence() argument
1452 drm_gpuvm_is_extobj(gpuvm, obj) ? in drm_gpuvm_resv_add_fence()
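
Lines 1363-1426 show drm_gpuvm_validate() walking the evicted list and invoking the driver's vm_bo_validate() hook per entry, picking the locked variant when the VM is resv-protected; lines 1440-1452 show drm_gpuvm_resv_add_fence() publishing a fence with separate dma-resv usages for private and external objects. Continuing inside the locked section of the previous sketch, where fence is the job fence produced by the caller:

    ret = drm_gpuvm_validate(gpuvm, &vm_exec.exec);
    if (ret)
            goto out_unlock;

    /* ... submit the job, obtaining fence ... */

    drm_gpuvm_resv_add_fence(gpuvm, &vm_exec.exec, fence,
                             DMA_RESV_USAGE_BOOKKEEP,   /* private BOs */
                             DMA_RESV_USAGE_BOOKKEEP);  /* external BOs */
    out_unlock:
            drm_gpuvm_exec_unlock(&vm_exec);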
1469 drm_gpuvm_bo_create(struct drm_gpuvm *gpuvm, in drm_gpuvm_bo_create() argument
1472 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_bo_create()
1483 vm_bo->vm = drm_gpuvm_get(gpuvm); in drm_gpuvm_bo_create()
1503 struct drm_gpuvm *gpuvm = vm_bo->vm; in drm_gpuvm_bo_destroy() local
1504 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_bo_destroy()
1506 bool lock = !drm_gpuvm_resv_protected(gpuvm); in drm_gpuvm_bo_destroy()
1509 drm_gpuvm_resv_assert_held(gpuvm); in drm_gpuvm_bo_destroy()
1522 drm_gpuvm_put(gpuvm); in drm_gpuvm_bo_destroy()
1554 __drm_gpuvm_bo_find(struct drm_gpuvm *gpuvm, in __drm_gpuvm_bo_find() argument
1561 if (vm_bo->vm == gpuvm) in __drm_gpuvm_bo_find()
1580 drm_gpuvm_bo_find(struct drm_gpuvm *gpuvm, in drm_gpuvm_bo_find() argument
1583 struct drm_gpuvm_bo *vm_bo = __drm_gpuvm_bo_find(gpuvm, obj); in drm_gpuvm_bo_find()
1605 drm_gpuvm_bo_obtain(struct drm_gpuvm *gpuvm, in drm_gpuvm_bo_obtain() argument
1610 vm_bo = drm_gpuvm_bo_find(gpuvm, obj); in drm_gpuvm_bo_obtain()
1614 vm_bo = drm_gpuvm_bo_create(gpuvm, obj); in drm_gpuvm_bo_obtain()
1644 struct drm_gpuvm *gpuvm = __vm_bo->vm; in drm_gpuvm_bo_obtain_prealloc() local
1648 vm_bo = drm_gpuvm_bo_find(gpuvm, obj); in drm_gpuvm_bo_obtain_prealloc()
1673 struct drm_gpuvm *gpuvm = vm_bo->vm; in drm_gpuvm_bo_extobj_add() local
1674 bool lock = !drm_gpuvm_resv_protected(gpuvm); in drm_gpuvm_bo_extobj_add()
1677 drm_gpuvm_resv_assert_held(gpuvm); in drm_gpuvm_bo_extobj_add()
1679 if (drm_gpuvm_is_extobj(gpuvm, vm_bo->obj)) in drm_gpuvm_bo_extobj_add()
1695 struct drm_gpuvm *gpuvm = vm_bo->vm; in drm_gpuvm_bo_evict() local
1697 bool lock = !drm_gpuvm_resv_protected(gpuvm); in drm_gpuvm_bo_evict()
1706 if (drm_gpuvm_is_extobj(gpuvm, obj) && !lock) in drm_gpuvm_bo_evict()
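
Lines 1469-1706 cover the drm_gpuvm_bo, the per-VM-and-GEM combo that anchors the extobj and evict list entries: drm_gpuvm_bo_obtain() returns an existing combo with a fresh reference or creates one, drm_gpuvm_bo_extobj_add() moves it onto the external-object list when the BO does not share the VM's resv, and drm_gpuvm_bo_evict() marks it for revalidation. Typical use when binding a BO:

    struct drm_gpuvm_bo *vm_bo;

    /* Caller must hold the GEM object's gpuva lock. */
    vm_bo = drm_gpuvm_bo_obtain(gpuvm, obj);
    if (IS_ERR(vm_bo))
            return PTR_ERR(vm_bo);

    /* ... link mappings to it, see drm_gpuva_link() below ... */

    drm_gpuvm_bo_put(vm_bo);        /* drop the obtain reference */

From the driver's eviction path, drm_gpuvm_bo_evict(vm_bo, true) then places the combo on the evicted list that drm_gpuvm_validate() processes.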
1717 __drm_gpuva_insert(struct drm_gpuvm *gpuvm, in __drm_gpuva_insert() argument
1723 if (drm_gpuva_it_iter_first(&gpuvm->rb.tree, in __drm_gpuva_insert()
1728 va->vm = gpuvm; in __drm_gpuva_insert()
1730 drm_gpuva_it_insert(va, &gpuvm->rb.tree); in __drm_gpuva_insert()
1736 head = &gpuvm->rb.list; in __drm_gpuva_insert()
1758 drm_gpuva_insert(struct drm_gpuvm *gpuvm, in drm_gpuva_insert() argument
1765 if (unlikely(!drm_gpuvm_range_valid(gpuvm, addr, range))) in drm_gpuva_insert()
1768 ret = __drm_gpuva_insert(gpuvm, va); in drm_gpuva_insert()
1775 drm_gpuvm_get(gpuvm); in drm_gpuva_insert()
1801 struct drm_gpuvm *gpuvm = va->vm; in drm_gpuva_remove() local
1803 if (unlikely(va == &gpuvm->kernel_alloc_node)) { in drm_gpuva_remove()
1804 drm_WARN(gpuvm->drm, 1, in drm_gpuva_remove()
1833 struct drm_gpuvm *gpuvm = va->vm; in drm_gpuva_link() local
1838 drm_WARN_ON(gpuvm->drm, obj != vm_bo->obj); in drm_gpuva_link()
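
Lines 1717-1838 are the tree plumbing: __drm_gpuva_insert() fails with -EEXIST if the interval is already occupied, drm_gpuva_insert() additionally rejects spans invalid per drm_gpuvm_range_valid() (-EINVAL) and takes a VM reference, drm_gpuva_remove() refuses to drop the kernel_alloc_node, and drm_gpuva_link() attaches a mapping to its drm_gpuvm_bo. A sketch; addr, range, obj, offset and vm_bo are the caller's:

    struct drm_gpuva *va = kzalloc(sizeof(*va), GFP_KERNEL);
    int ret;

    if (!va)
            return -ENOMEM;

    va->va.addr = addr;
    va->va.range = range;
    va->gem.obj = obj;
    va->gem.offset = offset;

    ret = drm_gpuva_insert(gpuvm, va);      /* -EINVAL or -EEXIST on failure */
    if (ret) {
            kfree(va);
            return ret;
    }

    /* Needs the GEM gpuva lock; takes its own vm_bo reference. */
    drm_gpuva_link(va, vm_bo);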
1891 drm_gpuva_find_first(struct drm_gpuvm *gpuvm, in drm_gpuva_find_first() argument
1896 return drm_gpuva_it_iter_first(&gpuvm->rb.tree, addr, last); in drm_gpuva_find_first()
1909 drm_gpuva_find(struct drm_gpuvm *gpuvm, in drm_gpuva_find() argument
1914 va = drm_gpuva_find_first(gpuvm, addr, range); in drm_gpuva_find()
1942 drm_gpuva_find_prev(struct drm_gpuvm *gpuvm, u64 start) in drm_gpuva_find_prev() argument
1944 if (!drm_gpuvm_range_valid(gpuvm, start - 1, 1)) in drm_gpuva_find_prev()
1947 return drm_gpuva_it_iter_first(&gpuvm->rb.tree, start - 1, start); in drm_gpuva_find_prev()
1964 drm_gpuva_find_next(struct drm_gpuvm *gpuvm, u64 end) in drm_gpuva_find_next() argument
1966 if (!drm_gpuvm_range_valid(gpuvm, end, 1)) in drm_gpuva_find_next()
1969 return drm_gpuva_it_iter_first(&gpuvm->rb.tree, end, end + 1); in drm_gpuva_find_next()
1983 drm_gpuvm_interval_empty(struct drm_gpuvm *gpuvm, u64 addr, u64 range) in drm_gpuvm_interval_empty() argument
1985 return !drm_gpuva_find_first(gpuvm, addr, range); in drm_gpuvm_interval_empty()
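
Lines 1891-1985 are the lookups: drm_gpuva_find_first() returns the first mapping intersecting [addr, addr + range), drm_gpuva_find() demands an exact match, drm_gpuva_find_prev()/_next() fetch the immediate neighbours (handy for merge decisions), and drm_gpuvm_interval_empty() is just a negated find_first(). For example:

    struct drm_gpuva *va, *prev, *next;

    va = drm_gpuva_find(gpuvm, addr, range);        /* exact addr/range only */
    if (!va)
            return -ENOENT;

    prev = drm_gpuva_find_prev(gpuvm, va->va.addr);                /* or NULL */
    next = drm_gpuva_find_next(gpuvm, va->va.addr + va->va.range);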
1999 drm_gpuva_map(struct drm_gpuvm *gpuvm, in drm_gpuva_map() argument
2004 drm_gpuva_insert(gpuvm, va); in drm_gpuva_map()
2024 struct drm_gpuvm *gpuvm = va->vm; in drm_gpuva_remap() local
2030 drm_gpuva_insert(gpuvm, prev); in drm_gpuva_remap()
2035 drm_gpuva_insert(gpuvm, next); in drm_gpuva_remap()
2102 __drm_gpuvm_sm_map(struct drm_gpuvm *gpuvm, in __drm_gpuvm_sm_map() argument
2111 if (unlikely(!drm_gpuvm_range_valid(gpuvm, req_addr, req_range))) in __drm_gpuvm_sm_map()
2114 drm_gpuvm_for_each_va_range_safe(va, next, gpuvm, req_addr, req_end) { in __drm_gpuvm_sm_map()
2244 __drm_gpuvm_sm_unmap(struct drm_gpuvm *gpuvm, in __drm_gpuvm_sm_unmap() argument
2252 if (unlikely(!drm_gpuvm_range_valid(gpuvm, req_addr, req_range))) in __drm_gpuvm_sm_unmap()
2255 drm_gpuvm_for_each_va_range_safe(va, next, gpuvm, req_addr, req_end) { in __drm_gpuvm_sm_unmap()
2334 drm_gpuvm_sm_map(struct drm_gpuvm *gpuvm, void *priv, in drm_gpuvm_sm_map() argument
2338 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_sm_map()
2345 return __drm_gpuvm_sm_map(gpuvm, ops, priv, in drm_gpuvm_sm_map()
2379 drm_gpuvm_sm_unmap(struct drm_gpuvm *gpuvm, void *priv, in drm_gpuvm_sm_unmap() argument
2382 const struct drm_gpuvm_ops *ops = gpuvm->ops; in drm_gpuvm_sm_unmap()
2388 return __drm_gpuvm_sm_unmap(gpuvm, ops, priv, in drm_gpuvm_sm_unmap()
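
Lines 1999-2388 implement the split/merge state machine: __drm_gpuvm_sm_map()/__drm_gpuvm_sm_unmap() walk the affected span and emit map, remap and unmap steps, which drm_gpuvm_sm_map()/drm_gpuvm_sm_unmap() feed to the sm_step_* callbacks in the VM's drm_gpuvm_ops; inside those callbacks, drm_gpuva_map()/drm_gpuva_remap() (lines 1999-2035) insert the resulting drm_gpuvas. A sketch of the callback mode; my_ctx and its preallocation scheme are illustrative:

    struct my_ctx {
            struct drm_gpuvm *vm;
            struct drm_gpuva *new_va;       /* preallocated by the caller */
    };

    static int my_step_map(struct drm_gpuva_op *op, void *priv)
    {
            struct my_ctx *ctx = priv;

            /* Initializes new_va from op->map and inserts it into the tree. */
            drm_gpuva_map(ctx->vm, ctx->new_va, &op->map);
            ctx->new_va = NULL;
            return 0;       /* a nonzero return aborts the walk */
    }

    static int my_step_unmap(struct drm_gpuva_op *op, void *priv)
    {
            drm_gpuva_unmap(&op->unmap);    /* removes op->unmap.va */
            return 0;
    }

    static int my_step_remap(struct drm_gpuva_op *op, void *priv)
    {
            /* Split: drm_gpuva_remap(prev, next, &op->remap) with
             * preallocated prev/next drm_gpuvas; omitted here. */
            return 0;
    }

These callbacks live in the drm_gpuvm_ops passed to drm_gpuvm_init(); all three must be set or drm_gpuvm_sm_map() returns -EINVAL. A map request is then a single call:

    ret = drm_gpuvm_sm_map(gpuvm, &ctx, req_addr, req_range, req_obj, req_offset);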
2394 gpuva_op_alloc(struct drm_gpuvm *gpuvm) in gpuva_op_alloc() argument
2396 const struct drm_gpuvm_ops *fn = gpuvm->ops; in gpuva_op_alloc()
2411 gpuva_op_free(struct drm_gpuvm *gpuvm, in gpuva_op_free() argument
2414 const struct drm_gpuvm_ops *fn = gpuvm->ops; in gpuva_op_free()
2430 struct drm_gpuvm *gpuvm = args->vm; in drm_gpuva_sm_step() local
2434 op = gpuva_op_alloc(gpuvm); in drm_gpuva_sm_step()
2473 gpuva_op_free(gpuvm, op); in drm_gpuva_sm_step()
2517 drm_gpuvm_sm_map_ops_create(struct drm_gpuvm *gpuvm, in drm_gpuvm_sm_map_ops_create() argument
2534 args.vm = gpuvm; in drm_gpuvm_sm_map_ops_create()
2537 ret = __drm_gpuvm_sm_map(gpuvm, &gpuvm_list_ops, &args, in drm_gpuvm_sm_map_ops_create()
2546 drm_gpuva_ops_free(gpuvm, ops); in drm_gpuvm_sm_map_ops_create()
2581 drm_gpuvm_sm_unmap_ops_create(struct drm_gpuvm *gpuvm, in drm_gpuvm_sm_unmap_ops_create() argument
2597 args.vm = gpuvm; in drm_gpuvm_sm_unmap_ops_create()
2600 ret = __drm_gpuvm_sm_unmap(gpuvm, &gpuvm_list_ops, &args, in drm_gpuvm_sm_unmap_ops_create()
2608 drm_gpuva_ops_free(gpuvm, ops); in drm_gpuvm_sm_unmap_ops_create()
2632 drm_gpuvm_prefetch_ops_create(struct drm_gpuvm *gpuvm, in drm_gpuvm_prefetch_ops_create() argument
2647 drm_gpuvm_for_each_va_range(va, gpuvm, addr, end) { in drm_gpuvm_prefetch_ops_create()
2648 op = gpuva_op_alloc(gpuvm); in drm_gpuvm_prefetch_ops_create()
2662 drm_gpuva_ops_free(gpuvm, ops); in drm_gpuvm_prefetch_ops_create()
2730 drm_gpuva_ops_free(struct drm_gpuvm *gpuvm, in drm_gpuva_ops_free() argument
2744 gpuva_op_free(gpuvm, op); in drm_gpuva_ops_free()
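
Alternatively, lines 2394-2744 provide the deferred flavour: drm_gpuvm_sm_map_ops_create()/drm_gpuvm_sm_unmap_ops_create() record the same steps into a drm_gpuva_ops list (allocating through the optional op_alloc()/op_free() hooks), drm_gpuvm_prefetch_ops_create() builds prefetch ops for an existing range, and drm_gpuva_ops_free() tears the list down. The canonical consume-then-free pattern:

    struct drm_gpuva_ops *ops;
    struct drm_gpuva_op *op;

    ops = drm_gpuvm_sm_map_ops_create(gpuvm, req_addr, req_range,
                                      req_obj, req_offset);
    if (IS_ERR(ops))
            return PTR_ERR(ops);

    drm_gpuva_for_each_op(op, ops) {
            switch (op->op) {
            case DRM_GPUVA_OP_MAP:
                    /* program PTEs for op->map, insert a new drm_gpuva */
                    break;
            case DRM_GPUVA_OP_REMAP:
                    /* split around op->remap.prev / op->remap.next */
                    break;
            case DRM_GPUVA_OP_UNMAP:
                    /* tear down op->unmap.va */
                    break;
            default:
                    break;
            }
    }

    drm_gpuva_ops_free(gpuvm, ops);         /* uses op_free() when provided */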