
Lines matching refs: bo
(format: source line number, matched text, enclosing function; a trailing "argument" or "local" marks the kind of reference)

143 nouveau_bo_del_ttm(struct ttm_buffer_object *bo)  in nouveau_bo_del_ttm()  argument
145 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_bo_del_ttm()
147 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_del_ttm()
150 DRM_ERROR("bo %p still attached to GEM object\n", bo); in nouveau_bo_del_ttm()
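
nouveau_bo_del_ttm() is the destroy callback handed to TTM when the bo is created (see the ttm_bo_init() call below), so it runs once the last reference drops. A minimal sketch of the flow the matches suggest; everything beyond the matched lines, in particular the tile-region release and the final kfree(), is an assumption about nouveau of this vintage:

static void
nouveau_bo_del_ttm(struct ttm_buffer_object *bo)
{
        struct nouveau_drm *drm = nouveau_bdev(bo->bdev);
        struct nouveau_bo *nvbo = nouveau_bo(bo);

        /* The GEM wrapper must already be detached; hitting this
         * error indicates a reference-counting bug elsewhere. */
        if (unlikely(nvbo->gem))
                DRM_ERROR("bo %p still attached to GEM object\n", bo);

        /* Assumed: release any tile region, then free the wrapper. */
        nv10_bo_put_tile_region(drm->dev, nvbo->tile, NULL);
        kfree(nvbo);
}
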
159 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_fixup_align()
212 nvbo->bo.bdev = &drm->ttm.bdev; in nouveau_bo_new()
221 nvbo->bo.mem.num_pages = size >> PAGE_SHIFT; in nouveau_bo_new()
227 ret = ttm_bo_init(&drm->ttm.bdev, &nvbo->bo, size, in nouveau_bo_new()
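
Creation wires the new wrapper to the shared TTM device, derives the page count from the (page-aligned) byte size, and then hands ownership to ttm_bo_init(), passing nouveau_bo_del_ttm() as the destroy callback. A condensed sketch; the full ttm_bo_init() argument list reflects the TTM API of this kernel generation and should be treated as an assumption:

        nvbo->bo.bdev = &drm->ttm.bdev;
        nvbo->bo.mem.num_pages = size >> PAGE_SHIFT;    /* size is page-aligned */
        nouveau_bo_placement_set(nvbo, flags, 0);

        ret = ttm_bo_init(&drm->ttm.bdev, &nvbo->bo, size,
                          ttm_bo_type_device, &nvbo->placement,
                          align >> PAGE_SHIFT, false, NULL, acc_size, sg,
                          nouveau_bo_del_ttm);
        if (ret) {
                /* ttm_bo_init() invokes the destroy callback itself on
                 * failure, so nvbo must not be freed again here. */
                return ret;
        }
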
256 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in set_placement_range()
262 nvbo->bo.mem.num_pages < vram_pages / 4) { in set_placement_range()
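
The num_pages < vram_pages / 4 test feeds a placement heuristic: sufficiently small tiled buffers are constrained to one half of VRAM so that color and depth buffers land under different memory-controller partitions. A plausible reconstruction around the matched lines; the tile_mode/ZETA tests and everything else not shown in the listing are assumptions:

        /* vram_pages: total VRAM size in pages, assumed to come from
         * the fb subsystem earlier in the function. */
        if (nvbo->tile_mode && (type & TTM_PL_FLAG_VRAM) &&
            nvbo->bo.mem.num_pages < vram_pages / 4) {
                /* Depth (zeta) buffers in the upper half, color in the
                 * lower half, so the two never share a partition. */
                if (nvbo->tile_flags & NOUVEAU_GEM_TILE_ZETA) {
                        nvbo->placement.fpfn = vram_pages / 2;
                        nvbo->placement.lpfn = ~0;
                } else {
                        nvbo->placement.fpfn = 0;
                        nvbo->placement.lpfn = vram_pages / 2;
                }
        }
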
300 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_pin()
301 struct ttm_buffer_object *bo = &nvbo->bo; in nouveau_bo_pin() local
304 ret = ttm_bo_reserve(bo, false, false, false, 0); in nouveau_bo_pin()
308 if (nvbo->pin_refcnt && !(memtype & (1 << bo->mem.mem_type))) { in nouveau_bo_pin()
309 NV_ERROR(drm, "bo %p pinned elsewhere: 0x%08x vs 0x%08x\n", bo, in nouveau_bo_pin()
310 1 << bo->mem.mem_type, memtype); in nouveau_bo_pin()
322 switch (bo->mem.mem_type) { in nouveau_bo_pin()
324 drm->gem.vram_available -= bo->mem.size; in nouveau_bo_pin()
327 drm->gem.gart_available -= bo->mem.size; in nouveau_bo_pin()
334 ttm_bo_unreserve(bo); in nouveau_bo_pin()
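
Pinning is reserve, check, validate, account: the bo is reserved, a conflicting existing pin is rejected, the placement is narrowed to the requested memory type, and on success the available-memory counters are debited. A sketch assembled from the matched lines; the pin_refcnt handling and the nouveau_bo_validate() call in the middle are assumptions:

        ret = ttm_bo_reserve(bo, false, false, false, 0);
        if (ret)
                goto out;

        /* Already pinned, but somewhere incompatible with the request. */
        if (nvbo->pin_refcnt && !(memtype & (1 << bo->mem.mem_type))) {
                NV_ERROR(drm, "bo %p pinned elsewhere: 0x%08x vs 0x%08x\n", bo,
                         1 << bo->mem.mem_type, memtype);
                ret = -EINVAL;
                goto out;
        }

        if (nvbo->pin_refcnt++ == 0) {
                nouveau_bo_placement_set(nvbo, memtype, 0);
                ret = nouveau_bo_validate(nvbo, false, false);
                if (ret == 0) {
                        switch (bo->mem.mem_type) {
                        case TTM_PL_VRAM:
                                drm->gem.vram_available -= bo->mem.size;
                                break;
                        case TTM_PL_TT:
                                drm->gem.gart_available -= bo->mem.size;
                                break;
                        default:
                                break;
                        }
                }
        }
out:
        ttm_bo_unreserve(bo);
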
341 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_unpin()
342 struct ttm_buffer_object *bo = &nvbo->bo; in nouveau_bo_unpin() local
345 ret = ttm_bo_reserve(bo, false, false, false, 0); in nouveau_bo_unpin()
352 nouveau_bo_placement_set(nvbo, bo->mem.placement, 0); in nouveau_bo_unpin()
356 switch (bo->mem.mem_type) { in nouveau_bo_unpin()
358 drm->gem.vram_available += bo->mem.size; in nouveau_bo_unpin()
361 drm->gem.gart_available += bo->mem.size; in nouveau_bo_unpin()
369 ttm_bo_unreserve(bo); in nouveau_bo_unpin()
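
Unpinning is the mirror image: when the last pin reference drops, the placement is relaxed back to wherever the bo currently sits (bo->mem.placement) and the availability counters are credited. A hypothetical caller pairing the two might look like:

        /* Hypothetical usage: keep a scanout buffer resident in VRAM
         * for the lifetime of the framebuffer. */
        ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM);
        if (ret)
                return ret;

        /* The buffer is guaranteed not to move while displayed. */

        nouveau_bo_unpin(nvbo); /* balances the reference taken above */
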
378 ret = ttm_bo_reserve(&nvbo->bo, false, false, false, 0); in nouveau_bo_map()
382 ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.mem.num_pages, &nvbo->kmap); in nouveau_bo_map()
383 ttm_bo_unreserve(&nvbo->bo); in nouveau_bo_map()
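
Mapping into the kernel's address space is a thin wrapper over ttm_bo_kmap(): reserve, map every page of the object, unreserve. The matched lines cover nearly the whole function; only the early return is filled in here:

        ret = ttm_bo_reserve(&nvbo->bo, false, false, false, 0);
        if (ret)
                return ret;

        /* Map the entire object; the mapping lives in nvbo->kmap and
         * is later dereferenced via ttm_kmap_obj_virtual(). */
        ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.mem.num_pages, &nvbo->kmap);
        ttm_bo_unreserve(&nvbo->bo);
        return ret;
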
400 ret = ttm_bo_validate(&nvbo->bo, &nvbo->placement, in nouveau_bo_validate()
535 nouveau_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in nouveau_bo_evict_flags() argument
537 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_evict_flags()
539 switch (bo->mem.mem_type) { in nouveau_bo_evict_flags()
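
The eviction policy switches on where the bo currently lives: VRAM contents are pushed toward GART first, with system memory as the busy fallback, while anything else goes straight to system memory. A sketch; the exact placement flags are assumed from the TTM API of this era:

        switch (bo->mem.mem_type) {
        case TTM_PL_VRAM:
                nouveau_bo_placement_set(nvbo, TTM_PL_FLAG_TT,
                                         TTM_PL_FLAG_SYSTEM);
                break;
        default:
                nouveau_bo_placement_set(nvbo, TTM_PL_FLAG_SYSTEM, 0);
                break;
        }

        *pl = nvbo->placement;
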
569 ret = ttm_bo_move_accel_cleanup(&nvbo->bo, fence, evict, in nouveau_bo_move_accel_cleanup()
588 nve0_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo, in nve0_bo_move_copy() argument
620 nvc0_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo, in nvc0_bo_move_copy() argument
658 nvc0_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo, in nvc0_bo_move_m2mf() argument
697 nva3_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo, in nva3_bo_move_copy() argument
735 nv98_bo_move_exec(struct nouveau_channel *chan, struct ttm_buffer_object *bo, in nv98_bo_move_exec() argument
753 nv84_bo_move_exec(struct nouveau_channel *chan, struct ttm_buffer_object *bo, in nv84_bo_move_exec() argument
787 nv50_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo, in nv50_bo_move_m2mf() argument
791 struct nouveau_bo *nvbo = nouveau_bo(bo); in nv50_bo_move_m2mf()
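
The block of movers above (nve0/nvc0/nva3/nv98/nv84/nv50) all share one shape, which is what lets a single drm->ttm.move function pointer select the right copy engine per chipset. A typedef spelling out that shared signature, added here purely for illustration:

typedef int (*nouveau_bo_mover)(struct nouveau_channel *chan,
                                struct ttm_buffer_object *bo,
                                struct ttm_mem_reg *old_mem,
                                struct ttm_mem_reg *new_mem);
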
891 nouveau_bo_mem_ctxdma(struct ttm_buffer_object *bo, in nouveau_bo_mem_ctxdma() argument
900 nv04_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo, in nv04_bo_move_m2mf() argument
913 OUT_RING (chan, nouveau_bo_mem_ctxdma(bo, chan, old_mem)); in nv04_bo_move_m2mf()
914 OUT_RING (chan, nouveau_bo_mem_ctxdma(bo, chan, new_mem)); in nv04_bo_move_m2mf()
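
Pre-NV50 hardware has no per-channel virtual memory, so the NV04 mover cannot address buffers by GPU virtual address; it names the source and destination through DMA context objects chosen per memory type by nouveau_bo_mem_ctxdma(). The matched lines are the two OUT_RINGs binding those objects to the M2MF engine; the surrounding BEGIN_NV04 and its method offset are my assumptions:

        /* Bind source and destination ctxdma objects to the copy
         * engine before emitting the actual copy parameters. */
        BEGIN_NV04(chan, NvSubCopy, 0x0184, 2);
        OUT_RING  (chan, nouveau_bo_mem_ctxdma(bo, chan, old_mem));
        OUT_RING  (chan, nouveau_bo_mem_ctxdma(bo, chan, new_mem));
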
967 nouveau_bo_move_m2mf(struct ttm_buffer_object *bo, int evict, bool intr, in nouveau_bo_move_m2mf() argument
970 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_bo_move_m2mf()
972 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_move_m2mf()
973 struct ttm_mem_reg *old_mem = &bo->mem; in nouveau_bo_move_m2mf()
994 ret = drm->ttm.move(chan, bo, &bo->mem, new_mem); in nouveau_bo_move_m2mf()
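
nouveau_bo_move_m2mf() is where the indirection pays off: it takes the driver's transfer channel and dispatches through drm->ttm.move, then (per the ttm_bo_move_accel_cleanup() match at source line 569) fences the bo so TTM can tell when the GPU copy has retired. A condensed sketch; the channel locking and the cleanup call's exact signature are assumptions:

        struct nouveau_channel *chan = drm->ttm.chan;

        mutex_lock(&chan->cli->mutex);
        ret = drm->ttm.move(chan, bo, &bo->mem, new_mem);
        if (ret == 0) {
                /* Attach a fence so later CPU access or another move
                 * waits for the copy engine to finish. */
                ret = nouveau_bo_move_accel_cleanup(chan, nvbo, evict,
                                                    no_wait_gpu, new_mem);
        }
        mutex_unlock(&chan->cli->mutex);
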
1064 nouveau_bo_move_flipd(struct ttm_buffer_object *bo, bool evict, bool intr, in nouveau_bo_move_flipd() argument
1078 ret = ttm_bo_mem_space(bo, &placement, &tmp_mem, intr, no_wait_gpu); in nouveau_bo_move_flipd()
1082 ret = ttm_tt_bind(bo->ttm, &tmp_mem); in nouveau_bo_move_flipd()
1086 ret = nouveau_bo_move_m2mf(bo, true, intr, no_wait_gpu, &tmp_mem); in nouveau_bo_move_flipd()
1090 ret = ttm_bo_move_ttm(bo, true, no_wait_gpu, new_mem); in nouveau_bo_move_flipd()
1092 ttm_bo_mem_put(bo, &tmp_mem); in nouveau_bo_move_flipd()
1097 nouveau_bo_move_flips(struct ttm_buffer_object *bo, bool evict, bool intr, in nouveau_bo_move_flips() argument
1111 ret = ttm_bo_mem_space(bo, &placement, &tmp_mem, intr, no_wait_gpu); in nouveau_bo_move_flips()
1115 ret = ttm_bo_move_ttm(bo, true, no_wait_gpu, &tmp_mem); in nouveau_bo_move_flips()
1119 ret = nouveau_bo_move_m2mf(bo, true, intr, no_wait_gpu, new_mem); in nouveau_bo_move_flips()
1124 ttm_bo_mem_put(bo, &tmp_mem); in nouveau_bo_move_flips()
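
The two flip helpers exist because the copy engines cannot reach unbound system pages directly. nouveau_bo_move_flipd() (VRAM to system) therefore stages through a GART placement: find space, bind the pages, GPU-copy into them, then let ttm_bo_move_ttm() finish the trip to system memory; nouveau_bo_move_flips() runs the same pipeline in the opposite order. A sketch of the flipd path around the matched lines; the placement setup is an assumption:

        struct ttm_placement placement;
        struct ttm_mem_reg tmp_mem;
        u32 placement_memtype = TTM_PL_FLAG_TT | TTM_PL_MASK_CACHING;

        placement.fpfn = placement.lpfn = 0;
        placement.num_placement = placement.num_busy_placement = 1;
        placement.placement = placement.busy_placement = &placement_memtype;

        tmp_mem = *new_mem;
        tmp_mem.mm_node = NULL;
        ret = ttm_bo_mem_space(bo, &placement, &tmp_mem, intr, no_wait_gpu);
        if (ret)
                return ret;

        ret = ttm_tt_bind(bo->ttm, &tmp_mem);   /* make GART pages reachable */
        if (ret)
                goto out;

        ret = nouveau_bo_move_m2mf(bo, true, intr, no_wait_gpu, &tmp_mem);
        if (ret)
                goto out;

        ret = ttm_bo_move_ttm(bo, true, no_wait_gpu, new_mem);
out:
        ttm_bo_mem_put(bo, &tmp_mem);   /* release the staging placement */
        return ret;
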
1129 nouveau_bo_move_ntfy(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_mem) in nouveau_bo_move_ntfy() argument
1131 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_move_ntfy()
1135 if (bo->destroy != nouveau_bo_del_ttm) in nouveau_bo_move_ntfy()
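
The notify hook must first prove the bo is actually ours: TTM invokes it for every buffer on the device, and a bo created elsewhere would not embed a struct nouveau_bo. Comparing the destroy callback is the idiom for that check:

        /* Not a nouveau bo: the container_of() behind nouveau_bo()
         * would be bogus, so bail before touching nvbo fields. */
        if (bo->destroy != nouveau_bo_del_ttm)
                return;
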
1159 nouveau_bo_vm_bind(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_mem, in nouveau_bo_vm_bind() argument
1162 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_bo_vm_bind()
1164 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_vm_bind()
1181 nouveau_bo_vm_cleanup(struct ttm_buffer_object *bo, in nouveau_bo_vm_cleanup() argument
1185 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_bo_vm_cleanup()
1188 nv10_bo_put_tile_region(dev, *old_tile, bo->sync_obj); in nouveau_bo_vm_cleanup()
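
The two tile helpers bracket a move on chipsets that use tile regions rather than per-bo page tables: nouveau_bo_vm_bind() picks a region for the new placement and nouveau_bo_vm_cleanup() releases the old one, tying the release to bo->sync_obj so the region is not reused before the GPU is done with the old layout. A sketch of the release step; the pointer swap after it is an assumption:

        /* Release the old tile region, deferred until the bo's
         * current fence signals. */
        nv10_bo_put_tile_region(dev, *old_tile, bo->sync_obj);
        *old_tile = new_tile;
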
1193 nouveau_bo_move(struct ttm_buffer_object *bo, bool evict, bool intr, in nouveau_bo_move() argument
1196 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_bo_move()
1197 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_move()
1198 struct ttm_mem_reg *old_mem = &bo->mem; in nouveau_bo_move()
1203 ret = nouveau_bo_vm_bind(bo, new_mem, &new_tile); in nouveau_bo_move()
1209 if (old_mem->mem_type == TTM_PL_SYSTEM && !bo->ttm) { in nouveau_bo_move()
1210 BUG_ON(bo->mem.mm_node != NULL); in nouveau_bo_move()
1211 bo->mem = *new_mem; in nouveau_bo_move()
1218 ret = ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem); in nouveau_bo_move()
1224 ret = nouveau_bo_move_flipd(bo, evict, intr, in nouveau_bo_move()
1227 ret = nouveau_bo_move_flips(bo, evict, intr, in nouveau_bo_move()
1230 ret = nouveau_bo_move_m2mf(bo, evict, intr, in nouveau_bo_move()
1237 ret = ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem); in nouveau_bo_move()
1242 nouveau_bo_vm_cleanup(bo, NULL, &new_tile); in nouveau_bo_move()
1244 nouveau_bo_vm_cleanup(bo, new_tile, &nvbo->tile); in nouveau_bo_move()
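
nouveau_bo_move() ties the strategies together. Freshly created bos (SYSTEM placement, no ttm yet) just adopt the new placement; otherwise a hardware mover is preferred, chosen by which side of the transfer is system memory, with a CPU memcpy when no mover exists or the hardware path fails. A sketch of the dispatch assembled from the matched lines; the tile bookkeeping (nouveau_bo_vm_bind()/nouveau_bo_vm_cleanup()) that brackets it is elided:

        /* Fast path: nothing is backed yet, so "moving" is just
         * adopting the new placement. */
        if (old_mem->mem_type == TTM_PL_SYSTEM && !bo->ttm) {
                BUG_ON(bo->mem.mm_node != NULL);
                bo->mem = *new_mem;
                new_mem->mm_node = NULL;
                goto out;
        }

        /* No per-chipset mover registered: plain CPU copy. */
        if (!drm->ttm.move) {
                ret = ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem);
                goto out;
        }

        /* Hardware-assisted copy, staged through GART when one side
         * is unbound system memory. */
        if (new_mem->mem_type == TTM_PL_SYSTEM)
                ret = nouveau_bo_move_flipd(bo, evict, intr,
                                            no_wait_gpu, new_mem);
        else if (old_mem->mem_type == TTM_PL_SYSTEM)
                ret = nouveau_bo_move_flips(bo, evict, intr,
                                            no_wait_gpu, new_mem);
        else
                ret = nouveau_bo_move_m2mf(bo, evict, intr,
                                           no_wait_gpu, new_mem);
        if (ret == 0)
                goto out;

        /* Hardware path failed: fall back to the CPU copy. */
        ret = ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem);
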
1251 nouveau_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in nouveau_bo_verify_access() argument
1320 nouveau_ttm_fault_reserve_notify(struct ttm_buffer_object *bo) in nouveau_ttm_fault_reserve_notify() argument
1322 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_ttm_fault_reserve_notify()
1323 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_ttm_fault_reserve_notify()
1330 if (bo->mem.mem_type != TTM_PL_VRAM) { in nouveau_ttm_fault_reserve_notify()
1337 if (bo->mem.start + bo->mem.num_pages < mappable) in nouveau_ttm_fault_reserve_notify()
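
The fault hook handles CPU access to VRAM that lies beyond the BAR's mappable window: a bo already below the mappable limit is left alone; otherwise its placement is clamped to the mappable range and it is re-validated, i.e. moved. A simplified sketch around the matched lines; how mappable is computed, and the tiled-surface special case inside the first branch, are assumptions and omissions respectively:

        /* mappable: size of the VRAM BAR window in pages, assumed to
         * be derived from the PCI resource length. */
        if (bo->mem.mem_type != TTM_PL_VRAM)
                return 0;       /* GART/system pages are always CPU-visible */

        if (bo->mem.start + bo->mem.num_pages < mappable)
                return 0;       /* already inside the BAR window */

        /* Clamp placement to the mappable range and migrate. */
        nvbo->placement.fpfn = 0;
        nvbo->placement.lpfn = mappable;
        nouveau_bo_placement_set(nvbo, TTM_PL_FLAG_VRAM, 0);
        return nouveau_bo_validate(nvbo, false, false);
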
1452 spin_lock(&nvbo->bo.bdev->fence_lock); in nouveau_bo_fence()
1453 old_fence = nvbo->bo.sync_obj; in nouveau_bo_fence()
1454 nvbo->bo.sync_obj = fence; in nouveau_bo_fence()
1455 spin_unlock(&nvbo->bo.bdev->fence_lock); in nouveau_bo_fence()
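
Fencing swaps the bo's sync object under bdev->fence_lock, so readers of bo->sync_obj always see either the old or the new fence, never a torn state. The reference juggling around the matched lines is an assumption:

        struct nouveau_fence *new_fence = nouveau_fence_ref(fence);
        struct nouveau_fence *old_fence = NULL;

        spin_lock(&nvbo->bo.bdev->fence_lock);
        old_fence = nvbo->bo.sync_obj;  /* may be NULL */
        nvbo->bo.sync_obj = new_fence;
        spin_unlock(&nvbo->bo.bdev->fence_lock);

        nouveau_fence_unref(&old_fence);        /* drop outside the lock */
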
1526 const u32 size = nvbo->bo.mem.num_pages << PAGE_SHIFT; in nouveau_bo_vma_add()
1527 struct nouveau_mem *node = nvbo->bo.mem.mm_node; in nouveau_bo_vma_add()
1535 if (nvbo->bo.mem.mem_type == TTM_PL_VRAM) in nouveau_bo_vma_add()
1536 nouveau_vm_map(vma, nvbo->bo.mem.mm_node); in nouveau_bo_vma_add()
1537 else if (nvbo->bo.mem.mem_type == TTM_PL_TT) { in nouveau_bo_vma_add()
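
Adding a per-client VMA sizes the virtual range to the bo and maps it immediately if the bo is resident: contiguous VRAM goes through nouveau_vm_map(), scattered GART pages through a scatter-gather variant. A sketch; the nouveau_vm_map_sg() call and its arguments are assumptions:

        const u32 size = nvbo->bo.mem.num_pages << PAGE_SHIFT;
        struct nouveau_mem *node = nvbo->bo.mem.mm_node;

        /* Map now if the bo is resident; SYSTEM-placed bos get mapped
         * later, once they have backing pages. */
        if (nvbo->bo.mem.mem_type == TTM_PL_VRAM)
                nouveau_vm_map(vma, nvbo->bo.mem.mm_node);
        else if (nvbo->bo.mem.mem_type == TTM_PL_TT)
                nouveau_vm_map_sg(vma, 0, size, node);  /* assumed helper */
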
1553 if (nvbo->bo.mem.mem_type != TTM_PL_SYSTEM) { in nouveau_bo_vma_del()
1554 spin_lock(&nvbo->bo.bdev->fence_lock); in nouveau_bo_vma_del()
1555 ttm_bo_wait(&nvbo->bo, false, false, false); in nouveau_bo_vma_del()
1556 spin_unlock(&nvbo->bo.bdev->fence_lock); in nouveau_bo_vma_del()
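
Deleting the VMA must not yank pages out from under in-flight GPU work, hence the ttm_bo_wait() under fence_lock before the unmap; bos sitting in plain system memory were never mapped, so they skip straight to freeing the virtual range. A sketch; the unmap/put calls after the wait are assumptions:

        if (nvbo->bo.mem.mem_type != TTM_PL_SYSTEM) {
                /* Wait on the bo's current fence so the GPU is done
                 * with these pages before the mapping disappears. */
                spin_lock(&nvbo->bo.bdev->fence_lock);
                ttm_bo_wait(&nvbo->bo, false, false, false);
                spin_unlock(&nvbo->bo.bdev->fence_lock);
                nouveau_vm_unmap(vma);  /* assumed */
        }
        nouveau_vm_put(vma);            /* assumed */
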