Lines Matching refs:bo — cross-reference listing (format: source line number, the matching code, then the enclosing function and whether bo is an argument or a local there). The code appears to be the vmwgfx DRM driver's buffer-object helpers built on TTM.
56 vmw_buffer_object(struct ttm_buffer_object *bo) in vmw_buffer_object() argument
58 return container_of(bo, struct vmw_buffer_object, base); in vmw_buffer_object()
71 vmw_user_buffer_object(struct ttm_buffer_object *bo) in vmw_user_buffer_object() argument
73 struct vmw_buffer_object *vmw_bo = vmw_buffer_object(bo); in vmw_user_buffer_object()
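Lines 56-73 are the container_of() downcasts from TTM's generic struct ttm_buffer_object to the driver's wrappers. A minimal sketch of the pattern follows; the members base and vbo (and the cached map, line 400) are confirmed by the listing, while every other struct detail is an assumption. Later sketches reuse these definitions and headers, plus <drm/ttm/ttm_bo_driver.h>, <drm/ttm/ttm_placement.h>, <linux/dma-resv.h> and <linux/slab.h> where used, and the driver placements named in the listing (vmw_vram_placement, vmw_vram_gmr_placement, vmw_sys_placement).

    #include <linux/kernel.h>           /* container_of() */
    #include <linux/slab.h>             /* kfree() */
    #include <drm/ttm/ttm_bo_api.h>     /* ttm_buffer_object, ttm_bo_kmap_obj */

    /* Driver wrappers, reduced to the members the listing confirms. */
    struct vmw_buffer_object {
        struct ttm_buffer_object base;
        struct ttm_bo_kmap_obj map;     /* cached kernel mapping (line 400) */
    };

    struct vmw_user_buffer_object {
        struct vmw_buffer_object vbo;   /* see line 692: &user_bo->vbo.base */
    };

    static inline struct vmw_buffer_object *
    vmw_buffer_object(struct ttm_buffer_object *bo)
    {
        return container_of(bo, struct vmw_buffer_object, base);
    }

    static inline struct vmw_user_buffer_object *
    vmw_user_buffer_object(struct ttm_buffer_object *bo)
    {
        struct vmw_buffer_object *vmw_bo = vmw_buffer_object(bo);

        return container_of(vmw_bo, struct vmw_user_buffer_object, vbo);
    }

container_of() subtracts the member offset, so the wrapper is recovered even when base or vbo is not the first member.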
95 struct ttm_buffer_object *bo = &buf->base; in vmw_bo_pin_in_placement() local
105 ret = ttm_bo_reserve(bo, interruptible, false, NULL); in vmw_bo_pin_in_placement()
110 ret = ttm_bo_mem_compat(placement, &bo->mem, in vmw_bo_pin_in_placement()
113 ret = ttm_bo_validate(bo, placement, &ctx); in vmw_bo_pin_in_placement()
118 ttm_bo_unreserve(bo); in vmw_bo_pin_in_placement()
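Lines 95-118 show the canonical TTM pin sequence: reserve the object, check whether its current memory already satisfies the requested placement (ttm_bo_mem_compat()), migrate with ttm_bo_validate() only if it does not, then unreserve. A hedged reconstruction, assuming the TTM API of the bo->mem era; in the driver the compat fast path is taken only for already-pinned buffers, which the sketch folds into a plain fast path, and the pin-count bookkeeping is omitted:

    static int pin_in_placement_sketch(struct ttm_buffer_object *bo,
                                       struct ttm_placement *placement,
                                       bool interruptible)
    {
        struct ttm_operation_ctx ctx = { .interruptible = interruptible,
                                         .no_wait_gpu = false };
        uint32_t new_flags;
        int ret;

        ret = ttm_bo_reserve(bo, interruptible, false, NULL);
        if (unlikely(ret != 0))
            return ret;

        if (ttm_bo_mem_compat(placement, &bo->mem, &new_flags))
            ret = 0;                    /* already in an acceptable place */
        else
            ret = ttm_bo_validate(bo, placement, &ctx);

        ttm_bo_unreserve(bo);
        return ret;
    }

vmw_bo_unpin() at lines 288-301 is the mirror image of this: reserve, drop the pin, unreserve.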
144 struct ttm_buffer_object *bo = &buf->base; in vmw_bo_pin_in_vram_or_gmr() local
154 ret = ttm_bo_reserve(bo, interruptible, false, NULL); in vmw_bo_pin_in_vram_or_gmr()
159 ret = ttm_bo_mem_compat(&vmw_vram_gmr_placement, &bo->mem, in vmw_bo_pin_in_vram_or_gmr()
164 ret = ttm_bo_validate(bo, &vmw_vram_gmr_placement, &ctx); in vmw_bo_pin_in_vram_or_gmr()
168 ret = ttm_bo_validate(bo, &vmw_vram_placement, &ctx); in vmw_bo_pin_in_vram_or_gmr()
174 ttm_bo_unreserve(bo); in vmw_bo_pin_in_vram_or_gmr()
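Lines 144-174 add a fallback: first try the combined VRAM-or-GMR placement, and only when that fails retry with VRAM alone. A sketch of the fallback step; the -ERESTARTSYS early-out on interrupted waits is an assumption:

    static int pin_vram_or_gmr_sketch(struct ttm_buffer_object *bo,
                                      bool interruptible)
    {
        struct ttm_operation_ctx ctx = { .interruptible = interruptible,
                                         .no_wait_gpu = false };
        int ret;

        ret = ttm_bo_reserve(bo, interruptible, false, NULL);
        if (unlikely(ret != 0))
            return ret;

        /* Prefer the combined VRAM/GMR placement. */
        ret = ttm_bo_validate(bo, &vmw_vram_gmr_placement, &ctx);
        if (likely(ret == 0) || ret == -ERESTARTSYS)
            goto out_unreserve;         /* done, or interrupted: don't retry */

        /* Fall back to VRAM only. */
        ret = ttm_bo_validate(bo, &vmw_vram_placement, &ctx);

    out_unreserve:
        ttm_bo_unreserve(bo);
        return ret;
    }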
219 struct ttm_buffer_object *bo = &buf->base; in vmw_bo_pin_in_start_of_vram() local
226 place.lpfn = bo->num_pages; in vmw_bo_pin_in_start_of_vram()
237 ret = ttm_bo_reserve(bo, interruptible, false, NULL); in vmw_bo_pin_in_start_of_vram()
246 if (bo->mem.mem_type == TTM_PL_VRAM && in vmw_bo_pin_in_start_of_vram()
247 bo->mem.start < bo->num_pages && in vmw_bo_pin_in_start_of_vram()
248 bo->mem.start > 0 && in vmw_bo_pin_in_start_of_vram()
251 (void) ttm_bo_validate(bo, &vmw_sys_placement, &ctx); in vmw_bo_pin_in_start_of_vram()
255 ret = ttm_bo_mem_compat(&placement, &bo->mem, in vmw_bo_pin_in_start_of_vram()
258 ret = ttm_bo_validate(bo, &placement, &ctx); in vmw_bo_pin_in_start_of_vram()
261 WARN_ON(ret == 0 && bo->mem.start != 0); in vmw_bo_pin_in_start_of_vram()
265 ttm_bo_unreserve(bo); in vmw_bo_pin_in_start_of_vram()
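Lines 219-265 pin a buffer at offset 0 of VRAM by building an on-stack ttm_place whose window is exactly the buffer's size (lpfn = bo->num_pages, line 226), so validation can only succeed at the start of VRAM. If the buffer already sits in VRAM, overlapping that window but not at offset 0 (lines 246-248), it is first kicked to system memory so the allocator can re-place it; the WARN_ON at line 261 asserts the result. A sketch, with the placement's mem_type/flags assumed:

    static int pin_start_of_vram_sketch(struct ttm_buffer_object *bo,
                                        bool interruptible)
    {
        struct ttm_operation_ctx ctx = { .interruptible = interruptible,
                                         .no_wait_gpu = false };
        struct ttm_place place = {
            .fpfn = 0,
            .lpfn = bo->num_pages,      /* window exactly as large as the BO */
            .mem_type = TTM_PL_VRAM,    /* assumption: plus cache flags */
        };
        struct ttm_placement placement = {
            .num_placement = 1,
            .placement = &place,
        };
        int ret;

        ret = ttm_bo_reserve(bo, interruptible, false, NULL);
        if (unlikely(ret != 0))
            return ret;

        /* In VRAM, overlapping the target window, but not at offset 0:
         * evict to system memory first so validate can re-place it. */
        if (bo->mem.mem_type == TTM_PL_VRAM &&
            bo->mem.start < bo->num_pages &&
            bo->mem.start > 0)
            (void) ttm_bo_validate(bo, &vmw_sys_placement, &ctx);

        ret = ttm_bo_validate(bo, &placement, &ctx);

        /* A successfully pinned buffer must now start at offset 0. */
        WARN_ON(ret == 0 && bo->mem.start != 0);

        ttm_bo_unreserve(bo);
        return ret;
    }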
288 struct ttm_buffer_object *bo = &buf->base; in vmw_bo_unpin() local
295 ret = ttm_bo_reserve(bo, interruptible, false, NULL); in vmw_bo_unpin()
301 ttm_bo_unreserve(bo); in vmw_bo_unpin()
315 void vmw_bo_get_guest_ptr(const struct ttm_buffer_object *bo, in vmw_bo_get_guest_ptr() argument
318 if (bo->mem.mem_type == TTM_PL_VRAM) { in vmw_bo_get_guest_ptr()
320 ptr->offset = bo->mem.start << PAGE_SHIFT; in vmw_bo_get_guest_ptr()
322 ptr->gmrId = bo->mem.start; in vmw_bo_get_guest_ptr()
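Lines 315-322 translate the buffer's TTM placement into an SVGA guest pointer: a VRAM buffer is addressed by byte offset (page index shifted by PAGE_SHIFT), while a GMR-backed buffer is addressed by GMR id with no offset. A sketch; SVGA_GMR_FRAMEBUFFER as the VRAM alias id is an assumption beyond what the listing shows:

    static void get_guest_ptr_sketch(const struct ttm_buffer_object *bo,
                                     SVGAGuestPtr *ptr)
    {
        if (bo->mem.mem_type == TTM_PL_VRAM) {
            ptr->gmrId = SVGA_GMR_FRAMEBUFFER;      /* assumption */
            ptr->offset = bo->mem.start << PAGE_SHIFT;
        } else {
            ptr->gmrId = bo->mem.start; /* a GMR id, not an address */
            ptr->offset = 0;
        }
    }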
340 struct ttm_buffer_object *bo = &vbo->base; in vmw_bo_pin_reserved() local
341 uint32_t old_mem_type = bo->mem.mem_type; in vmw_bo_pin_reserved()
344 dma_resv_assert_held(bo->base.resv); in vmw_bo_pin_reserved()
357 pl.mem_type = bo->mem.mem_type; in vmw_bo_pin_reserved()
358 pl.flags = bo->mem.placement; in vmw_bo_pin_reserved()
368 ret = ttm_bo_validate(bo, &placement, &ctx); in vmw_bo_pin_reserved()
370 BUG_ON(ret != 0 || bo->mem.mem_type != old_mem_type); in vmw_bo_pin_reserved()
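Lines 340-370 pin an already-reserved buffer: the caller must hold the reservation (dma_resv_assert_held(), line 344), and validation runs against a single placement copied from the buffer's current mem_type and flags (lines 357-358), so the object cannot move; the BUG_ON at line 370 enforces that. A sketch with the pin-count handling omitted:

    static void pin_reserved_sketch(struct vmw_buffer_object *vbo)
    {
        struct ttm_operation_ctx ctx = { .interruptible = false,
                                         .no_wait_gpu = false };
        struct ttm_buffer_object *bo = &vbo->base;
        uint32_t old_mem_type = bo->mem.mem_type;
        struct ttm_place pl = {
            .fpfn = 0,
            .lpfn = 0,
            .mem_type = bo->mem.mem_type,   /* keep the BO where it is */
            .flags = bo->mem.placement,
        };
        struct ttm_placement placement = {
            .num_placement = 1,
            .placement = &pl,
        };
        int ret;

        dma_resv_assert_held(bo->base.resv);    /* caller holds the lock */

        ret = ttm_bo_validate(bo, &placement, &ctx);

        /* Validating against the current placement must not move the BO. */
        BUG_ON(ret != 0 || bo->mem.mem_type != old_mem_type);
    }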
391 struct ttm_buffer_object *bo = &vbo->base; in vmw_bo_map_and_cache() local
400 ret = ttm_bo_kmap(bo, 0, bo->num_pages, &vbo->map); in vmw_bo_map_and_cache()
418 if (vbo->map.bo == NULL) in vmw_bo_unmap()
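Lines 391-418: the map helper kmaps the whole object (pages 0..num_pages, line 400) and caches the result in vbo->map; the unmap side uses map.bo == NULL (line 418) as the "no cached mapping" test. A sketch; the cache-hit fast path and returning NULL on error are assumptions:

    static void *map_and_cache_sketch(struct vmw_buffer_object *vbo)
    {
        struct ttm_buffer_object *bo = &vbo->base;
        bool not_used;

        if (!vbo->map.bo) {             /* no cached mapping yet */
            if (ttm_bo_kmap(bo, 0, bo->num_pages, &vbo->map))
                return NULL;            /* assumption: NULL on error */
        }
        return ttm_kmap_obj_virtual(&vbo->map, &not_used);
    }

    static void unmap_sketch(struct vmw_buffer_object *vbo)
    {
        if (vbo->map.bo == NULL)        /* nothing cached */
            return;
        ttm_bo_kunmap(&vbo->map);
    }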
463 void vmw_bo_bo_free(struct ttm_buffer_object *bo) in vmw_bo_bo_free() argument
465 struct vmw_buffer_object *vmw_bo = vmw_buffer_object(bo); in vmw_bo_bo_free()
479 static void vmw_user_bo_destroy(struct ttm_buffer_object *bo) in vmw_user_bo_destroy() argument
481 struct vmw_user_buffer_object *vmw_user_bo = vmw_user_buffer_object(bo); in vmw_user_bo_destroy()
508 void (*bo_free)(struct ttm_buffer_object *bo)) in vmw_bo_init() argument
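Lines 463-508: destruction is routed through a per-object callback handed to vmw_bo_init() (the bo_free argument at line 508); each callback recovers its wrapper with the accessor and frees it. A sketch of the plain-BO flavor; dropping the cached map before kfree() mirrors the map field above, the rest of the teardown is assumed away:

    static void bo_free_sketch(struct ttm_buffer_object *bo)
    {
        struct vmw_buffer_object *vmw_bo = vmw_buffer_object(bo);

        unmap_sketch(vmw_bo);           /* drop any cached kernel mapping */
        kfree(vmw_bo);
    }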
653 int vmw_user_bo_verify_access(struct ttm_buffer_object *bo, in vmw_user_bo_verify_access() argument
658 if (unlikely(bo->destroy != vmw_user_bo_destroy)) in vmw_user_bo_verify_access()
661 vmw_user_bo = vmw_user_buffer_object(bo); in vmw_user_bo_verify_access()
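Lines 653-661 verify that a TTM object really is one of this driver's user-visible buffers before granting access, by comparing bo->destroy against vmw_user_bo_destroy. The destroy callback doubles as a cheap type tag; the same trick guards the notify hooks at lines 1127-1128 and 1155-1156. Sketch, assuming the driver's callbacks are in scope:

    /* The destroy callback doubles as a type tag for TTM objects. */
    static bool is_vmw_user_bo_sketch(struct ttm_buffer_object *bo)
    {
        return bo->destroy == vmw_user_bo_destroy;
    }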
692 struct ttm_buffer_object *bo = &user_bo->vbo.base; in vmw_user_bo_synccpu_grab() local
700 (bo->base.resv, true, true, in vmw_user_bo_synccpu_grab()
709 ret = ttm_bo_reserve(bo, true, nonblock, NULL); in vmw_user_bo_synccpu_grab()
713 ret = ttm_bo_wait(bo, true, nonblock); in vmw_user_bo_synccpu_grab()
717 ttm_bo_unreserve(bo); in vmw_user_bo_synccpu_grab()
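Lines 692-717 grab a buffer for CPU access in two flavors: a fence-only wait directly on the reservation object (the dma_resv_wait_timeout_rcu() call split across line 700), or a full reserve + ttm_bo_wait() + unreserve when exclusive access is needed. A sketch; the drm_vmw_synccpu_allow_cs flag and the exact error mapping are assumptions:

    static int synccpu_grab_sketch(struct vmw_user_buffer_object *user_bo,
                                   uint32_t flags, bool nonblock)
    {
        struct ttm_buffer_object *bo = &user_bo->vbo.base;
        int ret;

        if (flags & drm_vmw_synccpu_allow_cs) {     /* assumption */
            long lret;

            /* Wait on all fences without taking the reservation. */
            lret = dma_resv_wait_timeout_rcu(bo->base.resv, true, true,
                                             nonblock ? 0 :
                                             MAX_SCHEDULE_TIMEOUT);
            if (!lret)
                return -EBUSY;          /* timed out in nonblocking mode */
            if (lret < 0)
                return (int)lret;
            return 0;
        }

        ret = ttm_bo_reserve(bo, true, nonblock, NULL);
        if (unlikely(ret != 0))
            return ret;

        ret = ttm_bo_wait(bo, true, nonblock);      /* wait for GPU idle */
        ttm_bo_unreserve(bo);
        return ret;
    }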
1014 void vmw_bo_fence_single(struct ttm_buffer_object *bo, in vmw_bo_fence_single() argument
1017 struct ttm_bo_device *bdev = bo->bdev; in vmw_bo_fence_single()
1024 dma_resv_add_excl_fence(bo->base.resv, &fence->base); in vmw_bo_fence_single()
1027 dma_resv_add_excl_fence(bo->base.resv, &fence->base); in vmw_bo_fence_single()
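Lines 1014-1027 attach one fence as the buffer's exclusive fence. The two identical dma_resv_add_excl_fence() calls at 1024 and 1027 are the two branches: when the caller supplies no fence, one is created from the device first and dropped again after attaching, since the reservation object keeps its own reference. A sketch; the container_of() to vmw_private and the vmw_execbuf_fence_commands()/vmw_fence_obj_unreference() usage are assumptions beyond what the listing shows:

    static void fence_single_sketch(struct ttm_buffer_object *bo,
                                    struct vmw_fence_obj *fence)
    {
        struct ttm_bo_device *bdev = bo->bdev;
        struct vmw_private *dev_priv =
            container_of(bdev, struct vmw_private, bdev);   /* assumption */

        if (fence == NULL) {
            /* No fence supplied: create one for the pending commands. */
            vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL);
            dma_resv_add_excl_fence(bo->base.resv, &fence->base);
            vmw_fence_obj_unreference(&fence);
        } else {
            dma_resv_add_excl_fence(bo->base.resv, &fence->base);
        }
    }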
1124 void vmw_bo_swap_notify(struct ttm_buffer_object *bo) in vmw_bo_swap_notify() argument
1127 if (bo->destroy != vmw_bo_bo_free && in vmw_bo_swap_notify()
1128 bo->destroy != vmw_user_bo_destroy) in vmw_bo_swap_notify()
1132 vmw_bo_unmap(vmw_buffer_object(bo)); in vmw_bo_swap_notify()
1146 void vmw_bo_move_notify(struct ttm_buffer_object *bo, in vmw_bo_move_notify() argument
1155 if (bo->destroy != vmw_bo_bo_free && in vmw_bo_move_notify()
1156 bo->destroy != vmw_user_bo_destroy) in vmw_bo_move_notify()
1159 vbo = container_of(bo, struct vmw_buffer_object, base); in vmw_bo_move_notify()
1166 if (mem->mem_type == TTM_PL_VRAM || bo->mem.mem_type == TTM_PL_VRAM) in vmw_bo_move_notify()
1174 if (mem->mem_type != VMW_PL_MOB && bo->mem.mem_type == VMW_PL_MOB) in vmw_bo_move_notify()
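Lines 1146-1174: the move-notify hook first filters for this driver's own objects with the destroy-pointer tag (lines 1155-1156), then invalidates whatever the move makes stale: any cached kernel map when VRAM is on either side of the move (line 1166), and GPU resource bindings when the buffer leaves a MOB (line 1174). vmw_bo_swap_notify() at 1124-1132 is the swap-out subset of the same logic, dropping only the map. A sketch; struct ttm_resource matches the TTM version implied by the listing (mem_type in ttm_place; older trees spell it ttm_mem_reg), and the unbind helper name is an assumption:

    static void move_notify_sketch(struct ttm_buffer_object *bo,
                                   struct ttm_resource *mem)
    {
        struct vmw_buffer_object *vbo;

        /* Only our own BOs, identified by their destroy callbacks. */
        if (bo->destroy != vmw_bo_bo_free &&
            bo->destroy != vmw_user_bo_destroy)
            return;

        vbo = container_of(bo, struct vmw_buffer_object, base);

        /* A kernel map goes stale whenever VRAM is source or target. */
        if (mem->mem_type == TTM_PL_VRAM || bo->mem.mem_type == TTM_PL_VRAM)
            unmap_sketch(vbo);

        /* Leaving a MOB invalidates GPU-side resource bindings. */
        if (mem->mem_type != VMW_PL_MOB && bo->mem.mem_type == VMW_PL_MOB)
            vmw_resource_unbind_list(vbo);      /* assumption: helper name */
    }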