Lines Matching refs:bo

47 	struct ttm_buffer_object *bo;  member
89 void ttm_move_memcpy(struct ttm_buffer_object *bo, in ttm_move_memcpy() argument
96 struct ttm_tt *ttm = bo->ttm; in ttm_move_memcpy()
135 int ttm_bo_move_memcpy(struct ttm_buffer_object *bo, in ttm_bo_move_memcpy() argument
139 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_memcpy()
141 ttm_manager_type(bo->bdev, dst_mem->mem_type); in ttm_bo_move_memcpy()
142 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy()
143 struct ttm_resource *src_mem = bo->resource; in ttm_bo_move_memcpy()
162 dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo->ttm); in ttm_bo_move_memcpy()
168 src_iter = ttm_kmap_iter_tt_init(&_src_iter.tt, bo->ttm); in ttm_bo_move_memcpy()
174 ttm_move_memcpy(bo, dst_mem->num_pages, dst_iter, src_iter); in ttm_bo_move_memcpy()
178 ttm_bo_move_sync_cleanup(bo, dst_mem); in ttm_bo_move_memcpy()
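
The ttm_bo_move_memcpy() matches above (lines 135-178) are the CPU-copy fallback a driver can use from its move callback when no hardware copy engine is available. A minimal sketch, assuming a TTM around v5.14 where the .move hook takes an operation context, the new resource and a hop placement; mydrv_bo_move is a hypothetical driver callback, not code from this file:

#include <drm/ttm/ttm_bo_driver.h>

/* Hypothetical .move implementation: no copy engine, so every move goes
 * through the kernel-CPU memcpy path listed above. */
static int mydrv_bo_move(struct ttm_buffer_object *bo, bool evict,
                         struct ttm_operation_ctx *ctx,
                         struct ttm_resource *new_mem,
                         struct ttm_place *hop)
{
        return ttm_bo_move_memcpy(bo, ctx, new_mem);
}
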
188 static void ttm_transfered_destroy(struct ttm_buffer_object *bo) in ttm_transfered_destroy() argument
192 fbo = container_of(bo, struct ttm_transfer_obj, base); in ttm_transfered_destroy()
194 ttm_bo_put(fbo->bo); in ttm_transfered_destroy()
213 static int ttm_buffer_object_transfer(struct ttm_buffer_object *bo, in ttm_buffer_object_transfer() argument
223 fbo->base = *bo; in ttm_buffer_object_transfer()
225 ttm_bo_get(bo); in ttm_buffer_object_transfer()
226 fbo->bo = bo; in ttm_buffer_object_transfer()
242 if (bo->type != ttm_bo_type_sg) in ttm_buffer_object_transfer()
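
The matches at lines 47, 192-194 and 223-226 outline the ghost-object pattern: a transfer object embeds a second ttm_buffer_object and holds a reference on the original so the old backing store can be torn down asynchronously. A rough sketch of that layout and of the container_of() recovery in the destroy callback (field order, the example_ names and the kfree() are assumptions, not verified against the source):

#include <linux/kernel.h>
#include <linux/slab.h>
#include <drm/ttm/ttm_bo_api.h>

struct example_transfer_obj {
        struct ttm_buffer_object base;  /* ghost bo handed back to TTM */
        struct ttm_buffer_object *bo;   /* original bo, pinned via ttm_bo_get() */
};

static void example_transfer_destroy(struct ttm_buffer_object *bo)
{
        struct example_transfer_obj *fbo;

        /* Recover the wrapper from the embedded base object ... */
        fbo = container_of(bo, struct example_transfer_obj, base);
        /* ... and drop the reference taken when the transfer was created. */
        ttm_bo_put(fbo->bo);
        kfree(fbo);
}
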
256 pgprot_t ttm_io_prot(struct ttm_buffer_object *bo, struct ttm_resource *res, in ttm_io_prot() argument
262 man = ttm_manager_type(bo->bdev, res->mem_type); in ttm_io_prot()
263 caching = man->use_tt ? bo->ttm->caching : res->bus.caching; in ttm_io_prot()
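
ttm_io_prot() (lines 256-263) picks the caching attribute for a kernel mapping: the TT's caching mode when the resource manager is TT-backed, otherwise the bus caching of the placement. A driver mapping a BO by hand could use it as below; mydrv_bo_prot is a hypothetical helper, and PAGE_KERNEL is the base protection being adjusted, as in the kmap and vmap paths listed further down:

#include <drm/ttm/ttm_bo_driver.h>

/* Hypothetical helper: page protection matching the bo's current placement,
 * e.g. write-combined for WC VRAM, cached for cached system pages. */
static pgprot_t mydrv_bo_prot(struct ttm_buffer_object *bo)
{
        return ttm_io_prot(bo, bo->resource, PAGE_KERNEL);
}
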
269 static int ttm_bo_ioremap(struct ttm_buffer_object *bo, in ttm_bo_ioremap() argument
274 struct ttm_resource *mem = bo->resource; in ttm_bo_ioremap()
276 if (bo->resource->bus.addr) { in ttm_bo_ioremap()
278 map->virtual = ((u8 *)bo->resource->bus.addr) + offset; in ttm_bo_ioremap()
280 resource_size_t res = bo->resource->bus.offset + offset; in ttm_bo_ioremap()
295 static int ttm_bo_kmap_ttm(struct ttm_buffer_object *bo, in ttm_bo_kmap_ttm() argument
300 struct ttm_resource *mem = bo->resource; in ttm_bo_kmap_ttm()
305 struct ttm_tt *ttm = bo->ttm; in ttm_bo_kmap_ttm()
311 ret = ttm_tt_populate(bo->bdev, ttm, &ctx); in ttm_bo_kmap_ttm()
329 prot = ttm_io_prot(bo, mem, PAGE_KERNEL); in ttm_bo_kmap_ttm()
337 int ttm_bo_kmap(struct ttm_buffer_object *bo, in ttm_bo_kmap() argument
345 map->bo = bo; in ttm_bo_kmap()
346 if (num_pages > bo->resource->num_pages) in ttm_bo_kmap()
348 if ((start_page + num_pages) > bo->resource->num_pages) in ttm_bo_kmap()
351 ret = ttm_mem_io_reserve(bo->bdev, bo->resource); in ttm_bo_kmap()
354 if (!bo->resource->bus.is_iomem) { in ttm_bo_kmap()
355 return ttm_bo_kmap_ttm(bo, start_page, num_pages, map); in ttm_bo_kmap()
359 return ttm_bo_ioremap(bo, offset, size, map); in ttm_bo_kmap()
383 ttm_mem_io_free(map->bo->bdev, map->bo->resource); in ttm_bo_kunmap()
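
ttm_bo_kmap() (lines 337-359) and ttm_bo_kunmap() (line 383) are the page-range CPU mapping pair: the I/O branch goes through ttm_bo_ioremap(), the system-memory branch through ttm_bo_kmap_ttm(). A small usage sketch, assuming the BO is already reserved; mydrv_clear_first_page is hypothetical:

#include <linux/io.h>
#include <linux/string.h>
#include <drm/ttm/ttm_bo_api.h>

/* Hypothetical snippet: map the first page of a reserved bo and clear it.
 * ttm_kmap_obj_virtual() reports whether the mapping is __iomem so the
 * right memset flavour can be used. */
static int mydrv_clear_first_page(struct ttm_buffer_object *bo)
{
        struct ttm_bo_kmap_obj map;
        bool is_iomem;
        void *vaddr;
        int ret;

        ret = ttm_bo_kmap(bo, 0, 1, &map);
        if (ret)
                return ret;

        vaddr = ttm_kmap_obj_virtual(&map, &is_iomem);
        if (is_iomem)
                memset_io((void __iomem *)vaddr, 0, PAGE_SIZE);
        else
                memset(vaddr, 0, PAGE_SIZE);

        ttm_bo_kunmap(&map);
        return 0;
}
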
389 int ttm_bo_vmap(struct ttm_buffer_object *bo, struct dma_buf_map *map) in ttm_bo_vmap() argument
391 struct ttm_resource *mem = bo->resource; in ttm_bo_vmap()
394 ret = ttm_mem_io_reserve(bo->bdev, mem); in ttm_bo_vmap()
405 bo->base.size); in ttm_bo_vmap()
409 bo->base.size); in ttm_bo_vmap()
412 vaddr_iomem = ioremap(mem->bus.offset, bo->base.size); in ttm_bo_vmap()
424 struct ttm_tt *ttm = bo->ttm; in ttm_bo_vmap()
428 ret = ttm_tt_populate(bo->bdev, ttm, &ctx); in ttm_bo_vmap()
436 prot = ttm_io_prot(bo, mem, PAGE_KERNEL); in ttm_bo_vmap()
448 void ttm_bo_vunmap(struct ttm_buffer_object *bo, struct dma_buf_map *map) in ttm_bo_vunmap() argument
450 struct ttm_resource *mem = bo->resource; in ttm_bo_vunmap()
461 ttm_mem_io_free(bo->bdev, bo->resource); in ttm_bo_vunmap()
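
ttm_bo_vmap()/ttm_bo_vunmap() (lines 389-461) map the whole BO in one go and return the address in a struct dma_buf_map, which records whether it is an __iomem pointer. A hedged usage sketch, assuming a kernel around v5.14 (later kernels renamed dma_buf_map to iosys_map); mydrv_upload is hypothetical:

#include <linux/dma-buf-map.h>
#include <drm/ttm/ttm_bo_api.h>

/* Hypothetical snippet: map the whole bo and copy a buffer into it. The
 * dma_buf_map helpers hide whether the vmap ended up as an __iomem mapping
 * or a regular kernel pointer. */
static int mydrv_upload(struct ttm_buffer_object *bo,
                        const void *data, size_t size)
{
        struct dma_buf_map map;
        int ret;

        ret = ttm_bo_vmap(bo, &map);
        if (ret)
                return ret;

        dma_buf_map_memcpy_to(&map, data, size);
        ttm_bo_vunmap(bo, &map);
        return 0;
}
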
465 static int ttm_bo_wait_free_node(struct ttm_buffer_object *bo, in ttm_bo_wait_free_node() argument
469 ret = ttm_bo_wait(bo, false, false); in ttm_bo_wait_free_node()
474 ttm_bo_tt_destroy(bo); in ttm_bo_wait_free_node()
475 ttm_resource_free(bo, &bo->resource); in ttm_bo_wait_free_node()
479 static int ttm_bo_move_to_ghost(struct ttm_buffer_object *bo, in ttm_bo_move_to_ghost() argument
494 dma_fence_put(bo->moving); in ttm_bo_move_to_ghost()
495 bo->moving = dma_fence_get(fence); in ttm_bo_move_to_ghost()
497 ret = ttm_buffer_object_transfer(bo, &ghost_obj); in ttm_bo_move_to_ghost()
512 bo->ttm = NULL; in ttm_bo_move_to_ghost()
513 bo->resource = NULL; in ttm_bo_move_to_ghost()
520 static void ttm_bo_move_pipeline_evict(struct ttm_buffer_object *bo, in ttm_bo_move_pipeline_evict() argument
523 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_pipeline_evict()
526 from = ttm_manager_type(bdev, bo->resource->mem_type); in ttm_bo_move_pipeline_evict()
539 ttm_resource_free(bo, &bo->resource); in ttm_bo_move_pipeline_evict()
541 dma_fence_put(bo->moving); in ttm_bo_move_pipeline_evict()
542 bo->moving = dma_fence_get(fence); in ttm_bo_move_pipeline_evict()
545 int ttm_bo_move_accel_cleanup(struct ttm_buffer_object *bo, in ttm_bo_move_accel_cleanup() argument
551 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_accel_cleanup()
552 struct ttm_resource_manager *from = ttm_manager_type(bdev, bo->resource->mem_type); in ttm_bo_move_accel_cleanup()
556 dma_resv_add_excl_fence(bo->base.resv, fence); in ttm_bo_move_accel_cleanup()
558 ret = ttm_bo_move_to_ghost(bo, fence, man->use_tt); in ttm_bo_move_accel_cleanup()
560 ttm_bo_move_pipeline_evict(bo, fence); in ttm_bo_move_accel_cleanup()
562 ret = ttm_bo_wait_free_node(bo, man->use_tt); in ttm_bo_move_accel_cleanup()
567 ttm_bo_assign_mem(bo, new_mem); in ttm_bo_move_accel_cleanup()
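
ttm_bo_move_accel_cleanup() (lines 545-567) is what a driver calls after queuing a hardware copy: it attaches the fence to the BO's reservation object and then either creates a ghost object, pipelines the eviction, or waits and frees the old node, depending on the evict/pipeline flags and whether the source manager uses a TT. A minimal sketch of the call site; mydrv_finish_move and copy_fence are assumptions:

#include <linux/dma-fence.h>
#include <drm/ttm/ttm_bo_driver.h>

/* Hypothetical tail of an accelerated move: the blit has been queued on the
 * copy engine and copy_fence signals its completion. Passing pipeline=true
 * lets the old placement be released asynchronously behind the fence. */
static int mydrv_finish_move(struct ttm_buffer_object *bo,
                             struct dma_fence *copy_fence,
                             bool evict,
                             struct ttm_resource *new_mem)
{
        return ttm_bo_move_accel_cleanup(bo, copy_fence, evict, true, new_mem);
}
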
584 int ttm_bo_pipeline_gutting(struct ttm_buffer_object *bo) in ttm_bo_pipeline_gutting() argument
592 ret = ttm_resource_alloc(bo, &sys_mem, &sys_res); in ttm_bo_pipeline_gutting()
597 ret = ttm_bo_wait(bo, false, true); in ttm_bo_pipeline_gutting()
599 if (!bo->ttm) { in ttm_bo_pipeline_gutting()
601 ret = ttm_tt_create(bo, true); in ttm_bo_pipeline_gutting()
605 ttm_tt_unpopulate(bo->bdev, bo->ttm); in ttm_bo_pipeline_gutting()
606 if (bo->type == ttm_bo_type_device) in ttm_bo_pipeline_gutting()
607 ttm_tt_mark_for_clear(bo->ttm); in ttm_bo_pipeline_gutting()
609 ttm_resource_free(bo, &bo->resource); in ttm_bo_pipeline_gutting()
610 ttm_bo_assign_mem(bo, sys_res); in ttm_bo_pipeline_gutting()
622 ttm = bo->ttm; in ttm_bo_pipeline_gutting()
623 bo->ttm = NULL; in ttm_bo_pipeline_gutting()
624 ret = ttm_tt_create(bo, true); in ttm_bo_pipeline_gutting()
625 swap(bo->ttm, ttm); in ttm_bo_pipeline_gutting()
629 ret = ttm_buffer_object_transfer(bo, &ghost); in ttm_bo_pipeline_gutting()
633 ret = dma_resv_copy_fences(&ghost->base._resv, bo->base.resv); in ttm_bo_pipeline_gutting()
636 ttm_bo_wait(bo, false, false); in ttm_bo_pipeline_gutting()
640 bo->ttm = ttm; in ttm_bo_pipeline_gutting()
641 bo->resource = NULL; in ttm_bo_pipeline_gutting()
642 ttm_bo_assign_mem(bo, sys_res); in ttm_bo_pipeline_gutting()
646 ttm_tt_destroy(bo->bdev, ttm); in ttm_bo_pipeline_gutting()
649 ttm_resource_free(bo, &sys_res); in ttm_bo_pipeline_gutting()