/drivers/gpu/drm/qxl/
D | qxl_object.c |
     30  static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in qxl_ttm_bo_destroy() argument
     35  bo = container_of(tbo, struct qxl_bo, tbo);  in qxl_ttm_bo_destroy()
    112  r = ttm_bo_init(&qdev->mman.bdev, &bo->tbo, size, type,  in qxl_bo_create()
    136  r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);  in qxl_bo_kmap()
    148  struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];  in qxl_bo_kmap_atomic_page()
    153  if (bo->tbo.mem.mem_type == TTM_PL_VRAM)  in qxl_bo_kmap_atomic_page()
    155  else if (bo->tbo.mem.mem_type == TTM_PL_PRIV0)  in qxl_bo_kmap_atomic_page()
    161  ret = ttm_mem_io_reserve(bo->tbo.bdev, &bo->tbo.mem);  in qxl_bo_kmap_atomic_page()
    164  return io_mapping_map_atomic_wc(map, bo->tbo.mem.bus.offset + page_offset);  in qxl_bo_kmap_atomic_page()
    190  struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];  in qxl_bo_kunmap_atomic_page()
    [all …]
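The first two hits show the pattern every driver in this listing follows: the driver's BO type embeds the TTM object, and the destroy callback recovers the wrapper with container_of(). A minimal sketch of that pattern, using a hypothetical mydrv_bo type in place of qxl_bo:

#include <linux/kernel.h>
#include <linux/slab.h>
#include <drm/ttm/ttm_bo_api.h>

struct mydrv_bo {
	struct ttm_buffer_object tbo;	/* embedded, never a pointer */
	struct ttm_bo_kmap_obj kmap;	/* used by ttm_bo_kmap(), as in hit 136 */
};

/* TTM invokes this when the last reference drops; container_of()
 * walks back from the embedded member to the enclosing driver BO. */
static void mydrv_ttm_bo_destroy(struct ttm_buffer_object *tbo)
{
	struct mydrv_bo *bo = container_of(tbo, struct mydrv_bo, tbo);

	kfree(bo);
}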
D | qxl_object.h |
     34  r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, 0);  in qxl_bo_reserve()
     47  ttm_bo_unreserve(&bo->tbo);  in qxl_bo_unreserve()
     52  return bo->tbo.offset;  in qxl_bo_gpu_offset()
     57  return bo->tbo.num_pages << PAGE_SHIFT;  in qxl_bo_size()
     62  return !!atomic_read(&bo->tbo.reserved);  in qxl_bo_is_reserved()
     67  return bo->tbo.addr_space_offset;  in qxl_bo_mmap_offset()
     75  r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, 0);  in qxl_bo_wait()
     84  spin_lock(&bo->tbo.bdev->fence_lock);  in qxl_bo_wait()
     86  *mem_type = bo->tbo.mem.mem_type;  in qxl_bo_wait()
     87  if (bo->tbo.sync_obj)  in qxl_bo_wait()
    [all …]
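These hits are thin inline wrappers that forward to TTM through the embedded object. A sketch of three of them, with argument values copied from the listing (error-path printing omitted):

static inline int mydrv_bo_reserve(struct mydrv_bo *bo, bool no_wait)
{
	/* interruptible = true, no sequence-based reservation */
	return ttm_bo_reserve(&bo->tbo, true, no_wait, false, 0);
}

static inline void mydrv_bo_unreserve(struct mydrv_bo *bo)
{
	ttm_bo_unreserve(&bo->tbo);
}

static inline unsigned long mydrv_bo_size(struct mydrv_bo *bo)
{
	/* object size in bytes, derived from the backing page count */
	return bo->tbo.num_pages << PAGE_SHIFT;
}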
D | qxl_fence.c |
    58  spin_lock(&bo->tbo.bdev->fence_lock);  in qxl_fence_add_release()
    61  spin_unlock(&bo->tbo.bdev->fence_lock);  in qxl_fence_add_release()
    71  spin_lock(&bo->tbo.bdev->fence_lock);  in qxl_fence_remove_release()
    80  spin_unlock(&bo->tbo.bdev->fence_lock);  in qxl_fence_remove_release()
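Both functions bracket their release bookkeeping with the device-wide fence_lock, reached through the embedded object. The shape, with the bookkeeping itself left as a placeholder:

#include <linux/spinlock.h>

static void mydrv_fence_update(struct mydrv_bo *bo)
{
	spin_lock(&bo->tbo.bdev->fence_lock);
	/* ... per-BO fence bookkeeping, protected by the lock ... */
	spin_unlock(&bo->tbo.bdev->fence_lock);
}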
D | qxl_release.c |
     90  release->bos[i]->tbo.addr_space_offset  in qxl_release_free()
    255  if (!release->bos[i]->tbo.sync_obj)  in qxl_fence_releaseable()
    256  release->bos[i]->tbo.sync_obj = &release->bos[i]->fence;  in qxl_fence_releaseable()
D | qxl_cmd.c |
    514  …cmd->u.surface_create.data |= (new_mem->start << PAGE_SHIFT) + surf->tbo.bdev->man[new_mem->mem_ty…  in qxl_hw_surface_alloc()
    632  spin_lock(&surf->tbo.bdev->fence_lock);  in qxl_reap_surf()
    633  ret = ttm_bo_wait(&surf->tbo, true, true, !stall);  in qxl_reap_surf()
    634  spin_unlock(&surf->tbo.bdev->fence_lock);  in qxl_reap_surf()
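Hits 632-634 wait for a surface to go idle under the same fence_lock; at this kernel vintage ttm_bo_wait() takes (lazy, interruptible, no_wait) flags, so !stall turns the wait into a poll. A sketch:

static int mydrv_bo_wait_idle(struct mydrv_bo *bo, bool stall)
{
	int ret;

	spin_lock(&bo->tbo.bdev->fence_lock);
	ret = ttm_bo_wait(&bo->tbo, true, true, !stall);	/* lazy, interruptible */
	spin_unlock(&bo->tbo.bdev->fence_lock);
	return ret;
}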
D | qxl_debugfs.c |
    63  bo->tbo.sync_obj, bo->fence.num_active_releases);  in qxl_debugfs_buffers_info()
D | qxl_drv.h |
    109  struct ttm_buffer_object tbo;  member
    357  return slot->high_bits | (bo->tbo.offset + offset);  in qxl_bo_physical_address()
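Hit 109 is the embedded member itself; hit 357 shows why bo->tbo.offset matters: qxl composes a device-visible address from a memory slot's high bits and the BO's GPU offset. An illustrative sketch, with mydrv_memslot standing in for qxl's memslot type:

struct mydrv_memslot {
	u64 high_bits;	/* slot id, pre-shifted into the address high bits */
};

static u64 mydrv_bo_physical_address(struct mydrv_memslot *slot,
				     struct mydrv_bo *bo, unsigned long offset)
{
	/* same composition as hit 357 */
	return slot->high_bits | (bo->tbo.offset + offset);
}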
D | qxl_ttm.c |
    208  qbo = container_of(bo, struct qxl_bo, tbo);  in qxl_evict_flags()
    455  qbo = container_of(bo, struct qxl_bo, tbo);  in qxl_bo_move_notify()
D | qxl_ioctl.c |
    303  ret = ttm_bo_validate(&qobj->tbo, &qobj->placement,  in qxl_update_area_ioctl()
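The ioctl revalidates the object against its placement before use; ttm_bo_validate() migrates the BO if its current location no longer satisfies the placement. A sketch assuming the caller already holds the reservation:

static int mydrv_bo_validate(struct mydrv_bo *bo,
			     struct ttm_placement *placement)
{
	/* interruptible wait, GPU stalls allowed (no_wait_gpu = false) */
	return ttm_bo_validate(&bo->tbo, placement, true, false);
}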
/drivers/gpu/drm/radeon/
D | radeon_object.c |
     59  static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in radeon_ttm_bo_destroy() argument
     63  bo = container_of(tbo, struct radeon_bo, tbo);  in radeon_ttm_bo_destroy()
    152  r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type,  in radeon_bo_create()
    177  r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);  in radeon_bo_kmap()
    200  struct ttm_buffer_object *tbo;  in radeon_bo_unref() local
    206  tbo = &((*bo)->tbo);  in radeon_bo_unref()
    208  ttm_bo_unref(&tbo);  in radeon_bo_unref()
    210  if (tbo == NULL)  in radeon_bo_unref()
    253  r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);  in radeon_bo_pin_restricted()
    282  r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);  in radeon_bo_unpin()
    [all …]
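Hits 200-210 are the reference-drop idiom the smaller drivers below repeat almost verbatim: ttm_bo_unref() NULLs the pointer it is handed, so the driver passes a local alias to the embedded object and then clears the caller's pointer. A sketch mirroring the listing:

static void mydrv_bo_unref(struct mydrv_bo **bo)
{
	struct ttm_buffer_object *tbo;

	if ((*bo) == NULL)
		return;
	tbo = &((*bo)->tbo);
	ttm_bo_unref(&tbo);		/* NULLs the local alias */
	if (tbo == NULL)
		*bo = NULL;		/* don't leave a dangling caller pointer */
}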
D | radeon_object.h |
     59  ttm_bo_unreserve(&bo->tbo);  in radeon_bo_unreserve()
     73  return bo->tbo.offset;  in radeon_bo_gpu_offset()
     78  return bo->tbo.num_pages << PAGE_SHIFT;  in radeon_bo_size()
     83  return ttm_bo_is_reserved(&bo->tbo);  in radeon_bo_is_reserved()
     88  return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  in radeon_bo_ngpu_pages()
     93  return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  in radeon_bo_gpu_page_alignment()
    107  return bo->tbo.addr_space_offset;  in radeon_bo_mmap_offset()
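radeon's inline helpers add unit conversion on top of the same pattern: CPU pages to bytes, and bytes to GPU pages, since radeon's GPU page size need not equal the CPU's. A sketch of hit 88, with a hypothetical constant in place of RADEON_GPU_PAGE_SIZE:

#define MYDRV_GPU_PAGE_SIZE 4096	/* stand-in for RADEON_GPU_PAGE_SIZE */

static inline unsigned mydrv_bo_ngpu_pages(struct mydrv_bo *bo)
{
	return (bo->tbo.num_pages << PAGE_SHIFT) / MYDRV_GPU_PAGE_SIZE;
}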
D | radeon_prime.c |
    34  int npages = bo->tbo.num_pages;  in radeon_gem_prime_get_sg_table()
    36  return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);  in radeon_gem_prime_get_sg_table()
    44  ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,  in radeon_gem_prime_vmap()
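For PRIME export, radeon hands the TTM backing pages straight to the DRM helper that wraps them in a scatter/gather table. A sketch of hits 34-36:

#include <drm/drmP.h>

static struct sg_table *mydrv_prime_get_sg_table(struct mydrv_bo *bo)
{
	int npages = bo->tbo.num_pages;

	/* wrap the TTM page array for the importing device */
	return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);
}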
D | radeon_trace.h |
    25  __entry->pages = bo->tbo.num_pages;
D | radeon_cs.c |
    103  p->relocs[i].lobj.tv.bo = &p->relocs[i].robj->tbo;  in radeon_cs_parser_relocs()
    159  radeon_ib_sync_to(&p->ib, p->relocs[i].robj->tbo.sync_obj);  in radeon_cs_sync_rings()
    400  r = radeon_vm_bo_update_pte(rdev, vm, rdev->ring_tmp_bo.bo, &rdev->ring_tmp_bo.bo->tbo.mem);  in radeon_bo_vm_update_pte()
    406  r = radeon_vm_bo_update_pte(parser->rdev, vm, bo, &bo->tbo.mem);  in radeon_bo_vm_update_pte()
D | radeon_gem.c |
     45  drm_prime_gem_destroy(&robj->gem_base, robj->tbo.sg);  in radeon_gem_object_free()
    594  domain = radeon_mem_type_to_domain(rbo->tbo.mem.mem_type);  in radeon_debugfs_gem_info()
D | radeon_uvd.c |
    551  tv.bo = &bo->tbo;  in radeon_uvd_send_msg()
    563  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);  in radeon_uvd_send_msg()
D | radeon_display.c |
    375  spin_lock(&rbo->tbo.bdev->fence_lock);  in radeon_crtc_page_flip()
    376  if (rbo->tbo.sync_obj)  in radeon_crtc_page_flip()
    377  work->fence = radeon_fence_ref(rbo->tbo.sync_obj);  in radeon_crtc_page_flip()
    378  spin_unlock(&rbo->tbo.bdev->fence_lock);  in radeon_crtc_page_flip()
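The page-flip path grabs a reference to the BO's current sync object under fence_lock, so it can wait for rendering after the lock is dropped. A sketch with a hypothetical fence type and ref helper standing in for radeon_fence / radeon_fence_ref:

struct mydrv_fence;						/* hypothetical */
struct mydrv_fence *mydrv_fence_ref(struct mydrv_fence *f);	/* stand-in */

static struct mydrv_fence *mydrv_flip_grab_fence(struct mydrv_bo *bo)
{
	struct mydrv_fence *fence = NULL;

	spin_lock(&bo->tbo.bdev->fence_lock);
	if (bo->tbo.sync_obj)	/* sync_obj is a void * owned by the driver */
		fence = mydrv_fence_ref(bo->tbo.sync_obj);
	spin_unlock(&bo->tbo.bdev->fence_lock);
	return fence;
}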
D | radeon_pm.c |
    140  if (bo->tbo.mem.mem_type == TTM_PL_VRAM)  in radeon_unmap_vram_bos()
    141  ttm_bo_unmap_virtual(&bo->tbo);  in radeon_unmap_vram_bos()
D | radeon_ttm.c |
    189  rbo = container_of(bo, struct radeon_bo, tbo);  in radeon_evict_flags()
/drivers/gpu/drm/cirrus/
D | cirrus_main.c |
    273  struct ttm_buffer_object *tbo;  in cirrus_bo_unref() local
    278  tbo = &((*bo)->bo);  in cirrus_bo_unref()
    279  ttm_bo_unref(&tbo);  in cirrus_bo_unref()
    280  if (tbo == NULL)  in cirrus_bo_unref()
D | cirrus_ttm.c |
    95  static void cirrus_bo_ttm_destroy(struct ttm_buffer_object *tbo)  in cirrus_bo_ttm_destroy() argument
    99  bo = container_of(tbo, struct cirrus_bo, bo);  in cirrus_bo_ttm_destroy()
/drivers/gpu/drm/mgag200/
D | mgag200_main.c |
    309  struct ttm_buffer_object *tbo;  in mgag200_bo_unref() local
    314  tbo = &((*bo)->bo);  in mgag200_bo_unref()
    315  ttm_bo_unref(&tbo);  in mgag200_bo_unref()
    316  if (tbo == NULL)  in mgag200_bo_unref()
D | mgag200_ttm.c |
    95  static void mgag200_bo_ttm_destroy(struct ttm_buffer_object *tbo)  in mgag200_bo_ttm_destroy() argument
    99  bo = container_of(tbo, struct mgag200_bo, bo);  in mgag200_bo_ttm_destroy()
/drivers/gpu/drm/ast/
D | ast_main.c |
    467  struct ttm_buffer_object *tbo;  in ast_bo_unref() local
    472  tbo = &((*bo)->bo);  in ast_bo_unref()
    473  ttm_bo_unref(&tbo);  in ast_bo_unref()
    474  if (tbo == NULL)  in ast_bo_unref()
D | ast_ttm.c |
    95  static void ast_bo_ttm_destroy(struct ttm_buffer_object *tbo)  in ast_bo_ttm_destroy() argument
    99  bo = container_of(tbo, struct ast_bo, bo);  in ast_bo_ttm_destroy()
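The cirrus, mgag200, and ast hits are the same two idioms already sketched for qxl and radeon above (container_of() in the destroy callback, and the local-alias unref); the only difference is that these drivers name the embedded member bo rather than tbo.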