/drivers/gpu/drm/ttm/ |
D | ttm_tt.c |
     51  static void ttm_tt_alloc_page_directory(struct ttm_tt *ttm)   in ttm_tt_alloc_page_directory() argument
     53      ttm->pages = drm_calloc_large(ttm->num_pages, sizeof(void*));   in ttm_tt_alloc_page_directory()
     56  static void ttm_dma_tt_alloc_page_directory(struct ttm_dma_tt *ttm)   in ttm_dma_tt_alloc_page_directory() argument
     58      ttm->ttm.pages = drm_calloc_large(ttm->ttm.num_pages,   in ttm_dma_tt_alloc_page_directory()
     59              sizeof(*ttm->ttm.pages) +   in ttm_dma_tt_alloc_page_directory()
     60              sizeof(*ttm->dma_address) +   in ttm_dma_tt_alloc_page_directory()
     61              sizeof(*ttm->cpu_address));   in ttm_dma_tt_alloc_page_directory()
     62      ttm->cpu_address = (void *) (ttm->ttm.pages + ttm->ttm.num_pages);   in ttm_dma_tt_alloc_page_directory()
     63      ttm->dma_address = (void *) (ttm->cpu_address + ttm->ttm.num_pages);   in ttm_dma_tt_alloc_page_directory()
    106  static int ttm_tt_set_caching(struct ttm_tt *ttm,   in ttm_tt_set_caching() argument
    [all …]
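The ttm_dma_tt_alloc_page_directory() hits above show TTM's single-allocation layout: one drm_calloc_large() call is sized for three parallel arrays, and cpu_address and dma_address are carved out of the tail behind pages. A minimal user-space model of the same carving, with simplified stand-in types in place of the kernel's (struct page is left opaque, calloc() stands in for drm_calloc_large()):

    #include <stdlib.h>

    struct page;                       /* opaque, as in the kernel */
    typedef unsigned long dma_addr_t;  /* stand-in for the kernel typedef */

    struct dma_tt {
        unsigned long num_pages;
        struct page **pages;       /* array 1: page pointers          */
        void **cpu_address;        /* array 2: CPU virtual addresses  */
        dma_addr_t *dma_address;   /* array 3: bus addresses          */
    };

    static int dma_tt_alloc_page_directory(struct dma_tt *ttm)
    {
        /* one zeroed block sized for all three arrays at once */
        ttm->pages = calloc(ttm->num_pages,
                            sizeof(*ttm->pages) +
                            sizeof(*ttm->cpu_address) +
                            sizeof(*ttm->dma_address));
        if (!ttm->pages)
            return -1;
        /* each further array starts right behind the previous one */
        ttm->cpu_address = (void *)(ttm->pages + ttm->num_pages);
        ttm->dma_address = (void *)(ttm->cpu_address + ttm->num_pages);
        return 0;
    }

Because all three arrays live in one block, freeing the pages pointer alone is enough to release everything on teardown.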
|
D | ttm_agp_backend.c |
     46      struct ttm_tt ttm;   member
     51  static int ttm_agp_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem)   in ttm_agp_bind() argument
     53      struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm);   in ttm_agp_bind()
     59      mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY);   in ttm_agp_bind()
     64      for (i = 0; i < ttm->num_pages; i++) {   in ttm_agp_bind()
     65          struct page *page = ttm->pages[i];   in ttm_agp_bind()
     68              page = ttm->dummy_read_page;   in ttm_agp_bind()
     84  static int ttm_agp_unbind(struct ttm_tt *ttm)   in ttm_agp_unbind() argument
     86      struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm);   in ttm_agp_unbind()
     97  static void ttm_agp_destroy(struct ttm_tt *ttm)   in ttm_agp_destroy() argument
    [all …]
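ttm_agp_bind() recovers the driver's wrapper object from the core ttm_tt pointer via container_of(), which works no matter where the embedded member sits in the wrapper. A self-contained sketch of the idiom, using a local macro and dummy types rather than the kernel's:

    #include <assert.h>
    #include <stddef.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct ttm_tt { unsigned long num_pages; };

    struct agp_backend {
        void *bridge;        /* driver state placed before the member */
        struct ttm_tt ttm;   /* embedded core object, offset != 0     */
    };

    int main(void)
    {
        struct agp_backend be = { .bridge = (void *)0x1 };
        struct ttm_tt *ttm = &be.ttm;   /* what the core hands back */

        /* subtracting the member offset recovers the wrapper */
        assert(container_of(ttm, struct agp_backend, ttm) == &be);
        return 0;
    }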
|
D | ttm_bo_util.c |
     51      struct ttm_tt *ttm = bo->ttm;   in ttm_bo_move_ttm() local
     56      ttm_tt_unbind(ttm);   in ttm_bo_move_ttm()
     63      ret = ttm_tt_set_placement_caching(ttm, new_mem->placement);   in ttm_bo_move_ttm()
     68      ret = ttm_tt_bind(ttm, new_mem);   in ttm_bo_move_ttm()
    250  static int ttm_copy_io_ttm_page(struct ttm_tt *ttm, void *src,   in ttm_copy_io_ttm_page() argument
    254      struct page *d = ttm->pages[page];   in ttm_copy_io_ttm_page()
    287  static int ttm_copy_ttm_io_page(struct ttm_tt *ttm, void *dst,   in ttm_copy_ttm_io_page() argument
    291      struct page *s = ttm->pages[page];   in ttm_copy_ttm_io_page()
    329      struct ttm_tt *ttm = bo->ttm;   in ttm_bo_move_memcpy() local
    357      (ttm == NULL || (ttm->state == tt_unpopulated &&   in ttm_bo_move_memcpy()
    [all …]
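The ttm_bo_move_ttm() hits reduce to a fixed three-step sequence: unbind from the old placement, switch caching attributes to match the new one, rebind. A user-space model of that flow, with stub helpers standing in for the kernel calls:

    struct tt { int bound; int caching; };

    static void tt_unbind(struct tt *t)             { t->bound = 0; }
    static int  tt_set_caching(struct tt *t, int c) { t->caching = c; return 0; }
    static int  tt_bind(struct tt *t)               { t->bound = 1; return 0; }

    static int move_ttm(struct tt *t, int new_caching)
    {
        int ret;

        tt_unbind(t);                   /* leave the old GART binding */
        ret = tt_set_caching(t, new_caching);
        if (ret)
            return ret;                 /* caching must match placement */
        return tt_bind(t);              /* rebind at the new location */
    }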
|
D | ttm_page_alloc.c |
    864  int ttm_pool_populate(struct ttm_tt *ttm)   in ttm_pool_populate() argument
    866      struct ttm_mem_global *mem_glob = ttm->glob->mem_glob;   in ttm_pool_populate()
    870      if (ttm->state != tt_unpopulated)   in ttm_pool_populate()
    873      for (i = 0; i < ttm->num_pages; ++i) {   in ttm_pool_populate()
    874          ret = ttm_get_pages(&ttm->pages[i], 1,   in ttm_pool_populate()
    875                  ttm->page_flags,   in ttm_pool_populate()
    876                  ttm->caching_state);   in ttm_pool_populate()
    878              ttm_pool_unpopulate(ttm);   in ttm_pool_populate()
    882          ret = ttm_mem_global_alloc_page(mem_glob, ttm->pages[i],   in ttm_pool_populate()
    885              ttm_pool_unpopulate(ttm);   in ttm_pool_populate()
    [all …]
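ttm_pool_populate() fills the page array one entry at a time and, on any failure, unwinds everything through the unpopulate path so a ttm_tt is never left half-populated. A compilable model of that error-unwinding shape; malloc()/free() stand in for ttm_get_pages() and the pool return path:

    #include <stdlib.h>

    struct tt { unsigned long num_pages; void **pages; };

    static void pool_unpopulate(struct tt *ttm)
    {
        unsigned long i;

        for (i = 0; i < ttm->num_pages; ++i) {
            free(ttm->pages[i]);        /* free(NULL) is a safe no-op */
            ttm->pages[i] = NULL;
        }
    }

    static int pool_populate(struct tt *ttm)
    {
        unsigned long i;

        for (i = 0; i < ttm->num_pages; ++i) {
            ttm->pages[i] = malloc(4096);   /* stand-in for ttm_get_pages() */
            if (!ttm->pages[i]) {
                pool_unpopulate(ttm);       /* full unwind, as in TTM */
                return -1;
            }
        }
        return 0;
    }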
|
D | ttm_page_alloc_dma.c |
    849      struct ttm_tt *ttm = &ttm_dma->ttm;   in ttm_dma_pool_get_pages() local
    857      ttm->pages[index] = d_page->p;   in ttm_dma_pool_get_pages()
    875      struct ttm_tt *ttm = &ttm_dma->ttm;   in ttm_dma_populate() local
    876      struct ttm_mem_global *mem_glob = ttm->glob->mem_glob;   in ttm_dma_populate()
    883      if (ttm->state != tt_unpopulated)   in ttm_dma_populate()
    886      type = ttm_to_type(ttm->page_flags, ttm->caching_state);   in ttm_dma_populate()
    887      if (ttm->page_flags & TTM_PAGE_FLAG_DMA32)   in ttm_dma_populate()
    891      if (ttm->page_flags & TTM_PAGE_FLAG_ZERO_ALLOC)   in ttm_dma_populate()
    903      for (i = 0; i < ttm->num_pages; ++i) {   in ttm_dma_populate()
    910          ret = ttm_mem_global_alloc_page(mem_glob, ttm->pages[i],   in ttm_dma_populate()
    [all …]
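Before its allocation loop, ttm_dma_populate() folds the page flags and caching state into a pool selector (ttm_to_type() plus the TTM_PAGE_FLAG_DMA32 and TTM_PAGE_FLAG_ZERO_ALLOC tests above). A sketch of that selection step; the flag bits, enum, and key layout here are illustrative, not the kernel's:

    enum pool_caching { POOL_CACHED, POOL_WC, POOL_UC };

    #define FLAG_DMA32      (1u << 0)   /* illustrative bit values */
    #define FLAG_ZERO_ALLOC (1u << 1)

    struct pool_key {
        enum pool_caching caching;
        int dma32;   /* allocate below 4 GiB for 32-bit DMA devices */
        int zero;    /* hand out zeroed pages */
    };

    static struct pool_key pick_pool(unsigned page_flags,
                                     enum pool_caching caching)
    {
        struct pool_key key = { .caching = caching, .dma32 = 0, .zero = 0 };

        if (page_flags & FLAG_DMA32)
            key.dma32 = 1;
        if (page_flags & FLAG_ZERO_ALLOC)
            key.zero = 1;
        return key;
    }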
|
D | ttm_bo_vm.c |
     97      struct ttm_tt *ttm = NULL;   in ttm_bo_vm_fault() local
    141      if (bo->ttm && (bo->ttm->page_flags & TTM_PAGE_FLAG_SG)) {   in ttm_bo_vm_fault()
    211      ttm = bo->ttm;   in ttm_bo_vm_fault()
    216      if (ttm->bdev->driver->ttm_tt_populate(ttm)) {   in ttm_bo_vm_fault()
    230      page = ttm->pages[page_offset];   in ttm_bo_vm_fault()
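The fault path above populates the page array lazily on first access and then indexes it with the page offset of the faulting address. A condensed user-space model; the return values noted in comments correspond to the kernel's fault codes:

    #include <stdlib.h>

    struct tt { unsigned long num_pages; void **pages; int populated; };

    static int tt_populate(struct tt *ttm)
    {
        unsigned long i;

        if (ttm->populated)
            return 0;   /* idempotent, like the driver populate hook */
        for (i = 0; i < ttm->num_pages; ++i)
            if (!(ttm->pages[i] = calloc(1, 4096)))
                return -1;   /* unwinding elided for brevity */
        ttm->populated = 1;
        return 0;
    }

    static void *fault_page(struct tt *ttm, unsigned long page_offset)
    {
        if (tt_populate(ttm))
            return NULL;    /* VM_FAULT_OOM in the kernel path */
        if (page_offset >= ttm->num_pages)
            return NULL;    /* out of range: VM_FAULT_SIGBUS */
        return ttm->pages[page_offset];
    }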
|
D | ttm_bo.c |
    150      if (bo->ttm)   in ttm_bo_release_list()
    151          ttm_tt_destroy(bo->ttm);   in ttm_bo_release_list()
    179      if (bo->ttm != NULL) {   in ttm_bo_add_to_lru()
    242          bo->ttm = NULL;   in ttm_bo_add_ttm()
    252          bo->ttm = bdev->driver->ttm_tt_create(bdev, bo->num_pages << PAGE_SHIFT,   in ttm_bo_add_ttm()
    254          if (unlikely(bo->ttm == NULL))   in ttm_bo_add_ttm()
    258          bo->ttm = bdev->driver->ttm_tt_create(bdev, bo->num_pages << PAGE_SHIFT,   in ttm_bo_add_ttm()
    261          if (unlikely(bo->ttm == NULL)) {   in ttm_bo_add_ttm()
    265          bo->ttm->sg = bo->sg;   in ttm_bo_add_ttm()
    302      if (bo->ttm == NULL) {   in ttm_bo_handle_move_mem()
    [all …]
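ttm_bo_add_ttm() picks one of three outcomes from the placement: no ttm_tt at all for fixed (VRAM-like) memory, a plain one for system placements, and one carrying the imported scatter/gather table for dma-buf objects (line 265). A condensed model; the enum and calloc() allocation are illustrative stand-ins:

    #include <stdlib.h>

    enum placement { PL_FIXED, PL_SYSTEM, PL_SG };

    struct tt { void *sg; };

    static struct tt *add_ttm(enum placement pl, void *imported_sg)
    {
        struct tt *ttm = NULL;

        switch (pl) {
        case PL_FIXED:        /* VRAM-like: never reached via CPU pages */
            break;
        case PL_SYSTEM:       /* plain system pages */
            ttm = calloc(1, sizeof(*ttm));
            break;
        case PL_SG:           /* dma-buf import: remember the sg table */
            ttm = calloc(1, sizeof(*ttm));
            if (ttm)
                ttm->sg = imported_sg;
            break;
        }
        return ttm;
    }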
|
D | Makefile |
      5  ttm-y := ttm_agp_backend.o ttm_memory.o ttm_tt.o ttm_bo.o \
     10  obj-$(CONFIG_DRM_TTM) += ttm.o
|
/drivers/gpu/drm/nouveau/ |
D | nouveau_sgdma.c |
     11      struct ttm_dma_tt ttm;   member
     17  nouveau_sgdma_destroy(struct ttm_tt *ttm)   in nouveau_sgdma_destroy() argument
     19      struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;   in nouveau_sgdma_destroy()
     21      if (ttm) {   in nouveau_sgdma_destroy()
     22          ttm_dma_tt_fini(&nvbe->ttm);   in nouveau_sgdma_destroy()
     28  nv04_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *mem)   in nv04_sgdma_bind() argument
     30      struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;   in nv04_sgdma_bind()
     33      if (ttm->sg) {   in nv04_sgdma_bind()
     34          node->sg = ttm->sg;   in nv04_sgdma_bind()
     38          node->pages = nvbe->ttm.dma_address;   in nv04_sgdma_bind()
    [all …]
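Where the AGP backend used container_of(), nouveau casts the ttm_tt pointer straight to its wrapper ((struct nouveau_sgdma_be *)ttm). That is valid only because the embedded ttm_dma_tt, whose own first member is a ttm_tt, sits first in the wrapper, so all three objects share an address. A sketch contrasting the cast with the offset-independent container_of() form, using dummy types:

    #include <assert.h>
    #include <stddef.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct ttm_tt { int state; };
    struct ttm_dma_tt { struct ttm_tt ttm; };   /* base nested first */

    struct sgdma_be {
        struct ttm_dma_tt ttm;   /* MUST stay first for the cast below */
        void *node;
    };

    int main(void)
    {
        struct sgdma_be be = { .node = 0 };
        struct ttm_tt *ttm = &be.ttm.ttm;

        /* legal only while offsetof(struct sgdma_be, ttm) == 0 */
        assert((struct sgdma_be *)(void *)ttm == &be);
        /* container_of() works regardless of member position */
        assert(container_of(ttm, struct sgdma_be, ttm.ttm) == &be);
        return 0;
    }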
|
D | nouveau_ttm.c |
    287      return ttm_bo_mmap(filp, vma, &drm->ttm.bdev);   in nouveau_ttm_mmap()
    308      global_ref = &drm->ttm.mem_global_ref;   in nouveau_ttm_global_init()
    317          drm->ttm.mem_global_ref.release = NULL;   in nouveau_ttm_global_init()
    321      drm->ttm.bo_global_ref.mem_glob = global_ref->object;   in nouveau_ttm_global_init()
    322      global_ref = &drm->ttm.bo_global_ref.ref;   in nouveau_ttm_global_init()
    331          drm_global_item_unref(&drm->ttm.mem_global_ref);   in nouveau_ttm_global_init()
    332          drm->ttm.mem_global_ref.release = NULL;   in nouveau_ttm_global_init()
    342      if (drm->ttm.mem_global_ref.release == NULL)   in nouveau_ttm_global_release()
    345      drm_global_item_unref(&drm->ttm.bo_global_ref.ref);   in nouveau_ttm_global_release()
    346      drm_global_item_unref(&drm->ttm.mem_global_ref);   in nouveau_ttm_global_release()
    [all …]
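nouveau_ttm_global_init()/_release() show the global-reference pattern that ast_ttm.c, cirrus_ttm.c, mgag200_ttm.c, and bochs_mm.c repeat below almost verbatim: take the memory global ref, then the BO global ref on top of it, unwind in reverse order on failure, and reuse mem_global_ref.release == NULL as the "never initialized" flag. A condensed model of that flag idiom with stub ref-taking helpers (not the drm_global API):

    /* stand-ins: taking a ref can fail, dropping one cannot */
    static int  take_ref(const char *name) { (void)name; return 0; }
    static void drop_ref(const char *name) { (void)name; }

    static void mem_release_fn(void) { }

    struct globals { void (*mem_release)(void); };

    static int globals_init(struct globals *g)
    {
        if (take_ref("mem") != 0) {
            g->mem_release = 0;          /* flag stays: never initialized */
            return -1;
        }
        g->mem_release = mem_release_fn; /* non-NULL marks success */

        if (take_ref("bo") != 0) {
            drop_ref("mem");             /* unwind in reverse order */
            g->mem_release = 0;          /* and clear the flag again */
            return -1;
        }
        return 0;
    }

    static void globals_release(struct globals *g)
    {
        if (g->mem_release == 0)
            return;                      /* init never succeeded */
        drop_ref("bo");
        drop_ref("mem");
        g->mem_release = 0;
    }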
|
D | nouveau_bo.c |
    215      nvbo->bo.bdev = &drm->ttm.bdev;   in nouveau_bo_new()
    227      acc_size = ttm_bo_dma_acc_size(&drm->ttm.bdev, size,   in nouveau_bo_new()
    230      ret = ttm_bo_init(&drm->ttm.bdev, &nvbo->bo, size,   in nouveau_bo_new()
    957      struct nouveau_channel *chan = drm->ttm.chan;   in nouveau_bo_move_m2mf()
    975      ret = drm->ttm.move(chan, bo, &bo->mem, new_mem);   in nouveau_bo_move_m2mf()
   1032          &drm->ttm.copy);   in nouveau_bo_move_init()
   1034          ret = mthd->init(chan, drm->ttm.copy.handle);   in nouveau_bo_move_init()
   1036          nvif_object_fini(&drm->ttm.copy);   in nouveau_bo_move_init()
   1040      drm->ttm.move = mthd->exec;   in nouveau_bo_move_init()
   1041      drm->ttm.chan = chan;   in nouveau_bo_move_init()
    [all …]
|
D | nouveau_ttm.h |
      7      return container_of(bd, struct nouveau_drm, ttm.bdev);   in nouveau_bdev()
|
D | nouveau_prime.c |
     36      return drm_prime_pages_to_sg(nvbo->bo.ttm->pages, npages);   in nouveau_gem_prime_get_sg_table()
|
D | nouveau_drm.h |
    131      } ttm;   member
|
/drivers/gpu/drm/radeon/ |
D | radeon_ttm.c |
    215      if (radeon_ttm_tt_has_userptr(bo->ttm))   in radeon_verify_access()
    314      r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement);   in radeon_move_vram_ram()
    319      r = ttm_tt_bind(bo->ttm, &tmp_mem);   in radeon_move_vram_ram()
    383      if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {   in radeon_bo_move()
    497      struct ttm_dma_tt ttm;   member
    507  static int radeon_ttm_tt_pin_userptr(struct ttm_tt *ttm)   in radeon_ttm_tt_pin_userptr() argument
    509      struct radeon_device *rdev = radeon_get_rdev(ttm->bdev);   in radeon_ttm_tt_pin_userptr()
    510      struct radeon_ttm_tt *gtt = (void *)ttm;   in radeon_ttm_tt_pin_userptr()
    524      unsigned long end = gtt->userptr + ttm->num_pages * PAGE_SIZE;   in radeon_ttm_tt_pin_userptr()
    532          unsigned num_pages = ttm->num_pages - pinned;   in radeon_ttm_tt_pin_userptr()
    [all …]
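radeon_ttm_tt_pin_userptr() pins the user address range in batches, with line 532 computing how many pages are still outstanding on each pass. A user-space model of that loop shape; pin_batch() is a fake stand-in and deliberately not get_user_pages(), whose signature varies across kernel versions:

    /* fake pin: fills up to n entries, returns how many it pinned */
    static long pin_batch(void **pages, unsigned long n)
    {
        unsigned long i;

        for (i = 0; i < n; i++)
            pages[i] = (void *)(i + 1);   /* dummy page handles */
        return (long)n;   /* the real call may pin fewer than asked */
    }

    static int pin_user_range(void **pages, unsigned long num_pages)
    {
        unsigned long pinned = 0;

        if (!num_pages)
            return 0;
        do {
            long r = pin_batch(pages + pinned, num_pages - pinned);

            if (r <= 0)
                return -1;   /* caller unpins what is already held */
            pinned += (unsigned long)r;
        } while (pinned < num_pages);
        return 0;
    }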
|
D | radeon_prime.c |
     37      return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);   in radeon_gem_prime_get_sg_table()
    124      if (radeon_ttm_tt_has_userptr(bo->tbo.ttm))   in radeon_gem_prime_export()
|
D | radeon_gem.c |
    322      r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);   in radeon_gem_userptr_ioctl()
    411      if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) {   in radeon_mode_dumb_mmap()
    641      if (radeon_ttm_tt_has_userptr(robj->tbo.ttm))   in radeon_gem_op_ioctl()
|
/drivers/gpu/drm/vmwgfx/ |
D | vmwgfx_buffer.c |
    394      vsgt->pages = vmw_tt->dma_ttm.ttm.pages;   in vmw_ttm_map_dma()
    395      vsgt->num_pages = vmw_tt->dma_ttm.ttm.num_pages;   in vmw_ttm_map_dma()
    504          container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);   in vmw_bo_map_dma()
    521          container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);   in vmw_bo_unmap_dma()
    541          container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);   in vmw_bo_sg_table()
    547  static int vmw_ttm_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem)   in vmw_ttm_bind() argument
    550          container_of(ttm, struct vmw_ttm_tt, dma_ttm.ttm);   in vmw_ttm_bind()
    563          ttm->num_pages, vmw_be->gmr_id);   in vmw_ttm_bind()
    567          vmw_mob_create(ttm->num_pages);   in vmw_ttm_bind()
    573          &vmw_be->vsgt, ttm->num_pages,   in vmw_ttm_bind()
    [all …]
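The vmwgfx hits resolve container_of() through a nested member path (dma_ttm.ttm), i.e. two levels of embedding in one step. A minimal, runnable demonstration that offsetof(), and therefore container_of(), accepts such a member designator; the types are stand-ins:

    #include <assert.h>
    #include <stddef.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct ttm_tt { int dummy; };
    struct dma_tt { struct ttm_tt ttm; };
    struct vmw_tt { int gmr_id; struct dma_tt dma_ttm; };

    int main(void)
    {
        struct vmw_tt vmw = { .gmr_id = 7 };
        struct ttm_tt *base = &vmw.dma_ttm.ttm;

        /* two levels of embedding resolve in one container_of() */
        assert(container_of(base, struct vmw_tt, dma_ttm.ttm) == &vmw);
        return 0;
    }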
|
/drivers/gpu/drm/ast/ |
D | ast_ttm.c |
     35      return container_of(bd, struct ast_private, ttm.bdev);   in ast_bdev()
     55      global_ref = &ast->ttm.mem_global_ref;   in ast_ttm_global_init()
     67      ast->ttm.bo_global_ref.mem_glob =   in ast_ttm_global_init()
     68          ast->ttm.mem_global_ref.object;   in ast_ttm_global_init()
     69      global_ref = &ast->ttm.bo_global_ref.ref;   in ast_ttm_global_init()
     77          drm_global_item_unref(&ast->ttm.mem_global_ref);   in ast_ttm_global_init()
     86      if (ast->ttm.mem_global_ref.release == NULL)   in ast_ttm_global_release()
     89      drm_global_item_unref(&ast->ttm.bo_global_ref.ref);   in ast_ttm_global_release()
     90      drm_global_item_unref(&ast->ttm.mem_global_ref);   in ast_ttm_global_release()
     91      ast->ttm.mem_global_ref.release = NULL;   in ast_ttm_global_release()
    [all …]
|
/drivers/gpu/drm/cirrus/ |
D | cirrus_ttm.c |
     35      return container_of(bd, struct cirrus_device, ttm.bdev);   in cirrus_bdev()
     55      global_ref = &cirrus->ttm.mem_global_ref;   in cirrus_ttm_global_init()
     67      cirrus->ttm.bo_global_ref.mem_glob =   in cirrus_ttm_global_init()
     68          cirrus->ttm.mem_global_ref.object;   in cirrus_ttm_global_init()
     69      global_ref = &cirrus->ttm.bo_global_ref.ref;   in cirrus_ttm_global_init()
     77          drm_global_item_unref(&cirrus->ttm.mem_global_ref);   in cirrus_ttm_global_init()
     86      if (cirrus->ttm.mem_global_ref.release == NULL)   in cirrus_ttm_global_release()
     89      drm_global_item_unref(&cirrus->ttm.bo_global_ref.ref);   in cirrus_ttm_global_release()
     90      drm_global_item_unref(&cirrus->ttm.mem_global_ref);   in cirrus_ttm_global_release()
     91      cirrus->ttm.mem_global_ref.release = NULL;   in cirrus_ttm_global_release()
    [all …]
|
/drivers/gpu/drm/mgag200/ |
D | mgag200_ttm.c |
     35      return container_of(bd, struct mga_device, ttm.bdev);   in mgag200_bdev()
     55      global_ref = &ast->ttm.mem_global_ref;   in mgag200_ttm_global_init()
     67      ast->ttm.bo_global_ref.mem_glob =   in mgag200_ttm_global_init()
     68          ast->ttm.mem_global_ref.object;   in mgag200_ttm_global_init()
     69      global_ref = &ast->ttm.bo_global_ref.ref;   in mgag200_ttm_global_init()
     77          drm_global_item_unref(&ast->ttm.mem_global_ref);   in mgag200_ttm_global_init()
     86      if (ast->ttm.mem_global_ref.release == NULL)   in mgag200_ttm_global_release()
     89      drm_global_item_unref(&ast->ttm.bo_global_ref.ref);   in mgag200_ttm_global_release()
     90      drm_global_item_unref(&ast->ttm.mem_global_ref);   in mgag200_ttm_global_release()
     91      ast->ttm.mem_global_ref.release = NULL;   in mgag200_ttm_global_release()
    [all …]
|
/drivers/gpu/drm/qxl/ |
D | qxl_ttm.c |
    258      struct ttm_dma_tt ttm;   member
    263  static int qxl_ttm_backend_bind(struct ttm_tt *ttm,   in qxl_ttm_backend_bind() argument
    266      struct qxl_ttm_tt *gtt = (void *)ttm;   in qxl_ttm_backend_bind()
    269      if (!ttm->num_pages) {   in qxl_ttm_backend_bind()
    271          ttm->num_pages, bo_mem, ttm);   in qxl_ttm_backend_bind()
    277  static int qxl_ttm_backend_unbind(struct ttm_tt *ttm)   in qxl_ttm_backend_unbind() argument
    283  static void qxl_ttm_backend_destroy(struct ttm_tt *ttm)   in qxl_ttm_backend_destroy() argument
    285      struct qxl_ttm_tt *gtt = (void *)ttm;   in qxl_ttm_backend_destroy()
    287      ttm_dma_tt_fini(&gtt->ttm);   in qxl_ttm_backend_destroy()
    297  static int qxl_ttm_tt_populate(struct ttm_tt *ttm)   in qxl_ttm_tt_populate() argument
    [all …]
|
/drivers/gpu/drm/bochs/ |
D | bochs_mm.c |
     16      return container_of(bd, struct bochs_device, ttm.bdev);   in bochs_bdev()
     34      global_ref = &bochs->ttm.mem_global_ref;   in bochs_ttm_global_init()
     46      bochs->ttm.bo_global_ref.mem_glob =   in bochs_ttm_global_init()
     47          bochs->ttm.mem_global_ref.object;   in bochs_ttm_global_init()
     48      global_ref = &bochs->ttm.bo_global_ref.ref;   in bochs_ttm_global_init()
     56      drm_global_item_unref(&bochs->ttm.mem_global_ref);   in bochs_ttm_global_init()
     65      if (bochs->ttm.mem_global_ref.release == NULL)   in bochs_ttm_global_release()
     68      drm_global_item_unref(&bochs->ttm.bo_global_ref.ref);   in bochs_ttm_global_release()
     69      drm_global_item_unref(&bochs->ttm.mem_global_ref);   in bochs_ttm_global_release()
     70      bochs->ttm.mem_global_ref.release = NULL;   in bochs_ttm_global_release()
    [all …]
|
D | bochs.h |
     86      } ttm;   member
|
/drivers/gpu/drm/ |
D | Makefile |
     37  obj-$(CONFIG_DRM_TTM) += ttm/
|