Searched refs:user_pages (Results 1 – 7 of 7) sorted by relevance
    71  acd->user_pages = kzalloc(sizeof(struct page *) * acd->page_count, GFP_KERNEL); in kpc_dma_transfer()
    72  if (!acd->user_pages) { in kpc_dma_transfer()
    80  …rv = get_user_pages(iov_base, acd->page_count, FOLL_TOUCH | FOLL_WRITE | FOLL_GET, acd->user_pages… in kpc_dma_transfer()
    88  …rv = sg_alloc_table_from_pages(&acd->sgt, acd->user_pages, acd->page_count, iov_base & (PAGE_SIZE-… in kpc_dma_transfer()
   194  put_page(acd->user_pages[i]); in kpc_dma_transfer()
   197  kfree(acd->user_pages); in kpc_dma_transfer()
   209  BUG_ON(acd->user_pages == NULL); in transfer_complete_cb()
   215  if (!PageReserved(acd->user_pages[i])) { in transfer_complete_cb()
   216  set_page_dirty(acd->user_pages[i]); in transfer_complete_cb()
   223  put_page(acd->user_pages[i]); in transfer_complete_cb()
[all …]
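These kpc_dma hits trace the usual pin/map/dirty/release lifecycle for a userspace DMA buffer: allocate a page-pointer array, pin it with get_user_pages(), wrap the pinned pages in a scatter-gather table, then on completion mark the pages dirty and drop the references. Below is a minimal sketch of that flow; the demo_* names are hypothetical, FOLL_GET is left implicit, and the four-argument get_user_pages() form (no vmas parameter) is assumed, since the signature has changed across kernel versions.

/*
 * Illustrative sketch only: demo_* names are hypothetical and do not exist
 * in the kernel; the no-vmas get_user_pages() signature is assumed.
 */
#include <linux/mm.h>
#include <linux/sched.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct demo_desc {
	struct page **user_pages;
	unsigned long page_count;
	struct sg_table sgt;
};

static int demo_pin_user_buf(struct demo_desc *d, unsigned long iov_base,
			     size_t iov_len)
{
	long pinned = 0;
	long i;
	int rv;

	d->page_count = (iov_len + (iov_base & ~PAGE_MASK) + PAGE_SIZE - 1)
			>> PAGE_SHIFT;
	d->user_pages = kcalloc(d->page_count, sizeof(struct page *),
				GFP_KERNEL);
	if (!d->user_pages)
		return -ENOMEM;

	/* Pin the user pages so they stay resident while the DMA runs. */
	mmap_read_lock(current->mm);
	pinned = get_user_pages(iov_base, d->page_count,
				FOLL_TOUCH | FOLL_WRITE, d->user_pages);
	mmap_read_unlock(current->mm);
	if (pinned != d->page_count) {
		rv = -EFAULT;
		goto err_put;
	}

	/* Describe the pinned pages as a scatter-gather table for mapping. */
	rv = sg_alloc_table_from_pages(&d->sgt, d->user_pages, d->page_count,
				       iov_base & (PAGE_SIZE - 1), iov_len,
				       GFP_KERNEL);
	if (rv)
		goto err_put;
	return 0;

err_put:
	for (i = 0; i < pinned; i++)
		put_page(d->user_pages[i]);
	kfree(d->user_pages);
	d->user_pages = NULL;
	return rv;
}

/* Completion side, mirroring transfer_complete_cb(): dirty, then release. */
static void demo_release_user_buf(struct demo_desc *d, bool was_write)
{
	unsigned long i;

	for (i = 0; i < d->page_count; i++) {
		if (was_write && !PageReserved(d->user_pages[i]))
			set_page_dirty(d->user_pages[i]);
		put_page(d->user_pages[i]);
	}
	sg_free_table(&d->sgt);
	kfree(d->user_pages);
	d->user_pages = NULL;
}

Dirtying only non-reserved pages before put_page() mirrors the transfer_complete_cb() hits above, so writeback stays consistent after the device has written into the buffer.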
92 struct page **user_pages; member
   546  lobj->user_invalidated && lobj->user_pages) { in amdgpu_cs_list_validate()
   554  lobj->user_pages); in amdgpu_cs_list_validate()
   564  kvfree(lobj->user_pages); in amdgpu_cs_list_validate()
   565  lobj->user_pages = NULL; in amdgpu_cs_list_validate()
   624  e->user_pages = kvmalloc_array(bo->tbo.ttm->num_pages, in amdgpu_cs_parser_bos()
   627  if (!e->user_pages) { in amdgpu_cs_parser_bos()
   632  r = amdgpu_ttm_tt_get_user_pages(bo, e->user_pages); in amdgpu_cs_parser_bos()
   634  kvfree(e->user_pages); in amdgpu_cs_parser_bos()
   635  e->user_pages = NULL; in amdgpu_cs_parser_bos()
   640  if (bo->tbo.ttm->pages[i] != e->user_pages[i]) { in amdgpu_cs_parser_bos()
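The amdgpu hits show the command-submission path keeping a per-entry user_pages snapshot for userptr BOs: the array is allocated with kvmalloc_array(), filled via amdgpu_ttm_tt_get_user_pages(), and compared element by element against the pages TTM already has bound, so any mismatch forces re-validation. A rough sketch of that snapshot-and-compare pattern, using hypothetical demo_* helpers rather than the real amdgpu API:

#include <linux/mm.h>
#include <linux/slab.h>

/*
 * Hypothetical helpers; this only illustrates the snapshot-and-compare
 * pattern visible in amdgpu_cs_parser_bos(), not the real amdgpu API.
 */
static struct page **demo_snapshot_pages_alloc(unsigned long num_pages)
{
	/* Same allocation pairing as the hits above: kvmalloc_array()/kvfree(). */
	return kvmalloc_array(num_pages, sizeof(struct page *),
			      GFP_KERNEL | __GFP_ZERO);
}

static bool demo_userptr_pages_changed(struct page **bound_pages,
				       struct page **snapshot_pages,
				       unsigned long num_pages)
{
	unsigned long i;

	for (i = 0; i < num_pages; i++) {
		/* Any mismatch means the userptr range was invalidated or moved. */
		if (bound_pages[i] != snapshot_pages[i])
			return true;
	}
	return false;
}

kvfree() pairs with kvmalloc_array() in these hits because the page array for a large BO may be vmalloc-backed rather than kmalloc-backed.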
38 struct page **user_pages; member
210 e->user_pages = NULL; in amdgpu_bo_list_get_list()
565 entry->user_pages = NULL; in amdgpu_vm_get_pd_bo()
44 user_pages.o \