
Searched refs:npage (Results 1 – 9 of 9) sorted by relevance

/drivers/infiniband/hw/hns/
hns_roce_mr.c
637 int npage; in mtr_map_region() local
648 npage = 0; in mtr_map_region()
657 addr = to_hr_hw_page_addr(pages[npage]); in mtr_map_region()
659 addr = pages[npage]; in mtr_map_region()
662 npage++; in mtr_map_region()
820 int npage; in mtr_get_pages() local
824 npage = hns_roce_get_umem_bufs(hr_dev, pages, count, 0, in mtr_get_pages()
827 npage = hns_roce_get_kmem_bufs(hr_dev, pages, count, 0, in mtr_get_pages()
830 if (mtr->hem_cfg.is_direct && npage > 1) { in mtr_get_pages()
831 err = mtr_check_direct_pages(pages, npage, page_shift); in mtr_get_pages()
[all …]
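
The hns RoCE hits above walk npage across a page array in mtr_map_region() and, when the MTR is direct-mapped with more than one page, validate the array with mtr_check_direct_pages(). What follows is a minimal user-space sketch of such a contiguity check, assuming it only needs to verify that each address follows the previous one by exactly one page; check_direct_pages() is a hypothetical stand-in, not the driver's function.

/*
 * Hypothetical sketch: given npage page addresses and a page_shift,
 * verify that every page follows the previous one by exactly one page.
 */
#include <stdint.h>
#include <stdio.h>

static int check_direct_pages(const uint64_t *pages, int npage,
                              unsigned int page_shift)
{
        uint64_t page_size = 1ULL << page_shift;
        int i;

        for (i = 1; i < npage; i++)
                if (pages[i] != pages[i - 1] + page_size)
                        return i;       /* index of the first hole */

        return 0;                       /* fully contiguous */
}

int main(void)
{
        uint64_t pages[] = { 0x100000, 0x101000, 0x103000 };

        printf("first hole at index %d\n",
               check_direct_pages(pages, 3, 12));
        return 0;
}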
/drivers/gpu/drm/gma500/
gtt.c
95 set_pages_array_wc(pages, r->npage); in psb_gtt_insert()
99 for (i = r->roll; i < r->npage; i++) { in psb_gtt_insert()
137 for (i = 0; i < r->npage; i++) in psb_gtt_remove()
140 set_pages_array_wb(r->pages, r->npage); in psb_gtt_remove()
159 if (roll >= r->npage) { in psb_gtt_roll()
173 for (i = r->roll; i < r->npage; i++) { in psb_gtt_roll()
204 gt->npage = gt->gem.size / PAGE_SIZE; in psb_gtt_attach_pages()
255 gt->npage, 0, 0, PSB_MMU_CACHED_MEMORY); in psb_gtt_pin()
296 (gpu_base + gt->offset), gt->npage, 0, 0); in psb_gtt_unpin()
gtt.h
39 int npage; /* Number of backing pages */ member
gma_display.c
395 if (gt->npage > 4) in gma_crtc_cursor_set()
398 cursor_pages = gt->npage; in gma_crtc_cursor_set()
framebuffer.c
90 if (gtt->npage) { in psbfb_pan()
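
Across the gma500 hits, npage is the number of backing pages derived from the GEM object size (gt->npage = gt->gem.size / PAGE_SIZE), and the insert/roll paths walk pages from r->roll up to r->npage before wrapping back to the start. Below is a small user-space sketch of that rolled walk, assuming the straightforward split; map_rolled() is an illustrative name, not something in the driver.

/*
 * Sketch: npage derived from the object size, then the pages written
 * rolled so [roll, npage) map first, followed by [0, roll).
 */
#include <stddef.h>
#include <stdio.h>

#define PAGE_SIZE 4096UL

static void map_rolled(size_t size, int roll)
{
        int npage = size / PAGE_SIZE;   /* number of backing pages */
        int i;

        for (i = roll; i < npage; i++)
                printf("slot %d <- page %d\n", i - roll, i);
        for (i = 0; i < roll; i++)
                printf("slot %d <- page %d\n", npage - roll + i, i);
}

int main(void)
{
        map_rolled(8 * PAGE_SIZE, 3);
        return 0;
}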
/drivers/vfio/
vfio_iommu_type1.c
387 static int vfio_lock_acct(struct vfio_dma *dma, long npage, bool async) in vfio_lock_acct() argument
392 if (!npage) in vfio_lock_acct()
401 ret = __account_locked_vm(mm, abs(npage), npage > 0, dma->task, in vfio_lock_acct()
562 long npage, unsigned long *pfn_base, in vfio_pin_pages_remote() argument
600 for (vaddr += PAGE_SIZE, iova += PAGE_SIZE; pinned < npage; in vfio_pin_pages_remote()
643 unsigned long pfn, long npage, in vfio_unpin_pages_remote() argument
649 for (i = 0; i < npage; i++, iova += PAGE_SIZE) { in vfio_unpin_pages_remote()
717 int npage, int prot, in vfio_iommu_type1_pin_pages() argument
749 for (i = 0; i < npage; i++) { in vfio_iommu_type1_pin_pages()
822 int npage) in vfio_iommu_type1_unpin_pages() argument
[all …]
vfio.c
1895 int vfio_pin_pages(struct device *dev, unsigned long *user_pfn, int npage, in vfio_pin_pages() argument
1903 if (!dev || !user_pfn || !phys_pfn || !npage) in vfio_pin_pages()
1906 if (npage > VFIO_PIN_PAGES_MAX_ENTRIES) in vfio_pin_pages()
1927 npage, prot, phys_pfn); in vfio_pin_pages()
1948 int vfio_unpin_pages(struct device *dev, unsigned long *user_pfn, int npage) in vfio_unpin_pages() argument
1955 if (!dev || !user_pfn || !npage) in vfio_unpin_pages()
1958 if (npage > VFIO_PIN_PAGES_MAX_ENTRIES) in vfio_unpin_pages()
1973 npage); in vfio_unpin_pages()
2007 unsigned long *user_iova_pfn, int npage, in vfio_group_pin_pages() argument
2014 if (!group || !user_iova_pfn || !phys_pfn || !npage) in vfio_group_pin_pages()
[all …]
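
The vfio hits treat npage as a signed count: vfio_lock_acct() accounts abs(npage) pages and uses the sign (npage > 0) to choose between charging and uncharging locked memory, while the exported pin/unpin entry points reject npage above VFIO_PIN_PAGES_MAX_ENTRIES. Below is a toy user-space sketch of that signed-accounting convention only; lock_acct() and locked_vm stand in for the kernel's per-task accounting and are not the real API.

/*
 * Sketch: positive npage charges locked pages, negative uncharges,
 * abs(npage) is the magnitude, zero is a no-op.
 */
#include <stdio.h>
#include <stdlib.h>

static long locked_vm;  /* stands in for the task's locked-page counter */

static int lock_acct(long npage)
{
        if (!npage)
                return 0;               /* nothing to account */

        if (npage > 0)
                locked_vm += labs(npage);
        else
                locked_vm -= labs(npage);

        return 0;
}

int main(void)
{
        lock_acct(16);          /* pin 16 pages */
        lock_acct(-4);          /* later unpin 4 of them */
        printf("locked_vm = %ld\n", locked_vm);
        return 0;
}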
/drivers/gpu/drm/i915/gvt/
kvmgt.c
157 int npage; in gvt_unpin_guest_page() local
162 for (npage = 0; npage < total_pages; npage++) { in gvt_unpin_guest_page()
163 unsigned long cur_gfn = gfn + npage; in gvt_unpin_guest_page()
177 int npage; in gvt_pin_guest_page() local
185 for (npage = 0; npage < total_pages; npage++) { in gvt_pin_guest_page()
186 unsigned long cur_gfn = gfn + npage; in gvt_pin_guest_page()
199 npage++; in gvt_pin_guest_page()
204 if (npage == 0) in gvt_pin_guest_page()
206 else if (base_pfn + npage != pfn) { in gvt_pin_guest_page()
209 npage++; in gvt_pin_guest_page()
[all …]
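
In the GVT-g hits, gvt_pin_guest_page() pins total_pages guest frames one gfn at a time and requires the returned pfns to form a single contiguous range (the base_pfn + npage != pfn check), unwinding otherwise. Here is a simplified sketch of that loop; pin_one() is a hypothetical stand-in for the real per-page pin call, and the unwind-on-error bookkeeping is left to the caller.

/*
 * Sketch: pin each guest frame individually and insist the returned
 * pfn extends one contiguous range starting at base_pfn.
 */
#include <stdio.h>

static unsigned long pin_one(unsigned long gfn)
{
        /* pretend host pfns mirror guest gfns with a fixed offset */
        return gfn + 0x1000;
}

static int pin_guest_pages(unsigned long gfn, long total_pages,
                           unsigned long *base_pfn_out)
{
        unsigned long base_pfn = 0;
        long npage;

        for (npage = 0; npage < total_pages; npage++) {
                unsigned long pfn = pin_one(gfn + npage);

                if (npage == 0)
                        base_pfn = pfn;
                else if (base_pfn + npage != pfn)
                        return -1;      /* not contiguous: caller unwinds */
        }

        *base_pfn_out = base_pfn;
        return 0;
}

int main(void)
{
        unsigned long base;

        if (!pin_guest_pages(0x100, 8, &base))
                printf("pinned 8 pages at base pfn %#lx\n", base);
        return 0;
}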
/drivers/infiniband/hw/mthca/
mthca_allocator.c
162 int npage = (nent * sizeof (void *) + PAGE_SIZE - 1) / PAGE_SIZE; in mthca_array_init() local
165 array->page_list = kmalloc_array(npage, sizeof(*array->page_list), in mthca_array_init()
170 for (i = 0; i < npage; ++i) { in mthca_array_init()
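
The mthca hit computes npage with a round-up division: the number of pages needed to hold nent page-list pointers. A trivial sketch of the same arithmetic:

/* Sketch: ceiling division of the pointer array size by PAGE_SIZE. */
#include <stdio.h>

#define PAGE_SIZE 4096UL

static long pages_for_entries(long nent)
{
        return (nent * sizeof(void *) + PAGE_SIZE - 1) / PAGE_SIZE;
}

int main(void)
{
        printf("%ld entries -> %ld page(s)\n", 1000L, pages_for_entries(1000));
        return 0;
}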