Searched refs:nr_pages (Results 1 – 25 of 69) sorted by relevance

/drivers/xen/
balloon.c
313 static enum bp_state increase_reservation(unsigned long nr_pages) in increase_reservation() argument
326 nr_pages = min(nr_pages, balloon_stats.balloon_hotplug); in increase_reservation()
327 balloon_stats.hotplug_pages += nr_pages; in increase_reservation()
328 balloon_stats.balloon_hotplug -= nr_pages; in increase_reservation()
333 if (nr_pages > ARRAY_SIZE(frame_list)) in increase_reservation()
334 nr_pages = ARRAY_SIZE(frame_list); in increase_reservation()
337 for (i = 0; i < nr_pages; i++) { in increase_reservation()
339 nr_pages = i; in increase_reservation()
347 reservation.nr_extents = nr_pages; in increase_reservation()
385 static enum bp_state decrease_reservation(unsigned long nr_pages, gfp_t gfp) in decrease_reservation() argument
[all …]
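
The balloon.c hits above show the usual clamping pattern: a requested page count is limited by the hotplug budget and by the size of the fixed frame_list array. A simplified userspace sketch of that pattern (the array size and helper name are made up for illustration):

#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

static unsigned long frame_list[64];	/* stand-in for the driver's batch array */

/* Clamp a request roughly the way increase_reservation() does:
 * the budget first, then the capacity of one frame_list batch. */
static unsigned long clamp_request(unsigned long nr_pages, unsigned long budget)
{
	if (nr_pages > budget)
		nr_pages = budget;
	if (nr_pages > ARRAY_SIZE(frame_list))
		nr_pages = ARRAY_SIZE(frame_list);
	return nr_pages;
}

int main(void)
{
	printf("%lu\n", clamp_request(1000, 200));	/* prints 64 */
	return 0;
}
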
privcmd.c
373 unsigned long nr_pages; in privcmd_ioctl_mmap_batch() local
397 nr_pages = m.num; in privcmd_ioctl_mmap_batch()
398 if ((m.num <= 0) || (nr_pages > (LONG_MAX >> PAGE_SHIFT))) in privcmd_ioctl_mmap_batch()
424 ((m.addr + (nr_pages << PAGE_SHIFT)) != vma->vm_end) || in privcmd_ioctl_mmap_batch()
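
privcmd_ioctl_mmap_batch() rejects a request whose page count could overflow once shifted back into bytes, so the later m.addr + (nr_pages << PAGE_SHIFT) comparison stays safe. A plain userspace reproduction of that guard (PAGE_SHIFT of 12 is an assumption):

#include <limits.h>
#include <stdbool.h>
#include <stdio.h>

#define PAGE_SHIFT 12	/* assumption: 4 KiB pages */

/* Same idea as the (m.num <= 0) || (nr_pages > (LONG_MAX >> PAGE_SHIFT)) test:
 * if this returns true, nr_pages << PAGE_SHIFT cannot overflow a long. */
static bool nr_pages_valid(long num, unsigned long nr_pages)
{
	return num > 0 && nr_pages <= ((unsigned long)LONG_MAX >> PAGE_SHIFT);
}

int main(void)
{
	printf("%d\n", nr_pages_valid(1, (unsigned long)LONG_MAX));	/* 0: would overflow */
	printf("%d\n", nr_pages_valid(1, 1024));			/* 1: a 4 MiB mapping */
	return 0;
}
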
/drivers/media/v4l2-core/
videobuf-dma-sg.c
66 int nr_pages) in videobuf_vmalloc_to_sg() argument
72 sglist = vzalloc(nr_pages * sizeof(*sglist)); in videobuf_vmalloc_to_sg()
75 sg_init_table(sglist, nr_pages); in videobuf_vmalloc_to_sg()
76 for (i = 0; i < nr_pages; i++, virt += PAGE_SIZE) { in videobuf_vmalloc_to_sg()
96 int nr_pages, int offset, size_t size) in videobuf_pages_to_sg() argument
103 sglist = vmalloc(nr_pages * sizeof(*sglist)); in videobuf_pages_to_sg()
106 sg_init_table(sglist, nr_pages); in videobuf_pages_to_sg()
114 for (i = 1; i < nr_pages; i++) { in videobuf_pages_to_sg()
177 dma->nr_pages = last-first+1; in videobuf_dma_init_user_locked()
178 dma->pages = kmalloc(dma->nr_pages * sizeof(struct page *), GFP_KERNEL); in videobuf_dma_init_user_locked()
[all …]
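
Both videobuf hits build a scatterlist with one entry per page; for a vmalloc'ed buffer each page is located with vmalloc_to_page(). A kernel-style sketch of that loop, illustrative only and not a drop-in replacement for videobuf_vmalloc_to_sg():

#include <linux/vmalloc.h>
#include <linux/scatterlist.h>
#include <linux/mm.h>

/* One scatterlist entry per page of a page-aligned vmalloc buffer. */
static struct scatterlist *vmalloc_buf_to_sg(unsigned char *virt, int nr_pages)
{
	struct scatterlist *sglist;
	struct page *pg;
	int i;

	sglist = vzalloc(nr_pages * sizeof(*sglist));
	if (!sglist)
		return NULL;
	sg_init_table(sglist, nr_pages);

	for (i = 0; i < nr_pages; i++, virt += PAGE_SIZE) {
		pg = vmalloc_to_page(virt);
		if (!pg)
			goto err;
		sg_set_page(&sglist[i], pg, PAGE_SIZE, 0);
	}
	return sglist;

err:
	vfree(sglist);
	return NULL;
}
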
/drivers/gpu/drm/
drm_buffer.c
46 int nr_pages = size / PAGE_SIZE + 1; in drm_buffer_alloc() local
51 *buf = kzalloc(sizeof(struct drm_buffer) + nr_pages*sizeof(char *), in drm_buffer_alloc()
57 size, nr_pages); in drm_buffer_alloc()
63 for (idx = 0; idx < nr_pages; ++idx) { in drm_buffer_alloc()
73 idx + 1, size, nr_pages); in drm_buffer_alloc()
105 int nr_pages = size / PAGE_SIZE + 1; in drm_buffer_copy_from_user() local
115 for (idx = 0; idx < nr_pages; ++idx) { in drm_buffer_copy_from_user()
140 int nr_pages = buf->size / PAGE_SIZE + 1; in drm_buffer_free() local
142 for (idx = 0; idx < nr_pages; ++idx) in drm_buffer_free()
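
drm_buffer.c sizes its page array with size / PAGE_SIZE + 1, which allocates a spare page whenever size is already a multiple of PAGE_SIZE; pvr2fb.c further down in these results uses the exact round-up form instead. A small userspace comparison of the two idioms (PAGE_SHIFT of 12 is an assumption):

#include <stdio.h>

#define PAGE_SHIFT 12			/* assumption: 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

int main(void)
{
	unsigned long size = 2 * PAGE_SIZE;	/* exactly two pages of data */

	/* drm_buffer_alloc() style: one extra page when size is page aligned */
	printf("size / PAGE_SIZE + 1          -> %lu pages\n", size / PAGE_SIZE + 1);
	/* pvr2fb_write() style: classic round-up to the next page boundary */
	printf("(size + PAGE_SIZE - 1) >> SHIFT -> %lu pages\n",
	       (size + PAGE_SIZE - 1) >> PAGE_SHIFT);
	return 0;
}
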
/drivers/net/ethernet/ibm/ehea/
ehea_qmr.c
107 int i, nr_pages; in hw_queue_dtor() local
112 nr_pages = queue->queue_length / queue->pagesize; in hw_queue_dtor()
114 for (i = 0; i < nr_pages; i += pages_per_kpage) in hw_queue_dtor()
151 ret = hw_queue_ctor(&cq->hw_queue, cq->attr.nr_pages, in ehea_create_cq()
156 for (counter = 0; counter < cq->attr.nr_pages; counter++) { in ehea_create_cq()
169 cq, hret, counter, cq->attr.nr_pages); in ehea_create_cq()
173 if (counter == (cq->attr.nr_pages - 1)) { in ehea_create_cq()
273 ret = hw_queue_ctor(&eq->hw_queue, eq->attr.nr_pages, in ehea_create_eq()
280 for (i = 0; i < eq->attr.nr_pages; i++) { in ehea_create_eq()
294 if (i == (eq->attr.nr_pages - 1)) { in ehea_create_eq()
[all …]
/drivers/edac/
pasemi_edac.c
157 dimm->nr_pages = 128 << (20 - PAGE_SHIFT); in pasemi_edac_init_csrows()
160 dimm->nr_pages = 256 << (20 - PAGE_SHIFT); in pasemi_edac_init_csrows()
164 dimm->nr_pages = 512 << (20 - PAGE_SHIFT); in pasemi_edac_init_csrows()
167 dimm->nr_pages = 1024 << (20 - PAGE_SHIFT); in pasemi_edac_init_csrows()
170 dimm->nr_pages = 2048 << (20 - PAGE_SHIFT); in pasemi_edac_init_csrows()
180 csrow->last_page = csrow->first_page + dimm->nr_pages - 1; in pasemi_edac_init_csrows()
181 last_page_in_mmc += dimm->nr_pages; in pasemi_edac_init_csrows()
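
The EDAC drivers record DIMM capacity in pages; 128 << (20 - PAGE_SHIFT) is simply 128 MiB expressed in PAGE_SIZE units, the same arithmetic as the MiB_TO_PAGES()/PAGES_TO_MiB() helpers seen in the other EDAC hits. A userspace check of that conversion (PAGE_SHIFT of 12 and the macro bodies are assumptions for illustration, not copied from kernel headers):

#include <stdio.h>

#define PAGE_SHIFT 12				/* assumption: 4 KiB pages */
#define MiB_TO_PAGES(mb)    ((mb) << (20 - PAGE_SHIFT))
#define PAGES_TO_MiB(pages) ((pages) >> (20 - PAGE_SHIFT))

int main(void)
{
	unsigned int nr_pages = 128 << (20 - PAGE_SHIFT);	/* a 128 MiB DIMM */

	printf("128 MiB = %u pages\n", nr_pages);		/* 32768 with 4 KiB pages */
	printf("back to MiB: %u\n", PAGES_TO_MiB(nr_pages));	/* 128 */
	printf("macro form:  %u pages\n", MiB_TO_PAGES(128u));
	return 0;
}
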
cell_edac.c
134 u32 nr_pages; in cell_edac_init_csrows() local
149 nr_pages = resource_size(&r) >> PAGE_SHIFT; in cell_edac_init_csrows()
150 csrow->last_page = csrow->first_page + nr_pages - 1; in cell_edac_init_csrows()
156 dimm->nr_pages = nr_pages / csrow->nr_channels; in cell_edac_init_csrows()
162 csrow->first_page, nr_pages); in cell_edac_init_csrows()
i3200_edac.c
396 unsigned long nr_pages; in i3200_probe1() local
402 nr_pages = drb_to_nr_pages(drbs, stacked, j, i); in i3200_probe1()
403 if (nr_pages == 0) in i3200_probe1()
407 stacked ? " (stacked)" : "", PAGES_TO_MiB(nr_pages)); in i3200_probe1()
409 dimm->nr_pages = nr_pages; in i3200_probe1()
410 dimm->grain = nr_pages << PAGE_SHIFT; in i3200_probe1()
edac_mc_sysfs.c
181 u32 nr_pages = 0; in csrow_size_show() local
184 nr_pages += csrow->channels[i]->dimm->nr_pages; in csrow_size_show()
185 return sprintf(data, "%u\n", PAGES_TO_MiB(nr_pages)); in csrow_size_show()
355 int chan, nr_pages = 0; in nr_pages_per_csrow() local
358 nr_pages += csrow->channels[chan]->dimm->nr_pages; in nr_pages_per_csrow()
360 return nr_pages; in nr_pages_per_csrow()
389 if (!csrow->channels[chan]->dimm->nr_pages) in edac_create_csrow_object()
444 if (!csrow->channels[chan]->dimm->nr_pages) in edac_create_csrow_objects()
467 if (!csrow->channels[chan]->dimm->nr_pages) in edac_delete_csrow_objects()
529 return sprintf(data, "%u\n", PAGES_TO_MiB(dimm->nr_pages)); in dimmdev_size_show()
[all …]
ghes_edac.c
111 dimm->nr_pages = MiB_TO_PAGES(32);/* Unknown */ in ghes_edac_dmidecode()
113 dimm->nr_pages = MiB_TO_PAGES(entry->extended_size); in ghes_edac_dmidecode()
116 dimm->nr_pages = MiB_TO_PAGES((entry->size & in ghes_edac_dmidecode()
119 dimm->nr_pages = MiB_TO_PAGES(entry->size); in ghes_edac_dmidecode()
174 if (dimm->nr_pages) { in ghes_edac_dmidecode()
177 PAGES_TO_MiB(dimm->nr_pages), in ghes_edac_dmidecode()
502 dimm->nr_pages = 1; in ghes_edac_register()
x38_edac.c
380 unsigned long nr_pages; in x38_probe1() local
383 nr_pages = drb_to_nr_pages(drbs, stacked, in x38_probe1()
387 if (nr_pages == 0) in x38_probe1()
393 dimm->nr_pages = nr_pages / x38_channel_num; in x38_probe1()
394 dimm->grain = nr_pages << PAGE_SHIFT; in x38_probe1()
edac_mc.c
93 edac_dbg(4, " dimm->nr_pages = 0x%x\n", dimm->nr_pages); in edac_mc_dump_dimm()
95 edac_dbg(4, " dimm->nr_pages = 0x%x\n", dimm->nr_pages); in edac_mc_dump_dimm()
735 u32 nr_pages = 0; in edac_mc_add_mc() local
739 nr_pages += csrow->channels[j]->dimm->nr_pages; in edac_mc_add_mc()
740 if (!nr_pages) in edac_mc_add_mc()
744 if (csrow->channels[j]->dimm->nr_pages) in edac_mc_add_mc()
748 if (mci->dimms[i]->nr_pages) in edac_mc_add_mc()
887 n += dimm->nr_pages; in edac_mc_find_csrow_by_page()
1219 if (e->enable_per_layer_report && dimm->nr_pages) { in edac_mc_handle_error()
i82975x_edac.c
376 u32 cumul_size, nr_pages; in i82975x_init_csrows() local
407 nr_pages = cumul_size - last_cumul_size; in i82975x_init_csrows()
408 if (!nr_pages) in i82975x_init_csrows()
421 dimm->nr_pages = nr_pages / csrow->nr_channels; in i82975x_init_csrows()
i82875p_edac.c
357 u32 cumul_size, nr_pages; in i82875p_init_csrows() local
380 nr_pages = cumul_size - last_cumul_size; in i82875p_init_csrows()
386 dimm->nr_pages = nr_pages / nr_chans; in i82875p_init_csrows()
i3000_edac.c
317 unsigned long last_cumul_size, nr_pages; in i3000_probe1() local
408 nr_pages = cumul_size - last_cumul_size; in i3000_probe1()
414 dimm->nr_pages = nr_pages / nr_channels; in i3000_probe1()
amd76x_edac.c
211 dimm->nr_pages = (mba_mask + 1) >> PAGE_SHIFT; in amd76x_init_csrows()
212 csrow->last_page = csrow->first_page + dimm->nr_pages - 1; in amd76x_init_csrows()
214 dimm->grain = dimm->nr_pages << PAGE_SHIFT; in amd76x_init_csrows()
/drivers/infiniband/hw/ehca/
ehca_eq.c
59 u32 nr_pages; in ehca_create_eq() local
83 &nr_pages, &eq->ist); in ehca_create_eq()
90 ret = ipz_queue_ctor(NULL, &eq->ipz_queue, nr_pages, in ehca_create_eq()
97 for (i = 0; i < nr_pages; i++) { in ehca_create_eq()
110 if (i == (nr_pages - 1)) { in ehca_create_eq()
ipz_pt_fn.c
257 int i, nr_pages; in ipz_queue_dtor() local
267 nr_pages = queue->queue_length / queue->pagesize; in ipz_queue_dtor()
268 for (i = 0; i < nr_pages; i += PAGES_PER_KPAGE) in ipz_queue_dtor()
/drivers/gpu/drm/exynos/
exynos_drm_buf.c
25 unsigned int nr_pages; in lowlevel_buffer_allocate() local
56 nr_pages = buf->size >> PAGE_SHIFT; in lowlevel_buffer_allocate()
62 buf->pages = kzalloc(sizeof(struct page) * nr_pages, in lowlevel_buffer_allocate()
79 while (i < nr_pages) { in lowlevel_buffer_allocate()
95 buf->sgt = drm_prime_pages_to_sg(buf->pages, nr_pages); in lowlevel_buffer_allocate()
/drivers/dma/
iovlock.c
91 page_list->nr_pages = num_pages_spanned(&iov[i]); in dma_pin_iovec_pages()
95 pages += page_list->nr_pages; in dma_pin_iovec_pages()
103 page_list->nr_pages, in dma_pin_iovec_pages()
110 if (ret != page_list->nr_pages) in dma_pin_iovec_pages()
133 for (j = 0; j < page_list->nr_pages; j++) { in dma_unpin_iovec_pages()
/drivers/staging/crystalhd/
crystalhd_misc.c
684 int i = 0, rw = 0, res = 0, nr_pages = 0, skip_fb_sg = 0; in crystalhd_map_dio() local
695 nr_pages = end - start; in crystalhd_map_dio()
716 if (nr_pages > dio->max_pages) { in crystalhd_map_dio()
718 dio->max_pages, nr_pages); in crystalhd_map_dio()
744 res = get_user_pages(current, current->mm, uaddr, nr_pages, rw == READ, in crystalhd_map_dio()
750 if (res < nr_pages) { in crystalhd_map_dio()
751 BCMLOG_ERR("get pages failed: %d-%d\n", nr_pages, res); in crystalhd_map_dio()
757 dio->page_cnt = nr_pages; in crystalhd_map_dio()
761 if (nr_pages > 1) { in crystalhd_map_dio()
768 for (i = 1; i < nr_pages; i++) { in crystalhd_map_dio()
/drivers/media/common/saa7146/
saa7146_core.c
149 static struct scatterlist* vmalloc_to_sg(unsigned char *virt, int nr_pages) in vmalloc_to_sg() argument
155 sglist = kcalloc(nr_pages, sizeof(struct scatterlist), GFP_KERNEL); in vmalloc_to_sg()
158 sg_init_table(sglist, nr_pages); in vmalloc_to_sg()
159 for (i = 0; i < nr_pages; i++, virt += PAGE_SIZE) { in vmalloc_to_sg()
251 int nr_pages = 0; in saa7146_pgtable_build_single() local
270 nr_pages++; in saa7146_pgtable_build_single()
277 for(i=nr_pages;i<1024;i++) { in saa7146_pgtable_build_single()
/drivers/video/
pvr2fb.c
679 unsigned int nr_pages; in pvr2fb_write() local
683 nr_pages = (count + PAGE_SIZE - 1) >> PAGE_SHIFT; in pvr2fb_write()
685 pages = kmalloc(nr_pages * sizeof(struct page *), GFP_KERNEL); in pvr2fb_write()
691 nr_pages, WRITE, 0, pages, NULL); in pvr2fb_write()
694 if (ret < nr_pages) { in pvr2fb_write()
695 nr_pages = ret; in pvr2fb_write()
704 end = (unsigned long)page_address(pages[nr_pages]); in pvr2fb_write()
705 len = nr_pages << PAGE_SHIFT; in pvr2fb_write()
723 for (i = 0; i < nr_pages; i++, dst += PAGE_SIZE) { in pvr2fb_write()
739 for (i = 0; i < nr_pages; i++) in pvr2fb_write()
/drivers/mtd/devices/
mtd_dataflash.c
619 static int add_dataflash_otp(struct spi_device *spi, char *name, int nr_pages, in add_dataflash_otp() argument
645 device->size = nr_pages * pagesize; in add_dataflash_otp()
680 int nr_pages, int pagesize, int pageoffset) in add_dataflash() argument
682 return add_dataflash_otp(spi, name, nr_pages, pagesize, in add_dataflash()
695 unsigned nr_pages; member
838 return add_dataflash_otp(spi, info->name, info->nr_pages, in dataflash_probe()
/drivers/base/
firmware_class.c
132 int nr_pages; member
248 for (i = 0; i < buf->nr_pages; i++) in __fw_free_buf()
594 buf->data = vmap(buf->pages, buf->nr_pages, 0, PAGE_KERNEL_RO); in fw_map_pages_buf()
631 for (i = 0; i < fw_buf->nr_pages; i++) in firmware_loading_store()
636 fw_buf->nr_pages = 0; in firmware_loading_store()
741 while (buf->nr_pages < pages_needed) { in fw_realloc_buffer()
742 buf->pages[buf->nr_pages] = in fw_realloc_buffer()
745 if (!buf->pages[buf->nr_pages]) { in fw_realloc_buffer()
749 buf->nr_pages++; in fw_realloc_buffer()
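
firmware_class.c grows its page array one page at a time and then vmap()s the whole set into one read-only, virtually contiguous buffer. A kernel-style sketch of that sequence; struct fw_pages, fw_map_pages() and the plain GFP_KERNEL alloc_page() call are illustrative assumptions, not the loader's own code (the allocation expression is truncated in the hit above):

#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/vmalloc.h>

struct fw_pages {
	struct page **pages;	/* assumed to already have room for pages_needed entries */
	int nr_pages;
};

static void *fw_map_pages(struct fw_pages *buf, int pages_needed)
{
	while (buf->nr_pages < pages_needed) {
		buf->pages[buf->nr_pages] = alloc_page(GFP_KERNEL);	/* assumed flags */
		if (!buf->pages[buf->nr_pages])
			return NULL;	/* caller frees the nr_pages already allocated */
		buf->nr_pages++;
	}
	/* one contiguous, read-only kernel view of every firmware page */
	return vmap(buf->pages, buf->nr_pages, 0, PAGE_KERNEL_RO);
}
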
