
Searched refs: page_count (results 1 – 25 of 68, sorted by relevance)


/drivers/media/pci/ivtv/
ivtv-udma.c
33 dma_page->page_count = dma_page->last - dma_page->first + 1; in ivtv_udma_get_page_info()
34 if (dma_page->page_count == 1) dma_page->tail -= dma_page->offset; in ivtv_udma_get_page_info()
48 for (i = 0; i < dma_page->page_count; i++) { in ivtv_udma_fill_sg_list()
49 unsigned int len = (i == dma_page->page_count - 1) ? in ivtv_udma_fill_sg_list()
112 if (dma->SG_length || dma->page_count) { in ivtv_udma_setup()
114 dma->SG_length, dma->page_count); in ivtv_udma_setup()
120 if (user_dma.page_count <= 0) { in ivtv_udma_setup()
122 user_dma.page_count, size_in_bytes, user_dma.offset); in ivtv_udma_setup()
129 user_dma.uaddr, user_dma.page_count, 0, 1, dma->map, NULL); in ivtv_udma_setup()
132 if (user_dma.page_count != err) { in ivtv_udma_setup()
[all …]
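The first two matches in ivtv_udma_get_page_info() compute page_count as the inclusive span between the first and last page index touched by a user buffer. A minimal userspace sketch of that arithmetic (the function name and the 4 KB PAGE_SHIFT are illustrative, not the driver's):

#include <stdio.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

/* Count the pages spanned by a user buffer [uaddr, uaddr + size). */
static unsigned long span_page_count(unsigned long uaddr, unsigned long size)
{
    unsigned long first = uaddr >> PAGE_SHIFT;
    unsigned long last  = (uaddr + size - 1) >> PAGE_SHIFT;

    return last - first + 1;    /* mirrors dma_page->last - dma_page->first + 1 */
}

int main(void)
{
    /* A 100-byte buffer starting 50 bytes before a page boundary spans 2 pages. */
    printf("%lu\n", span_page_count(PAGE_SIZE - 50, 100));  /* prints 2 */
    return 0;
}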
ivtv-yuv.c
67 if (dma->SG_length || dma->page_count) { in ivtv_yuv_prep_user_dma()
70 dma->SG_length, dma->page_count); in ivtv_yuv_prep_user_dma()
79 …y_pages = get_user_pages(current, current->mm, y_dma.uaddr, y_dma.page_count, 0, 1, &dma->map[0], … in ivtv_yuv_prep_user_dma()
81 if (y_pages == y_dma.page_count) { in ivtv_yuv_prep_user_dma()
83 uv_dma.uaddr, uv_dma.page_count, 0, 1, in ivtv_yuv_prep_user_dma()
88 if (y_pages != y_dma.page_count || uv_pages != uv_dma.page_count) { in ivtv_yuv_prep_user_dma()
91 if (y_pages == y_dma.page_count) { in ivtv_yuv_prep_user_dma()
94 "expecting %d\n", uv_pages, uv_dma.page_count); in ivtv_yuv_prep_user_dma()
106 "expecting %d\n", y_pages, y_dma.page_count); in ivtv_yuv_prep_user_dma()
123 dma->page_count = y_pages + uv_pages; in ivtv_yuv_prep_user_dma()
[all …]
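ivtv_yuv_prep_user_dma() pins the Y plane first, pins the UV plane only if the Y pin got every requested page, and fails if either count falls short. A self-contained sketch of that control flow; pin_pages() is a hypothetical stand-in for get_user_pages(), and the real rollback (put_page() on everything pinned) is reduced to a comment:

#include <stdio.h>

/* Hypothetical pin function: returns how many of the requested pages
 * could actually be pinned (stand-in for get_user_pages()). */
static int pin_pages(int requested, int available)
{
    return requested < available ? requested : available;
}

/* Returns total pinned pages, or -1 after a short pin. */
static int prep_two_plane_dma(int y_count, int uv_count, int avail)
{
    int y_pages, uv_pages = 0;

    y_pages = pin_pages(y_count, avail);
    if (y_pages == y_count)             /* only pin UV if Y is complete */
        uv_pages = pin_pages(uv_count, avail - y_pages);

    if (y_pages != y_count || uv_pages != uv_count) {
        /* a real driver would put_page() everything pinned so far */
        return -1;
    }
    return y_pages + uv_pages;          /* becomes dma->page_count */
}

int main(void)
{
    printf("%d\n", prep_two_plane_dma(4, 2, 6));    /* 6: both planes pinned */
    printf("%d\n", prep_two_plane_dma(4, 2, 5));    /* -1: UV plane short */
    return 0;
}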
/drivers/firewire/
core-iso.c
42 int fw_iso_buffer_alloc(struct fw_iso_buffer *buffer, int page_count) in fw_iso_buffer_alloc() argument
46 buffer->page_count = 0; in fw_iso_buffer_alloc()
48 buffer->pages = kmalloc(page_count * sizeof(buffer->pages[0]), in fw_iso_buffer_alloc()
53 for (i = 0; i < page_count; i++) { in fw_iso_buffer_alloc()
58 buffer->page_count = i; in fw_iso_buffer_alloc()
59 if (i < page_count) { in fw_iso_buffer_alloc()
75 for (i = 0; i < buffer->page_count; i++) { in fw_iso_buffer_map_dma()
84 if (i < buffer->page_count) in fw_iso_buffer_map_dma()
91 int page_count, enum dma_data_direction direction) in fw_iso_buffer_init() argument
95 ret = fw_iso_buffer_alloc(buffer, page_count); in fw_iso_buffer_init()
[all …]
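fw_iso_buffer_alloc() sets buffer->page_count to the number of pages actually obtained before any failure, so the unwind path frees exactly that many and no more. A userspace sketch of the same bookkeeping, with malloc() standing in for alloc_page() and illustrative names throughout:

#include <stdlib.h>

struct iso_buffer {
    void **pages;
    int page_count;
};

/* Returns 0 on success, -1 after freeing any partial allocation. */
static int iso_buffer_alloc(struct iso_buffer *buf, int page_count)
{
    int i;

    buf->page_count = 0;
    buf->pages = malloc(page_count * sizeof(buf->pages[0]));
    if (!buf->pages)
        return -1;

    for (i = 0; i < page_count; i++) {
        buf->pages[i] = malloc(4096);   /* stand-in for alloc_page() */
        if (!buf->pages[i])
            break;
    }
    buf->page_count = i;                /* records how far we got */

    if (i < page_count) {               /* partial failure: unwind */
        while (i-- > 0)
            free(buf->pages[i]);
        free(buf->pages);
        buf->pages = NULL;
        buf->page_count = 0;
        return -1;
    }
    return 0;
}

int main(void)
{
    struct iso_buffer buf;

    if (iso_buffer_alloc(&buf, 8) == 0) {
        for (int i = 0; i < buf.page_count; i++)
            free(buf.pages[i]);
        free(buf.pages);
    }
    return 0;
}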
/drivers/char/agp/
generic.c
198 if (curr->page_count != 0) { in agp_free_memory()
203 for (i = 0; i < curr->page_count; i++) { in agp_free_memory()
208 for (i = 0; i < curr->page_count; i++) { in agp_free_memory()
235 size_t page_count, u32 type) in agp_allocate_memory() argument
246 if ((cur_memory + page_count > bridge->max_memory_agp) || in agp_allocate_memory()
247 (cur_memory + page_count < page_count)) in agp_allocate_memory()
251 new = agp_generic_alloc_user(page_count, type); in agp_allocate_memory()
258 new = bridge->driver->alloc_by_type(page_count, type); in agp_allocate_memory()
264 scratch_pages = (page_count + ENTRIES_PER_PAGE - 1) / ENTRIES_PER_PAGE; in agp_allocate_memory()
272 if (bridge->driver->agp_alloc_pages(bridge, new, page_count)) { in agp_allocate_memory()
[all …]
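The allocation guard in agp_allocate_memory() checks two things at once: the request must not exceed max_memory_agp, and cur_memory + page_count must not wrap, which is what the otherwise odd-looking "sum < page_count" comparison detects (unsigned overflow makes the sum smaller than either operand). A minimal demonstration:

#include <stdio.h>
#include <stddef.h>

/* Reject a request that exceeds the limit or wraps size_t arithmetic. */
static int request_ok(size_t cur, size_t requested, size_t max)
{
    if (cur + requested > max ||
        cur + requested < requested)    /* wrapped: overflow */
        return 0;
    return 1;
}

int main(void)
{
    printf("%d\n", request_ok(100, 50, 200));           /* 1: fits */
    printf("%d\n", request_ok(100, (size_t)-1, 200));   /* 0: sum wraps */
    return 0;
}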
i460-agp.c
311 if ((io_pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count) > num_entries) { in i460_insert_memory_small_io_page()
317 while (j < (io_pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count)) { in i460_insert_memory_small_io_page()
327 for (i = 0, j = io_pg_start; i < mem->page_count; i++) { in i460_insert_memory_small_io_page()
346 for (i = pg_start; i < (pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count); i++) in i460_remove_memory_small_io_page()
415 end = &i460.lp_desc[(pg_start + mem->page_count - 1) / I460_KPAGES_PER_IOPAGE]; in i460_insert_memory_large_io_page()
417 end_offset = (pg_start + mem->page_count - 1) % I460_KPAGES_PER_IOPAGE; in i460_insert_memory_large_io_page()
473 end = &i460.lp_desc[(pg_start + mem->page_count - 1) / I460_KPAGES_PER_IOPAGE]; in i460_remove_memory_large_io_page()
475 end_offset = (pg_start + mem->page_count - 1) % I460_KPAGES_PER_IOPAGE; in i460_remove_memory_large_io_page()
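i460 juggles two page sizes: small-I/O-page counts are scaled by I460_IOPAGES_PER_KPAGE, while in large-I/O-page mode the descriptor index and the offset inside it for the end of a range come from divide/modulo by I460_KPAGES_PER_IOPAGE, as in i460_insert_memory_large_io_page(). A small sketch of that index arithmetic (the 4:1 ratio is illustrative, not the hardware's):

#include <stdio.h>

#define KPAGES_PER_IOPAGE 4     /* illustrative ratio */

int main(void)
{
    unsigned int pg_start = 6, page_count = 5;
    unsigned int last = pg_start + page_count - 1;

    /* Which large I/O page holds the first and last kernel page,
     * and where the range ends inside that I/O page. */
    unsigned int start_idx  = pg_start / KPAGES_PER_IOPAGE;
    unsigned int end_idx    = last / KPAGES_PER_IOPAGE;
    unsigned int end_offset = last % KPAGES_PER_IOPAGE;

    printf("%u %u %u\n", start_idx, end_idx, end_offset);  /* 1 2 2 */
    return 0;
}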
sgi-agp.c
174 if ((pg_start + mem->page_count) > num_entries) in sgi_tioca_insert_memory()
179 while (j < (pg_start + mem->page_count)) { in sgi_tioca_insert_memory()
190 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) { in sgi_tioca_insert_memory()
218 for (i = pg_start; i < (mem->page_count + pg_start); i++) { in sgi_tioca_remove_memory()
nvidia-agp.c
209 if (mem->page_count == 0) in nvidia_insert_memory()
212 if ((pg_start + mem->page_count) > in nvidia_insert_memory()
216 for (j = pg_start; j < (pg_start + mem->page_count); j++) { in nvidia_insert_memory()
225 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) { in nvidia_insert_memory()
249 if (mem->page_count == 0) in nvidia_remove_memory()
252 for (i = pg_start; i < (mem->page_count + pg_start); i++) in nvidia_remove_memory()
intel-gtt.c
126 DBG("try unmapping %lu pages\n", (unsigned long)mem->page_count); in intel_gtt_unmap_memory()
217 if ((pg_start + mem->page_count) in i810_insert_dcache_entries()
224 for (i = pg_start; i < (pg_start + mem->page_count); i++) { in i810_insert_dcache_entries()
269 new->page_count = pg_count; in alloc_agpphysmem_i8xx()
280 if (curr->page_count == 4) in intel_i810_free_by_type()
896 if (mem->page_count == 0) in intel_fake_agp_insert_entries()
899 if (pg_start + mem->page_count > intel_private.gtt_total_entries) in intel_fake_agp_insert_entries()
914 ret = intel_gtt_map_memory(mem->pages, mem->page_count, &st); in intel_fake_agp_insert_entries()
922 intel_gtt_insert_pages(pg_start, mem->page_count, mem->pages, in intel_fake_agp_insert_entries()
947 if (mem->page_count == 0) in intel_fake_agp_remove_entries()
[all …]
ati-agp.c
280 if (mem->page_count == 0) in ati_insert_memory()
283 if ((pg_start + mem->page_count) > num_entries) in ati_insert_memory()
287 while (j < (pg_start + mem->page_count)) { in ati_insert_memory()
301 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) { in ati_insert_memory()
326 if (mem->page_count == 0) in ati_remove_memory()
329 for (i = pg_start; i < (mem->page_count + pg_start); i++) { in ati_remove_memory()
uninorth-agp.c
165 if (mem->page_count == 0) in uninorth_insert_memory()
171 if ((pg_start + mem->page_count) > num_entries) in uninorth_insert_memory()
175 for (i = 0; i < mem->page_count; ++i) { in uninorth_insert_memory()
184 for (i = 0; i < mem->page_count; i++) { in uninorth_insert_memory()
214 if (mem->page_count == 0) in uninorth_remove_memory()
218 for (i = 0; i < mem->page_count; ++i) { in uninorth_remove_memory()
agp.h
199 struct agp_memory *agp_generic_alloc_by_type(size_t page_count, int type);
203 struct agp_memory *memory, size_t page_count);
220 struct agp_memory *agp_generic_alloc_user(size_t page_count, int type);
efficeon-agp.c
241 int i, count = mem->page_count, num_entries; in efficeon_insert_memory()
249 if ((pg_start + mem->page_count) > num_entries) in efficeon_insert_memory()
290 int i, count = mem->page_count, num_entries; in efficeon_remove_memory()
296 if ((pg_start + mem->page_count) > num_entries) in efficeon_remove_memory()
parisc-agp.c
137 io_pg_count = info->io_pages_per_kpage * mem->page_count; in parisc_agp_insert_memory()
154 for (i = 0, j = io_pg_start; i < mem->page_count; i++) { in parisc_agp_insert_memory()
183 io_pg_count = info->io_pages_per_kpage * mem->page_count; in parisc_agp_remove_memory()
ali-agp.c
128 int i, page_count; in m1541_cache_flush() local
133 page_count = 1 << A_SIZE_32(agp_bridge->current_size)->page_order; in m1541_cache_flush()
134 for (i = 0; i < PAGE_SIZE * page_count; i += PAGE_SIZE) { in m1541_cache_flush()
amd-k7-agp.c
297 if ((pg_start + mem->page_count) > num_entries) in amd_insert_memory()
301 while (j < (pg_start + mem->page_count)) { in amd_insert_memory()
314 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) { in amd_insert_memory()
337 for (i = pg_start; i < (mem->page_count + pg_start); i++) { in amd_remove_memory()
sworks-agp.c
331 if ((pg_start + mem->page_count) > num_entries) { in serverworks_insert_memory()
336 while (j < (pg_start + mem->page_count)) { in serverworks_insert_memory()
349 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) { in serverworks_insert_memory()
374 for (i = pg_start; i < (mem->page_count + pg_start); i++) { in serverworks_remove_memory()
/drivers/gpu/drm/udl/
udl_gem.c
126 int page_count, i; in udl_gem_get_pages() local
134 page_count = obj->base.size / PAGE_SIZE; in udl_gem_get_pages()
136 obj->pages = drm_malloc_ab(page_count, sizeof(struct page *)); in udl_gem_get_pages()
144 for (i = 0; i < page_count; i++) { in udl_gem_get_pages()
162 int page_count = obj->base.size / PAGE_SIZE; in udl_gem_put_pages() local
171 for (i = 0; i < page_count; i++) in udl_gem_put_pages()
180 int page_count = obj->base.size / PAGE_SIZE; in udl_gem_vmap() local
194 obj->vmapping = vmap(obj->pages, page_count, 0, PAGE_KERNEL); in udl_gem_vmap()
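udl_gem derives page_count by an exact divide of the object size by PAGE_SIZE, which assumes the size is page-aligned (GEM object sizes are); an unaligned size would need a ceiling divide instead. Both forms side by side, with illustrative values:

#include <stdio.h>

#define PAGE_SIZE 4096UL

int main(void)
{
    unsigned long size = 3 * PAGE_SIZE;

    /* Exact divide, as in udl_gem_get_pages(): valid for aligned sizes. */
    unsigned long page_count = size / PAGE_SIZE;

    /* Ceiling divide, required if size were not page-aligned. */
    unsigned long rounded_up = (size + PAGE_SIZE - 1) / PAGE_SIZE;

    printf("%lu %lu\n", page_count, rounded_up);    /* 3 3 */
    return 0;
}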
/drivers/gpu/drm/nouveau/
nouveau_bo.c
626 u32 page_count = new_mem->num_pages; in nvc0_bo_move_copy() local
629 page_count = new_mem->num_pages; in nvc0_bo_move_copy()
630 while (page_count) { in nvc0_bo_move_copy()
631 int line_count = (page_count > 8191) ? 8191 : page_count; in nvc0_bo_move_copy()
649 page_count -= line_count; in nvc0_bo_move_copy()
664 u32 page_count = new_mem->num_pages; in nvc0_bo_move_m2mf() local
667 page_count = new_mem->num_pages; in nvc0_bo_move_m2mf()
668 while (page_count) { in nvc0_bo_move_m2mf()
669 int line_count = (page_count > 2047) ? 2047 : page_count; in nvc0_bo_move_m2mf()
688 page_count -= line_count; in nvc0_bo_move_m2mf()
[all …]
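Both nouveau copy paths split the move into chunks the engine can take in one command: at most 8191 lines per submission in nvc0_bo_move_copy(), 2047 in nvc0_bo_move_m2mf(), subtracting line_count from page_count until the transfer is drained. A standalone sketch of that loop shape (function name is illustrative):

#include <stdio.h>

/* Split a transfer of page_count pages into chunks of at most 'cap'. */
static int submit_in_chunks(unsigned int page_count, unsigned int cap)
{
    int submissions = 0;

    while (page_count) {
        unsigned int line_count = page_count > cap ? cap : page_count;

        /* a real driver would emit one copy command here */
        page_count -= line_count;
        submissions++;
    }
    return submissions;
}

int main(void)
{
    printf("%d\n", submit_in_chunks(20000, 8191));  /* 3 chunks */
    return 0;
}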
/drivers/iommu/
tegra-gart.c
58 u32 page_count; /* total remappable size */ member
80 iova < gart->iovmm_base + GART_PAGE_SIZE * gart->page_count; \
147 gart_end = gart_start + gart->page_count * GART_PAGE_SIZE - 1; in gart_iova_range_valid()
170 gart->page_count * GART_PAGE_SIZE - 1; in gart_iommu_attach_dev()
389 gart->page_count = (resource_size(res_remap) >> GART_PAGE_SHIFT); in tegra_gart_probe()
391 gart->savedata = vmalloc(sizeof(u32) * gart->page_count); in tegra_gart_probe()
/drivers/block/
rbd.c
256 u32 page_count; member
1886 obj_request->page_count); in rbd_obj_request_destroy()
2105 obj_request->page_count = 0; in rbd_img_obj_end_request()
2228 unsigned int page_count; in rbd_img_request_fill() local
2231 page_count = (u32)calc_pages_for(offset, length); in rbd_img_request_fill()
2232 obj_request->page_count = page_count; in rbd_img_request_fill()
2234 page_count--; /* more on last page */ in rbd_img_request_fill()
2235 pages += page_count; in rbd_img_request_fill()
2288 u32 page_count; in rbd_img_obj_copyup_callback() local
2301 page_count = obj_request->copyup_page_count; in rbd_img_obj_copyup_callback()
[all …]
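calc_pages_for(offset, length) on these rbd lines counts the pages a byte range touches when the range need not start page-aligned. A userspace rendering of that computation, assuming the usual libceph definition (page index one past the end, minus the first page index):

#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

/* Pages spanned by [off, off + len). */
static uint32_t calc_pages_for(uint64_t off, uint64_t len)
{
    return (uint32_t)(((off + len + PAGE_SIZE - 1) >> PAGE_SHIFT) -
                      (off >> PAGE_SHIFT));
}

int main(void)
{
    printf("%u\n", calc_pages_for(4090, 10));   /* 2: straddles a boundary */
    printf("%u\n", calc_pages_for(0, 4096));    /* 1: exactly one page */
    return 0;
}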
ps3vram.c
61 unsigned int page_count; member
363 for (i = 0; i < cache->page_count; i++) { in ps3vram_cache_flush()
383 for (i = 0; i < cache->page_count; i++) { in ps3vram_cache_match()
394 i = (jiffies + (counter++)) % cache->page_count; in ps3vram_cache_match()
408 priv->cache.page_count = CACHE_PAGE_COUNT; in ps3vram_cache_init()
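When ps3vram_cache_match() finds no free slot, it picks an eviction victim as (jiffies + counter++) % cache->page_count, a cheap quasi-random choice that needs no per-slot state. A sketch of the same selector with time() standing in for jiffies:

#include <stdio.h>
#include <time.h>

/* Pick a cache slot to evict: mixes a timestamp with a bumping counter. */
static unsigned int pick_victim(unsigned int page_count)
{
    static unsigned int counter;

    return ((unsigned int)time(NULL) + counter++) % page_count;
}

int main(void)
{
    for (int i = 0; i < 4; i++)
        printf("%u ", pick_victim(32));
    printf("\n");
    return 0;
}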
/drivers/gpu/drm/
drm_bufs.c
745 dma->page_count += byte_count >> PAGE_SHIFT; in drm_addbufs_agp()
779 int page_count; in drm_addbufs_pci() local
847 temp_pagelist = kmalloc((dma->page_count + (count << page_order)) * in drm_addbufs_pci()
857 dma->pagelist, dma->page_count * sizeof(*dma->pagelist)); in drm_addbufs_pci()
859 dma->page_count + (count << page_order)); in drm_addbufs_pci()
864 page_count = 0; in drm_addbufs_pci()
883 dma->page_count + page_count, in drm_addbufs_pci()
885 temp_pagelist[dma->page_count + page_count++] in drm_addbufs_pci()
944 if (dma->page_count) { in drm_addbufs_pci()
951 dma->page_count += entry->seg_count << page_order; in drm_addbufs_pci()
[all …]
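drm_addbufs_pci() grows its page list the pre-krealloc way: allocate a larger array sized for dma->page_count plus the new pages, memcpy the existing entries across, then swap in the new list. A self-contained sketch of that grow-by-copy step (names are illustrative):

#include <stdlib.h>
#include <string.h>

/* Grow 'list' (old_count entries) by 'extra' slots via alloc-and-copy,
 * as drm_addbufs_pci() does with its temp_pagelist. */
static unsigned long *grow_pagelist(unsigned long *list,
                                    size_t old_count, size_t extra)
{
    unsigned long *bigger = malloc((old_count + extra) * sizeof(*bigger));

    if (!bigger)
        return NULL;        /* failure: old list left untouched */
    if (list) {
        memcpy(bigger, list, old_count * sizeof(*list));
        free(list);
    }
    return bigger;
}

int main(void)
{
    unsigned long *list = grow_pagelist(NULL, 0, 4);    /* fresh list */
    unsigned long *bigger = grow_pagelist(list, 4, 8);  /* extend to 12 */

    if (bigger)
        list = bigger;
    free(list);
    return 0;
}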
/drivers/gpu/drm/ttm/
ttm_agp_backend.c
63 mem->page_count = 0; in ttm_agp_bind()
70 mem->pages[mem->page_count++] = page; in ttm_agp_bind()
/drivers/target/
target_core_rd.c
88 u32 i, j, page_count = 0, sg_per_table; in rd_release_device_space() local
106 page_count++; in rd_release_device_space()
115 rd_dev->rd_host->rd_host_id, rd_dev->rd_dev_id, page_count, in rd_release_device_space()
116 rd_dev->sg_table_count, (unsigned long)page_count * PAGE_SIZE); in rd_release_device_space()
/drivers/net/ethernet/brocade/bna/
bna_tx_rx.c
1346 (bfi_q)->pages = htons((u16)(bna_qpt)->page_count); \
1930 u32 page_count, in bna_rxq_qpt_setup() argument
1944 rxq->qpt.page_count = page_count; in bna_rxq_qpt_setup()
1953 for (i = 0; i < rxq->qpt.page_count; i++) { in bna_rxq_qpt_setup()
1968 u32 page_count, in bna_rxp_cqpt_setup() argument
1982 rxp->cq.qpt.page_count = page_count; in bna_rxp_cqpt_setup()
1991 for (i = 0; i < rxp->cq.qpt.page_count; i++) { in bna_rxp_cqpt_setup()
2368 u32 page_count; in bna_rx_create() local
2401 page_count = res_info[BNA_RX_RES_MEM_T_CQPT_PAGE].res_u.mem_info.len / in bna_rx_create()
2552 bna_rxp_cqpt_setup(rxp, page_count, PAGE_SIZE, in bna_rx_create()
[all …]
