
Searched for refs:order (Results 1 – 25 of 246), sorted by relevance


/drivers/media/platform/soc_camera/
soc_mediabus.c
26 .order = SOC_MBUS_ORDER_LE,
36 .order = SOC_MBUS_ORDER_LE,
46 .order = SOC_MBUS_ORDER_LE,
56 .order = SOC_MBUS_ORDER_LE,
66 .order = SOC_MBUS_ORDER_LE,
76 .order = SOC_MBUS_ORDER_BE,
86 .order = SOC_MBUS_ORDER_LE,
96 .order = SOC_MBUS_ORDER_BE,
106 .order = SOC_MBUS_ORDER_LE,
115 .order = SOC_MBUS_ORDER_LE,
[all …]
/drivers/media/pci/cx18/
cx18-mailbox.c
239 static void epu_dma_done(struct cx18 *cx, struct cx18_in_work_order *order) in epu_dma_done() argument
248 mb = &order->mb; in epu_dma_done()
255 (order->flags & CX18_F_EWO_MB_STALE_UPON_RECEIPT) ? in epu_dma_done()
261 mdl_ack = order->mdl_ack; in epu_dma_done()
285 if ((order->flags & CX18_F_EWO_MB_STALE_UPON_RECEIPT) && in epu_dma_done()
332 static void epu_debug(struct cx18 *cx, struct cx18_in_work_order *order) in epu_debug() argument
335 char *str = order->str; in epu_debug()
337 CX18_DEBUG_INFO("%x %s\n", order->mb.args[0], str); in epu_debug()
343 static void epu_cmd(struct cx18 *cx, struct cx18_in_work_order *order) in epu_cmd() argument
345 switch (order->rpu) { in epu_cmd()
[all …]
/drivers/gpu/drm/lib/
drm_random.c
15 void drm_random_reorder(unsigned int *order, unsigned int count, in drm_random_reorder() argument
23 swap(order[i], order[j]); in drm_random_reorder()
30 unsigned int *order, i; in drm_random_order() local
32 order = kmalloc_array(count, sizeof(*order), GFP_KERNEL); in drm_random_order()
33 if (!order) in drm_random_order()
34 return order; in drm_random_order()
37 order[i] = i; in drm_random_order()
39 drm_random_reorder(order, count, state); in drm_random_order()
40 return order; in drm_random_order()
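
drm_random_order() above builds the identity permutation and hands it to drm_random_reorder(), which is an in-place Fisher-Yates shuffle (the i915_random.c copy below is identical). A minimal userspace sketch of the same idea, with rand() standing in for the kernel's struct rnd_state:

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

/* In-place Fisher-Yates shuffle of the identity permutation, the
 * same shape as drm_random_order()/drm_random_reorder(); rand()
 * stands in for the kernel's struct rnd_state. */
static unsigned int *random_order(unsigned int count)
{
    unsigned int *order, i;

    order = malloc(count * sizeof(*order));
    if (!order)
        return NULL;
    for (i = 0; i < count; i++)
        order[i] = i;
    for (i = count; i > 1; i--) {
        unsigned int j = (unsigned int)rand() % i;
        unsigned int tmp = order[i - 1];

        order[i - 1] = order[j];
        order[j] = tmp;
    }
    return order;
}

int main(void)
{
    unsigned int *order, i;

    srand((unsigned int)time(NULL));
    order = random_order(8);
    if (!order)
        return 1;
    for (i = 0; i < 8; i++)
        printf("%u ", order[i]);
    printf("\n");
    free(order);
    return 0;
}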
/drivers/gpu/drm/i915/selftests/
i915_random.c
49 void i915_random_reorder(unsigned int *order, unsigned int count, in i915_random_reorder() argument
57 swap(order[i], order[j]); in i915_random_reorder()
63 unsigned int *order, i; in i915_random_order() local
65 order = kmalloc_array(count, sizeof(*order), GFP_KERNEL); in i915_random_order()
66 if (!order) in i915_random_order()
67 return order; in i915_random_order()
70 order[i] = i; in i915_random_order()
72 i915_random_reorder(order, count, state); in i915_random_order()
73 return order; in i915_random_order()
i915_syncmap.c
274 unsigned int pass, order; in igt_syncmap_join_above() local
296 for (order = 0; order < 64; order += SHIFT) { in igt_syncmap_join_above()
297 u64 context = BIT_ULL(order); in igt_syncmap_join_above()
335 unsigned int step, order, idx; in igt_syncmap_join_below() local
345 for (order = 64 - SHIFT; order > 0; order -= SHIFT) { in igt_syncmap_join_below()
346 u64 context = step * BIT_ULL(order); in igt_syncmap_join_below()
354 context, order, step, sync->height, sync->prefix); in igt_syncmap_join_below()
362 for (order = SHIFT; order < 64; order += SHIFT) { in igt_syncmap_join_below()
363 u64 context = step * BIT_ULL(order); in igt_syncmap_join_below()
367 context, order, step); in igt_syncmap_join_below()
[all …]
i915_gem_timeline.c
81 int order, offset; in igt_sync() local
89 for (order = 1; order < 64; order++) { in igt_sync()
90 for (offset = -1; offset <= (order > 1); offset++) { in igt_sync()
91 u64 ctx = BIT_ULL(order) + offset; in igt_sync()
105 for (order = 1; order < 64; order++) { in igt_sync()
106 for (offset = -1; offset <= (order > 1); offset++) { in igt_sync()
107 u64 ctx = BIT_ULL(order) + offset; in igt_sync()
134 int order, last_order; in bench_sync() local
257 for (last_order = 1, order = 1; order < 32; in bench_sync()
258 ({ int tmp = last_order; last_order = order; order += tmp; })) { in bench_sync()
[all …]
/drivers/staging/android/ion/
ion_system_heap.c
35 static int order_to_index(unsigned int order) in order_to_index() argument
40 if (order == orders[i]) in order_to_index()
46 static inline unsigned int order_to_size(int order) in order_to_size() argument
48 return PAGE_SIZE << order; in order_to_size()
64 unsigned long order) in alloc_buffer_page() argument
71 pool = heap->uncached_pools[order_to_index(order)]; in alloc_buffer_page()
73 pool = heap->cached_pools[order_to_index(order)]; in alloc_buffer_page()
84 unsigned int order = compound_order(page); in free_buffer_page() local
89 __free_pages(page, order); in free_buffer_page()
94 pool = heap->uncached_pools[order_to_index(order)]; in free_buffer_page()
[all …]
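
These helpers translate between a page order and concrete quantities: order_to_size() is just PAGE_SIZE << order (an order-n allocation is 2^n contiguous pages), and order_to_index() finds the pool serving a given order in a small fixed table. A compilable sketch; the orders[] values here are illustrative, the real table lives in ion_system_heap.c:

#include <stdio.h>

#define PAGE_SIZE 4096UL

/* The heap serves only a few fixed orders; illustrative values. */
static const unsigned int orders[] = { 8, 4, 0 };
#define NUM_ORDERS (sizeof(orders) / sizeof(orders[0]))

/* Map an order to its pool slot; asking for an order not in the
 * table is a caller bug. */
static int order_to_index(unsigned int order)
{
    unsigned int i;

    for (i = 0; i < NUM_ORDERS; i++)
        if (order == orders[i])
            return (int)i;
    return -1;
}

/* An order-n allocation is 2^n contiguous pages. */
static unsigned long order_to_size(unsigned int order)
{
    return PAGE_SIZE << order;
}

int main(void)
{
    unsigned int i;

    for (i = 0; i < NUM_ORDERS; i++)
        printf("order %u -> pool %d, %lu bytes\n", orders[i],
               order_to_index(orders[i]), order_to_size(orders[i]));
    return 0;
}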
ion_page_pool.c
30 struct page *page = alloc_pages(pool->gfp_mask, pool->order); in ion_page_pool_alloc_pages()
40 __free_pages(page, pool->order); in ion_page_pool_free_pages()
98 BUG_ON(pool->order != compound_order(page)); in ion_page_pool_free()
112 return count << pool->order; in ion_page_pool_total()
143 freed += (1 << pool->order); in ion_page_pool_shrink()
149 struct ion_page_pool *ion_page_pool_create(gfp_t gfp_mask, unsigned int order, in ion_page_pool_create() argument
161 pool->order = order; in ion_page_pool_create()
163 plist_node_init(&pool->list, order); in ion_page_pool_create()
/drivers/pci/endpoint/
pci-epc-mem.c
36 int order; in pci_epc_mem_get_order() local
42 order = fls(size); in pci_epc_mem_get_order()
44 order = fls64(size); in pci_epc_mem_get_order()
46 return order; in pci_epc_mem_get_order()
139 int order; in pci_epc_mem_alloc_addr() local
142 order = pci_epc_mem_get_order(mem, size); in pci_epc_mem_alloc_addr()
144 pageno = bitmap_find_free_region(mem->bitmap, mem->pages, order); in pci_epc_mem_alloc_addr()
151 bitmap_release_region(mem->bitmap, pageno, order); in pci_epc_mem_alloc_addr()
172 int order; in pci_epc_mem_free_addr() local
177 order = pci_epc_mem_get_order(mem, size); in pci_epc_mem_free_addr()
[all …]
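
pci_epc_mem_get_order() rounds an allocation size up to a power of two and returns its bit position via fls()/fls64(). A rough userspace sketch of that rounding, assuming byte granularity and ignoring the real helper's scaling by the window's page size:

#include <stdio.h>

/* Find-last-set: 1-based index of the highest set bit, 0 for 0;
 * a portable stand-in for the kernel's fls(). */
static int fls_u32(unsigned int x)
{
    int r = 0;

    while (x) {
        x >>= 1;
        r++;
    }
    return r;
}

/* Order of the smallest power of two >= size: fls(size - 1). The
 * real pci_epc_mem_get_order() first shifts size - 1 right by the
 * window's page shift, so its result counts pages, not bytes. */
static int mem_get_order(unsigned int size)
{
    return size > 1 ? fls_u32(size - 1) : 0;
}

int main(void)
{
    printf("%d %d %d\n", mem_get_order(1), mem_get_order(4096),
           mem_get_order(4097));    /* 0 12 13 */
    return 0;
}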
/drivers/gpu/drm/selftests/
test-drm_mm.c
375 unsigned int *order, n, m, o = 0; in __igt_reserve() local
388 order = drm_random_order(count, &prng); in __igt_reserve()
389 if (!order) in __igt_reserve()
403 nodes[n].start = order[n] * size; in __igt_reserve()
429 drm_random_reorder(order, count, &prng); in __igt_reserve()
432 set_node(&tmp, order[n] * size, 1))) in __igt_reserve()
436 drm_mm_remove_node(&nodes[order[n]]); in __igt_reserve()
437 err = drm_mm_reserve_node(&mm, &nodes[order[n]]); in __igt_reserve()
465 node = &nodes[order[(o + m) % count]]; in __igt_reserve()
470 node = &nodes[order[(o + m) % count]]; in __igt_reserve()
[all …]
/drivers/infiniband/hw/hns/
hns_roce_mr.c
69 static int hns_roce_buddy_alloc(struct hns_roce_buddy *buddy, int order, in hns_roce_buddy_alloc() argument
77 for (o = order; o <= buddy->max_order; ++o) { in hns_roce_buddy_alloc()
92 while (o > order) { in hns_roce_buddy_alloc()
101 *seg <<= order; in hns_roce_buddy_alloc()
106 int order) in hns_roce_buddy_free() argument
108 seg >>= order; in hns_roce_buddy_free()
112 while (test_bit(seg ^ 1, buddy->bits[order])) { in hns_roce_buddy_free()
113 clear_bit(seg ^ 1, buddy->bits[order]); in hns_roce_buddy_free()
114 --buddy->num_free[order]; in hns_roce_buddy_free()
116 ++order; in hns_roce_buddy_free()
[all …]
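
This is a textbook buddy allocator: one free-bitmap per order; on allocation a larger free block is split downward, its buddy (seg ^ 1) going back onto the next-lower free list; on free, a block merges with its buddy for as long as the buddy is also free. The same code shape recurs in mlx4 (mr.c) and mthca (mthca_mr.c) below. A self-contained sketch with fixed arrays in place of the driver's dynamically sized bitmaps:

#include <stdio.h>

#define MAX_ORDER 4                     /* 2^4 = 16 units in the pool */
#define MAX_SEGS  (1 << MAX_ORDER)

/* One free-bitmap per order, as in struct hns_roce_buddy:
 * bits[o][s] != 0 means segment s of size 2^o is free. */
static unsigned char bits[MAX_ORDER + 1][MAX_SEGS];

static int buddy_alloc(int order, unsigned int *seg)
{
    int o, s;

    for (o = order; o <= MAX_ORDER; o++)
        for (s = 0; s < (MAX_SEGS >> o); s++)
            if (bits[o][s]) {
                bits[o][s] = 0;
                /* Split down to the requested order, putting the
                 * buddy (s ^ 1) on the next-lower free list. */
                while (o > order) {
                    o--;
                    s <<= 1;
                    bits[o][s ^ 1] = 1;
                }
                *seg = (unsigned int)s << order;  /* first unit */
                return 0;
            }
    return -1;
}

static void buddy_free(unsigned int seg, int order)
{
    seg >>= order;
    /* Merge upward while the buddy is also free. */
    while (order < MAX_ORDER && bits[order][seg ^ 1]) {
        bits[order][seg ^ 1] = 0;
        seg >>= 1;
        order++;
    }
    bits[order][seg] = 1;
}

int main(void)
{
    unsigned int a = 0, b = 0;

    bits[MAX_ORDER][0] = 1;             /* one maximal free block */
    buddy_alloc(2, &a);                 /* 4 units */
    buddy_alloc(1, &b);                 /* 2 units */
    printf("a=%u b=%u\n", a, b);        /* a=0 b=4 */
    buddy_free(b, 1);
    buddy_free(a, 2);
    printf("coalesced: %d\n", bits[MAX_ORDER][0]);  /* 1 */
    return 0;
}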
/drivers/staging/media/atomisp/pci/atomisp2/hmm/
hmm_reserved_pool.c
123 unsigned int order = 0; in hmm_reserved_pool_init() local
141 order = MAX_ORDER; in hmm_reserved_pool_init()
144 blk_pgnr = 1U << order; in hmm_reserved_pool_init()
146 order--; in hmm_reserved_pool_init()
149 BUG_ON(order > MAX_ORDER); in hmm_reserved_pool_init()
151 pages = alloc_pages(GFP_KERNEL | __GFP_NOWARN, order); in hmm_reserved_pool_init()
153 if (order == 0) { in hmm_reserved_pool_init()
163 order--; in hmm_reserved_pool_init()
166 blk_pgnr = 1U << order; in hmm_reserved_pool_init()
172 __free_pages(pages, order); in hmm_reserved_pool_init()
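
hmm_reserved_pool_init() fills its pool with the largest contiguous chunks it can get, halving the attempted order whenever alloc_pages() fails and giving up only once single pages fail; i915_gem_internal.c and swiotlb-xen.c below use the same decreasing-order fallback. A sketch of the loop, with a fake allocator that refuses large orders to exercise the fallback path:

#include <stdio.h>
#include <stdlib.h>

#define MAX_ORDER 11   /* the kernel's historical default limit */

/* Stand-in allocator that pretends contiguous memory above order 6
 * is unavailable, to show the fallback path. */
static void *fake_alloc_pages(unsigned int order)
{
    return order > 6 ? NULL : malloc(4096UL << order);
}

/* Allocate pgnr pages in the largest chunks available, halving the
 * attempted order on failure; a sketch of the shape of
 * hmm_reserved_pool_init(), which also keeps the pages it gets. */
static int fill_pool(unsigned int pgnr)
{
    unsigned int order = MAX_ORDER;
    unsigned int chunks = 0;

    while (pgnr) {
        void *pages;

        while ((1U << order) > pgnr)   /* don't overshoot the need */
            order--;

        pages = fake_alloc_pages(order);
        if (!pages) {
            if (order == 0)
                return -1;             /* even one page failed */
            order--;                   /* retry with a smaller chunk */
            continue;
        }
        pgnr -= 1U << order;
        chunks++;
        free(pages);                   /* the real pool keeps these */
    }
    printf("filled in %u chunks\n", chunks);
    return 0;
}

int main(void)
{
    return fill_pool(1000);
}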
/drivers/net/ethernet/mellanox/mlx4/
mr.c
46 static u32 mlx4_buddy_alloc(struct mlx4_buddy *buddy, int order) in mlx4_buddy_alloc() argument
54 for (o = order; o <= buddy->max_order; ++o) in mlx4_buddy_alloc()
69 while (o > order) { in mlx4_buddy_alloc()
78 seg <<= order; in mlx4_buddy_alloc()
83 static void mlx4_buddy_free(struct mlx4_buddy *buddy, u32 seg, int order) in mlx4_buddy_free() argument
85 seg >>= order; in mlx4_buddy_free()
89 while (test_bit(seg ^ 1, buddy->bits[order])) { in mlx4_buddy_free()
90 clear_bit(seg ^ 1, buddy->bits[order]); in mlx4_buddy_free()
91 --buddy->num_free[order]; in mlx4_buddy_free()
93 ++order; in mlx4_buddy_free()
[all …]
/drivers/gpu/drm/
drm_hashtab.c
41 int drm_ht_create(struct drm_open_hash *ht, unsigned int order) in drm_ht_create() argument
43 unsigned int size = 1 << order; in drm_ht_create()
45 ht->order = order; in drm_ht_create()
66 hashed_key = hash_long(key, ht->order); in drm_ht_verbose_list()
80 hashed_key = hash_long(key, ht->order); in drm_ht_find_key()
98 hashed_key = hash_long(key, ht->order); in drm_ht_find_key_rcu()
117 hashed_key = hash_long(key, ht->order); in drm_ht_insert_item()
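
drm_ht_create() sizes the table at 1 << order buckets, and hash_long(key, ht->order) picks a bucket by multiplicative hashing. A sketch assuming a 64-bit unsigned long and the kernel's golden-ratio multiplier:

#include <stdio.h>

/* hash_long(key, bits) maps a key into a bits-wide bucket index.
 * Sketch of the kernel's 64-bit multiplicative hash, assuming
 * unsigned long is 64 bits; the multiplier is GOLDEN_RATIO_64. */
static unsigned long hash_long(unsigned long key, unsigned int bits)
{
    return (key * 0x61C8864680B583EBUL) >> (64 - bits);
}

int main(void)
{
    unsigned int order = 4;             /* 1 << 4 = 16 buckets */
    unsigned long key;

    for (key = 100; key < 105; key++)
        printf("key %lu -> bucket %lu of %u\n",
               key, hash_long(key, order), 1U << order);
    return 0;
}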
drm_bufs.c
690 int order; in drm_legacy_addbufs_agp() local
703 order = order_base_2(request->size); in drm_legacy_addbufs_agp()
704 size = 1 << order; in drm_legacy_addbufs_agp()
708 page_order = order - PAGE_SHIFT > 0 ? order - PAGE_SHIFT : 0; in drm_legacy_addbufs_agp()
715 DRM_DEBUG("order: %d\n", order); in drm_legacy_addbufs_agp()
722 if (order < DRM_MIN_ORDER || order > DRM_MAX_ORDER) in drm_legacy_addbufs_agp()
747 entry = &dma->bufs[order]; in drm_legacy_addbufs_agp()
776 buf->order = order; in drm_legacy_addbufs_agp()
849 int order; in drm_legacy_addbufs_pci() local
874 order = order_base_2(request->size); in drm_legacy_addbufs_pci()
[all …]
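
The legacy buffer paths round the requested size up to a power of two with order_base_2() (etnaviv_cmdbuf.c and the mlx5 mem.c entry below compute the same thing as ilog2(roundup_pow_of_two(n))), and since that order counts bytes they derive a page order by subtracting PAGE_SHIFT, clamped at zero. A small worked sketch, assuming 4 KiB pages:

#include <stdio.h>

#define PAGE_SHIFT 12

/* order_base_2(n): log2 of n rounded up to a power of two;
 * a portable sketch of the kernel helper. */
static int order_base_2(unsigned long n)
{
    int order = 0;

    while ((1UL << order) < n)
        order++;
    return order;
}

int main(void)
{
    unsigned long request = 20000;              /* bytes */
    int order = order_base_2(request);          /* 15 -> 32768 bytes */
    unsigned long size = 1UL << order;
    /* The buffer order counts bytes; page_order converts that into
     * whole pages, clamped at zero for sub-page buffers. */
    int page_order = order - PAGE_SHIFT > 0 ? order - PAGE_SHIFT : 0;

    printf("order=%d size=%lu page_order=%d\n", order, size, page_order);
    return 0;
}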
/drivers/gpu/drm/i915/
i915_gem_internal.c
93 int order = min(fls(npages) - 1, max_order); in i915_gem_object_get_pages_internal() local
97 page = alloc_pages(gfp | (order ? QUIET : 0), order); in i915_gem_object_get_pages_internal()
100 if (!order--) in i915_gem_object_get_pages_internal()
104 max_order = order; in i915_gem_object_get_pages_internal()
107 sg_set_page(sg, page, PAGE_SIZE << order, 0); in i915_gem_object_get_pages_internal()
110 npages -= 1 << order; in i915_gem_object_get_pages_internal()
/drivers/xen/
swiotlb-xen.c
221 unsigned long bytes, order; in xen_swiotlb_init() local
229 order = get_order(xen_io_tlb_nslabs << IO_TLB_SHIFT); in xen_swiotlb_init()
238 while ((SLABS_PER_PAGE << order) > IO_TLB_MIN_SLABS) { in xen_swiotlb_init()
239 xen_io_tlb_start = (void *)xen_get_swiotlb_free_pages(order); in xen_swiotlb_init()
242 order--; in xen_swiotlb_init()
244 if (order != get_order(bytes)) { in xen_swiotlb_init()
246 (PAGE_SIZE << order) >> 20); in xen_swiotlb_init()
247 xen_io_tlb_nslabs = SLABS_PER_PAGE << order; in xen_swiotlb_init()
266 free_pages((unsigned long)xen_io_tlb_start, order); in xen_swiotlb_init()
297 free_pages((unsigned long)xen_io_tlb_start, order); in xen_swiotlb_init()
[all …]
/drivers/media/platform/vimc/
vimc-debayer.c
47 enum vimc_deb_rgb_colors order[2][2]; member
76 .order = { { VIMC_DEB_BLUE, VIMC_DEB_GREEN },
81 .order = { { VIMC_DEB_GREEN, VIMC_DEB_BLUE },
86 .order = { { VIMC_DEB_GREEN, VIMC_DEB_RED },
91 .order = { { VIMC_DEB_RED, VIMC_DEB_GREEN },
96 .order = { { VIMC_DEB_BLUE, VIMC_DEB_GREEN },
101 .order = { { VIMC_DEB_GREEN, VIMC_DEB_BLUE },
106 .order = { { VIMC_DEB_GREEN, VIMC_DEB_RED },
111 .order = { { VIMC_DEB_RED, VIMC_DEB_GREEN },
116 .order = { { VIMC_DEB_BLUE, VIMC_DEB_GREEN },
[all …]
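
A Bayer mosaic is fully described by its 2x2 color order, and the debayer looks up a pixel's color as order[line % 2][col % 2]. A quick illustration for the BGGR case:

#include <stdio.h>

enum color { RED, GREEN, BLUE };
static const char *names[] = { "R", "G", "B" };

int main(void)
{
    /* BGGR: the 2x2 tile repeats across the whole sensor, so a
     * pixel's color is order[line % 2][col % 2], which is how
     * vimc-debayer uses its per-format order member. */
    enum color order[2][2] = { { BLUE, GREEN }, { GREEN, RED } };
    int line, col;

    for (line = 0; line < 4; line++) {
        for (col = 0; col < 4; col++)
            printf("%s ", names[order[line % 2][col % 2]]);
        printf("\n");
    }
    return 0;
}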
/drivers/infiniband/hw/mlx5/
mem.c
49 int *ncont, int *order) in mlx5_ib_cont_pages() argument
64 if (order) in mlx5_ib_cont_pages()
65 *order = ilog2(roundup_pow_of_two(*ncont)); in mlx5_ib_cont_pages()
98 if (order) in mlx5_ib_cont_pages()
99 *order = ilog2(roundup_pow_of_two(i) >> m); in mlx5_ib_cont_pages()
105 if (order) in mlx5_ib_cont_pages()
106 *order = 0; in mlx5_ib_cont_pages()
/drivers/atm/
eni.c
173 1 << eni_dev->free_list[i].order); in dump_mem()
205 int len,order; in eni_put_free() local
217 for (order = 0; !(((unsigned long)start | size) & (1 << order)); order++); in eni_put_free()
218 if (MID_MIN_BUF_SIZE > (1 << order)) { in eni_put_free()
220 order); in eni_put_free()
224 list[len].order = order; in eni_put_free()
226 start += 1 << order; in eni_put_free()
227 size -= 1 << order; in eni_put_free()
238 int len,i,order,best_order,index; in eni_alloc_mem() local
244 for (order = 0; (1 << order) < *size; order++); in eni_alloc_mem()
[all …]
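
eni_put_free() derives a block's order from the common alignment of its start address and size: the idiom counts the trailing zero bits of start | size, i.e. the largest power of two that divides both. For example:

#include <stdio.h>

int main(void)
{
    /* A block at 0x1000 of 0x400 bytes: start | size = 0x1400, whose
     * lowest set bit is bit 10, so the block can be handed out as an
     * order-10 (1024-byte) region. */
    unsigned long start = 0x1000, size = 0x400;
    int order;

    for (order = 0; !((start | size) & (1UL << order)); order++)
        ;
    printf("order %d (block of %lu bytes)\n", order, 1UL << order);
    return 0;
}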
/drivers/s390/char/
vmcp.c
62 int nr_pages, order; in vmcp_response_alloc() local
64 order = get_order(session->bufsize); in vmcp_response_alloc()
71 if (order > 2) in vmcp_response_alloc()
78 session->response = (char *)__get_free_pages(GFP_KERNEL | __GFP_RETRY_MAYFAIL, order); in vmcp_response_alloc()
83 int nr_pages, order; in vmcp_response_free() local
88 order = get_order(session->bufsize); in vmcp_response_free()
95 free_pages((unsigned long)session->response, order); in vmcp_response_free()
/drivers/infiniband/hw/mthca/
mthca_mr.c
43 int order; member
84 static u32 mthca_buddy_alloc(struct mthca_buddy *buddy, int order) in mthca_buddy_alloc() argument
92 for (o = order; o <= buddy->max_order; ++o) in mthca_buddy_alloc()
107 while (o > order) { in mthca_buddy_alloc()
116 seg <<= order; in mthca_buddy_alloc()
121 static void mthca_buddy_free(struct mthca_buddy *buddy, u32 seg, int order) in mthca_buddy_free() argument
123 seg >>= order; in mthca_buddy_free()
127 while (test_bit(seg ^ 1, buddy->bits[order])) { in mthca_buddy_free()
128 clear_bit(seg ^ 1, buddy->bits[order]); in mthca_buddy_free()
129 --buddy->num_free[order]; in mthca_buddy_free()
[all …]
/drivers/base/
dma-coherent.c
166 int order = get_order(size); in __dma_alloc_from_coherent() local
176 pageno = bitmap_find_free_region(mem->bitmap, mem->size, order); in __dma_alloc_from_coherent()
238 int order, void *vaddr) in __dma_release_from_coherent() argument
246 bitmap_release_region(mem->bitmap, page, order); in __dma_release_from_coherent()
265 int dma_release_from_dev_coherent(struct device *dev, int order, void *vaddr) in dma_release_from_dev_coherent() argument
269 return __dma_release_from_coherent(mem, order, vaddr); in dma_release_from_dev_coherent()
273 int dma_release_from_global_coherent(int order, void *vaddr) in dma_release_from_global_coherent() argument
278 return __dma_release_from_coherent(dma_coherent_default_memory, order, in dma_release_from_global_coherent()
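
The coherent-DMA pool hands out naturally aligned power-of-two runs of pages from a bitmap: bitmap_find_free_region() claims 1 << order clear bits and returns the page number, and bitmap_release_region() clears them again on free. A single-word sketch of that contract:

#include <stdio.h>

#define POOL_BITS 32

static unsigned int bitmap;   /* one bit per page, 32-page pool */

/* Find a free, naturally aligned run of 1 << order bits and mark it
 * busy; -1 if none. Mirrors what bitmap_find_free_region() does for
 * __dma_alloc_from_coherent(), shrunk to a single word. */
static int find_free_region(int order)
{
    unsigned int n = 1U << order;
    unsigned int mask = (n == 32) ? ~0U : ((1U << n) - 1);
    unsigned int pos;

    for (pos = 0; pos + n <= POOL_BITS; pos += n)   /* aligned steps */
        if (!((bitmap >> pos) & mask)) {
            bitmap |= mask << pos;
            return (int)pos;
        }
    return -1;
}

static void release_region(int pos, int order)
{
    unsigned int n = 1U << order;
    unsigned int mask = (n == 32) ? ~0U : ((1U << n) - 1);

    bitmap &= ~(mask << pos);
}

int main(void)
{
    int a = find_free_region(2);   /* 4 pages at bit 0 */
    int b = find_free_region(3);   /* 8 pages, skips the busy run */

    printf("a=%d b=%d bitmap=%08x\n", a, b, bitmap);
    release_region(a, 2);
    release_region(b, 3);
    printf("bitmap=%08x\n", bitmap);
    return 0;
}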
/drivers/gpu/drm/etnaviv/
etnaviv_cmdbuf.c
95 int granule_offs, order, ret; in etnaviv_cmdbuf_new() local
104 order = order_base_2(ALIGN(size, SUBALLOC_GRANULE) / SUBALLOC_GRANULE); in etnaviv_cmdbuf_new()
108 SUBALLOC_GRANULES, order); in etnaviv_cmdbuf_new()
132 int order = order_base_2(ALIGN(cmdbuf->size, SUBALLOC_GRANULE) / in etnaviv_cmdbuf_free() local
138 order); in etnaviv_cmdbuf_free()
/drivers/iommu/
dma-iommu.c
275 unsigned long order, base_pfn, end_pfn; in iommu_dma_init_domain() local
281 order = __ffs(domain->pgsize_bitmap); in iommu_dma_init_domain()
282 base_pfn = max_t(unsigned long, 1, base >> order); in iommu_dma_init_domain()
283 end_pfn = (base + size - 1) >> order; in iommu_dma_init_domain()
294 domain->geometry.aperture_start >> order); in iommu_dma_init_domain()
296 domain->geometry.aperture_end >> order); in iommu_dma_init_domain()
306 end_pfn &= DMA_BIT_MASK(32) >> order; in iommu_dma_init_domain()
310 if (1UL << order != iovad->granule || in iommu_dma_init_domain()
324 init_iova_domain(iovad, 1UL << order, base_pfn, end_pfn); in iommu_dma_init_domain()
462 unsigned int order = __fls(order_mask); in __iommu_dma_alloc_pages() local
[all …]
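
Here order = __ffs(domain->pgsize_bitmap) takes the lowest set bit of the supported-page-size bitmap, so the IOVA allocator works in granules of the smallest page size the IOMMU can map. A sketch with a hypothetical bitmap:

#include <stdio.h>

/* __ffs(): index of the lowest set bit (undefined for 0);
 * userspace stand-in. */
static unsigned int my_ffs(unsigned long x)
{
    unsigned int i = 0;

    while (!(x & 1UL)) {
        x >>= 1;
        i++;
    }
    return i;
}

int main(void)
{
    /* A domain supporting 4K, 2M and 1G mappings; bit n set means
     * page size 1 << n is supported (hypothetical bitmap). */
    unsigned long pgsize_bitmap = (1UL << 12) | (1UL << 21) | (1UL << 30);
    unsigned long order = my_ffs(pgsize_bitmap);

    /* The IOVA space is then managed in units of the smallest page:
     * every address and bound is shifted right by this order. */
    printf("granule order %lu (%lu bytes)\n", order, 1UL << order);
    return 0;
}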
