/drivers/staging/media/soc_camera/
D | soc_mediabus.c |
     22  .order = SOC_MBUS_ORDER_LE,
     32  .order = SOC_MBUS_ORDER_LE,
     42  .order = SOC_MBUS_ORDER_LE,
     52  .order = SOC_MBUS_ORDER_LE,
     62  .order = SOC_MBUS_ORDER_LE,
     72  .order = SOC_MBUS_ORDER_BE,
     82  .order = SOC_MBUS_ORDER_LE,
     92  .order = SOC_MBUS_ORDER_BE,
    102  .order = SOC_MBUS_ORDER_LE,
    111  .order = SOC_MBUS_ORDER_LE,
    [all …]

/drivers/media/pci/cx18/
D | cx18-mailbox.c |
    230  static void epu_dma_done(struct cx18 *cx, struct cx18_in_work_order *order)  in epu_dma_done() argument
    239  mb = &order->mb;  in epu_dma_done()
    246  (order->flags & CX18_F_EWO_MB_STALE_UPON_RECEIPT) ?  in epu_dma_done()
    252  mdl_ack = order->mdl_ack;  in epu_dma_done()
    276  if ((order->flags & CX18_F_EWO_MB_STALE_UPON_RECEIPT) &&  in epu_dma_done()
    323  static void epu_debug(struct cx18 *cx, struct cx18_in_work_order *order)  in epu_debug() argument
    326  char *str = order->str;  in epu_debug()
    328  CX18_DEBUG_INFO("%x %s\n", order->mb.args[0], str);  in epu_debug()
    334  static void epu_cmd(struct cx18 *cx, struct cx18_in_work_order *order)  in epu_cmd() argument
    336  switch (order->rpu) {  in epu_cmd()
    [all …]

/drivers/gpu/drm/i915/selftests/
D | i915_buddy.c |
    204  unsigned int order;  in igt_check_mm() local
    220  order = i915_buddy_block_order(root);  in igt_check_mm()
    223  if (order != mm->max_order) {  in igt_check_mm()
    244  block = list_first_entry_or_null(&mm->free_list[order],  in igt_check_mm()
    248  pr_err("root mismatch at order=%u\n", order);  in igt_check_mm()
    318  int order;  in igt_buddy_alloc_smoke() local
    330  order = max_order;  in igt_buddy_alloc_smoke()
    335  block = i915_buddy_alloc(&mm, order);  in igt_buddy_alloc_smoke()
    340  order);  in igt_buddy_alloc_smoke()
    342  if (order--) {  in igt_buddy_alloc_smoke()
    [all …]

D | i915_random.c |
     69  void i915_random_reorder(unsigned int *order, unsigned int count,  in i915_random_reorder() argument
     72  i915_prandom_shuffle(order, sizeof(*order), count, state);  in i915_random_reorder()
     77  unsigned int *order, i;  in i915_random_order() local
     79  order = kmalloc_array(count, sizeof(*order),  in i915_random_order()
     81  if (!order)  in i915_random_order()
     82  return order;  in i915_random_order()
     85  order[i] = i;  in i915_random_order()
     87  i915_random_reorder(order, count, state);  in i915_random_order()
     88  return order;  in i915_random_order()
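
Note: i915_random_order() here and drm_random_order() in drm_random.c below share one pattern: build the identity permutation 0..count-1, then shuffle it in place (i915 delegates to i915_prandom_shuffle(), drm_random does a Fisher-Yates loop with swap()). A minimal userspace sketch of that pattern, not the kernel helpers themselves (random_order() and the rand()-based swap are illustrative stand-ins):

    #include <stdlib.h>

    static unsigned int *random_order(unsigned int count)
    {
        unsigned int *order, i;

        order = malloc(count * sizeof(*order));
        if (!order)
            return NULL;

        for (i = 0; i < count; i++)
            order[i] = i;               /* identity permutation */

        /* Fisher-Yates: swap each slot with a uniformly chosen earlier one */
        for (i = count; i > 1; i--) {
            unsigned int j = (unsigned int)rand() % i;
            unsigned int tmp = order[i - 1];

            order[i - 1] = order[j];
            order[j] = tmp;
        }
        return order;
    }

Tests then index their objects through order[] (as test-drm_mm.c does further down) so every run exercises operations in a different sequence.
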
D | i915_syncmap.c |
    274  unsigned int pass, order;  in igt_syncmap_join_above() local
    296  for (order = 0; order < 64; order += SHIFT) {  in igt_syncmap_join_above()
    297  u64 context = BIT_ULL(order);  in igt_syncmap_join_above()
    335  unsigned int step, order, idx;  in igt_syncmap_join_below() local
    345  for (order = 64 - SHIFT; order > 0; order -= SHIFT) {  in igt_syncmap_join_below()
    346  u64 context = step * BIT_ULL(order);  in igt_syncmap_join_below()
    354  context, order, step, sync->height, sync->prefix);  in igt_syncmap_join_below()
    362  for (order = SHIFT; order < 64; order += SHIFT) {  in igt_syncmap_join_below()
    363  u64 context = step * BIT_ULL(order);  in igt_syncmap_join_below()
    367  context, order, step);  in igt_syncmap_join_below()
    [all …]

/drivers/gpu/drm/lib/
D | drm_random.c |
     15  void drm_random_reorder(unsigned int *order, unsigned int count,  in drm_random_reorder() argument
     23  swap(order[i], order[j]);  in drm_random_reorder()
     30  unsigned int *order, i;  in drm_random_order() local
     32  order = kmalloc_array(count, sizeof(*order), GFP_KERNEL);  in drm_random_order()
     33  if (!order)  in drm_random_order()
     34  return order;  in drm_random_order()
     37  order[i] = i;  in drm_random_order()
     39  drm_random_reorder(order, count, state);  in drm_random_order()
     40  return order;  in drm_random_order()

/drivers/gpu/drm/nouveau/nvkm/subdev/therm/
D | gk104.c |
     34  const struct gk104_clkgate_engine_info *order = therm->clkgate_order;  in gk104_clkgate_enable() local
     38  for (i = 0; order[i].engine != NVKM_SUBDEV_NR; i++) {  in gk104_clkgate_enable()
     39  if (!nvkm_device_subdev(dev, order[i].engine))  in gk104_clkgate_enable()
     42  nvkm_mask(dev, 0x20200 + order[i].offset, 0xff00, 0x4500);  in gk104_clkgate_enable()
     50  for (i = 0; order[i].engine != NVKM_SUBDEV_NR; i++) {  in gk104_clkgate_enable()
     51  if (!nvkm_device_subdev(dev, order[i].engine))  in gk104_clkgate_enable()
     54  nvkm_mask(dev, 0x20200 + order[i].offset, 0x00ff, 0x0045);  in gk104_clkgate_enable()
     63  const struct gk104_clkgate_engine_info *order = therm->clkgate_order;  in gk104_clkgate_fini() local
     67  for (i = 0; order[i].engine != NVKM_SUBDEV_NR; i++) {  in gk104_clkgate_fini()
     68  if (!nvkm_device_subdev(dev, order[i].engine))  in gk104_clkgate_fini()
    [all …]
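
Note: here "order" is an ordering table, not a power-of-two size. gk104 walks therm->clkgate_order until a sentinel entry whose engine is NVKM_SUBDEV_NR, so the table needs no explicit length. A self-contained sketch of that sentinel-terminated walk (the names, the ENGINE_END sentinel, and the values are illustrative, not nouveau's):

    #include <stdio.h>

    #define ENGINE_END -1                   /* stand-in for the NVKM_SUBDEV_NR sentinel */

    struct clkgate_engine_info {
        int engine;                         /* engine id; ENGINE_END terminates the table */
        unsigned int offset;                /* per-engine register offset */
    };

    static const struct clkgate_engine_info clkgate_order[] = {
        { 0, 0x00 },
        { 1, 0x20 },
        { ENGINE_END, 0 },                  /* sentinel ends the walk */
    };

    static void clkgate_walk(void)
    {
        for (int i = 0; clkgate_order[i].engine != ENGINE_END; i++)
            printf("engine %d -> reg offset 0x%x\n",
                   clkgate_order[i].engine, clkgate_order[i].offset);
    }
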
/drivers/pci/endpoint/
D | pci-epc-mem.c |
     25  int order;  in pci_epc_mem_get_order() local
     31  order = fls(size);  in pci_epc_mem_get_order()
     33  order = fls64(size);  in pci_epc_mem_get_order()
     35  return order;  in pci_epc_mem_get_order()
    128  int order;  in pci_epc_mem_alloc_addr() local
    131  order = pci_epc_mem_get_order(mem, size);  in pci_epc_mem_alloc_addr()
    133  pageno = bitmap_find_free_region(mem->bitmap, mem->pages, order);  in pci_epc_mem_alloc_addr()
    140  bitmap_release_region(mem->bitmap, pageno, order);  in pci_epc_mem_alloc_addr()
    161  int order;  in pci_epc_mem_free_addr() local
    166  order = pci_epc_mem_get_order(mem, size);  in pci_epc_mem_free_addr()
    [all …]
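
Note: the endpoint allocator rounds every window request up to 2^order pages so that bitmap_find_free_region()/bitmap_release_region() can track the whole space with one bitmap and guarantee natural alignment. A portable sketch of the size-to-order step (size_to_order() is my helper, not the pci_epc_mem API, which derives the order from fls()/fls64()):

    /* smallest order such that 2^order pages cover 'size' bytes */
    static unsigned int size_to_order(unsigned long size, unsigned long page_size)
    {
        unsigned long pages = (size + page_size - 1) / page_size;
        unsigned int order = 0;

        while ((1UL << order) < pages)
            order++;
        return order;
    }

For example, size_to_order(3 * 4096, 4096) returns 2: a 12 KiB request occupies a naturally aligned 4-page region, trading one wasted page for trivial bitmap arithmetic.
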
/drivers/staging/android/ion/heaps/
D | ion_page_pool.c |
     19  return alloc_pages(pool->gfp_mask, pool->order);  in ion_page_pool_alloc_pages()
     25  __free_pages(page, pool->order);  in ion_page_pool_free_pages()
     40  1 << pool->order);  in ion_page_pool_add()
     60  -(1 << pool->order));  in ion_page_pool_remove()
     85  BUG_ON(pool->order != compound_order(page));  in ion_page_pool_free()
     97  return count << pool->order;  in ion_page_pool_total()
    128  freed += (1 << pool->order);  in ion_page_pool_shrink()
    134  struct ion_page_pool *ion_page_pool_create(gfp_t gfp_mask, unsigned int order)  in ion_page_pool_create() argument
    145  pool->order = order;  in ion_page_pool_create()
    147  plist_node_init(&pool->list, order);  in ion_page_pool_create()
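
Note: an ion page pool holds blocks of a single fixed order, which reduces all of its accounting to shifts: ion_page_pool_total() is count << order, and the shrinker counts 1 << order pages per block it frees. A reduced sketch of that bookkeeping (simplified stand-in types, not the ion API):

    struct fixed_order_pool {
        unsigned int order;                 /* every pooled block is 2^order pages */
        unsigned int count;                 /* blocks currently in the pool */
    };

    /* pages held: count blocks of 2^order pages each */
    static unsigned long pool_total_pages(const struct fixed_order_pool *p)
    {
        return (unsigned long)p->count << p->order;
    }

    /* shrinker step: dropping one block releases 2^order pages */
    static unsigned long pool_shrink_one(struct fixed_order_pool *p)
    {
        if (!p->count)
            return 0;
        p->count--;
        return 1UL << p->order;
    }
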
D | ion_system_heap.c |
     28  static int order_to_index(unsigned int order)  in order_to_index() argument
     33  if (order == orders[i])  in order_to_index()
     39  static inline unsigned int order_to_size(int order)  in order_to_size() argument
     41  return PAGE_SIZE << order;  in order_to_size()
     51  unsigned long order)  in alloc_buffer_page() argument
     53  struct ion_page_pool *pool = heap->pools[order_to_index(order)];  in alloc_buffer_page()
     62  unsigned int order = compound_order(page);  in free_buffer_page() local
     66  __free_pages(page, order);  in free_buffer_page()
     70  pool = heap->pools[order_to_index(order)];  in free_buffer_page()

/drivers/gpu/drm/selftests/
D | test-drm_mm.c |
    376  unsigned int *order, n, m, o = 0;  in __igt_reserve() local
    389  order = drm_random_order(count, &prng);  in __igt_reserve()
    390  if (!order)  in __igt_reserve()
    404  nodes[n].start = order[n] * size;  in __igt_reserve()
    430  drm_random_reorder(order, count, &prng);  in __igt_reserve()
    433  set_node(&tmp, order[n] * size, 1)))  in __igt_reserve()
    437  drm_mm_remove_node(&nodes[order[n]]);  in __igt_reserve()
    438  err = drm_mm_reserve_node(&mm, &nodes[order[n]]);  in __igt_reserve()
    466  node = &nodes[order[(o + m) % count]];  in __igt_reserve()
    471  node = &nodes[order[(o + m) % count]];  in __igt_reserve()
    [all …]

/drivers/net/ethernet/mellanox/mlx4/
D | mr.c |
     46  static u32 mlx4_buddy_alloc(struct mlx4_buddy *buddy, int order)  in mlx4_buddy_alloc() argument
     54  for (o = order; o <= buddy->max_order; ++o)  in mlx4_buddy_alloc()
     69  while (o > order) {  in mlx4_buddy_alloc()
     78  seg <<= order;  in mlx4_buddy_alloc()
     83  static void mlx4_buddy_free(struct mlx4_buddy *buddy, u32 seg, int order)  in mlx4_buddy_free() argument
     85  seg >>= order;  in mlx4_buddy_free()
     89  while (test_bit(seg ^ 1, buddy->bits[order])) {  in mlx4_buddy_free()
     90  clear_bit(seg ^ 1, buddy->bits[order]);  in mlx4_buddy_free()
     91  --buddy->num_free[order];  in mlx4_buddy_free()
     93  ++order;  in mlx4_buddy_free()
    [all …]
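
Note: mlx4 above, and hns_roce_mr.c and mthca_mr.c below, each carry a near-identical private buddy allocator for MTT segments: bits[o] marks which order-o segments are free, allocation splits a larger free segment downward, and freeing re-merges a segment with its buddy (seg ^ 1) while that buddy is also free. A self-contained sketch of the shared bookkeeping, shrunk to a single-word bitmap per order and no locking (illustrative, not any driver's actual code):

    #define MAX_ORDER 4                     /* one top block of 2^4 order-0 segments */

    struct buddy {
        unsigned long bits[MAX_ORDER + 1];  /* bit s set: order-o segment s is free */
        int num_free[MAX_ORDER + 1];
    };

    static int buddy_alloc(struct buddy *b, int order)
    {
        int o, seg;

        for (o = order; o <= MAX_ORDER; o++)    /* smallest free order >= request */
            if (b->num_free[o])
                goto found;
        return -1;                              /* out of space */
    found:
        for (seg = 0; !(b->bits[o] & (1UL << seg)); seg++)
            ;                                   /* first free segment at order o */
        b->bits[o] &= ~(1UL << seg);
        b->num_free[o]--;

        while (o > order) {                     /* split down, freeing each buddy */
            o--;
            seg <<= 1;
            b->bits[o] |= 1UL << (seg ^ 1);
            b->num_free[o]++;
        }
        return seg << order;                    /* offset in order-0 units */
    }

    static void buddy_free(struct buddy *b, int seg, int order)
    {
        seg >>= order;
        while (order < MAX_ORDER && (b->bits[order] & (1UL << (seg ^ 1)))) {
            b->bits[order] &= ~(1UL << (seg ^ 1));  /* absorb the free buddy */
            b->num_free[order]--;
            seg >>= 1;
            order++;
        }
        b->bits[order] |= 1UL << seg;
        b->num_free[order]++;
    }

Seeding the structure with one free top-level block (bits[MAX_ORDER] = 1, num_free[MAX_ORDER] = 1) makes it ready for use; buddy_alloc(b, 0) then splits that block all the way down and returns segment 0.
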
/drivers/infiniband/hw/hns/
D | hns_roce_db.c |
     95  struct hns_roce_db *db, int order)  in hns_roce_alloc_db_from_pgdir() argument
    100  for (o = order; o <= 1; ++o) {  in hns_roce_alloc_db_from_pgdir()
    113  if (o > order)  in hns_roce_alloc_db_from_pgdir()
    114  set_bit(i ^ 1, pgdir->bits[order]);  in hns_roce_alloc_db_from_pgdir()
    120  db->order = order;  in hns_roce_alloc_db_from_pgdir()
    126  int order)  in hns_roce_alloc_db() argument
    134  if (!hns_roce_alloc_db_from_pgdir(pgdir, db, order))  in hns_roce_alloc_db()
    146  WARN_ON(hns_roce_alloc_db_from_pgdir(pgdir, db, order));  in hns_roce_alloc_db()
    161  o = db->order;  in hns_roce_free_db()
    164  if (db->order == 0 && test_bit(i ^ 1, db->u.pgdir->order0)) {  in hns_roce_free_db()

D | hns_roce_mr.c |
     69  static int hns_roce_buddy_alloc(struct hns_roce_buddy *buddy, int order,  in hns_roce_buddy_alloc() argument
     77  for (o = order; o <= buddy->max_order; ++o) {  in hns_roce_buddy_alloc()
     92  while (o > order) {  in hns_roce_buddy_alloc()
    101  *seg <<= order;  in hns_roce_buddy_alloc()
    106  int order)  in hns_roce_buddy_free() argument
    108  seg >>= order;  in hns_roce_buddy_free()
    112  while (test_bit(seg ^ 1, buddy->bits[order])) {  in hns_roce_buddy_free()
    113  clear_bit(seg ^ 1, buddy->bits[order]);  in hns_roce_buddy_free()
    114  --buddy->num_free[order];  in hns_roce_buddy_free()
    116  ++order;  in hns_roce_buddy_free()
    [all …]

/drivers/gpu/drm/ttm/
D | ttm_page_alloc.c |
     78  unsigned int order;  member
    248  unsigned int order)  in ttm_pages_put() argument
    250  unsigned int i, pages_nr = (1 << order);  in ttm_pages_put()
    252  if (order == 0) {  in ttm_pages_put()
    258  if (order > 0) {  in ttm_pages_put()
    262  __free_pages(pages[i], order);  in ttm_pages_put()
    327  ttm_pages_put(pages_to_free, freed_pages, pool->order);  in ttm_page_pool_free()
    362  ttm_pages_put(pages_to_free, freed_pages, pool->order);  in ttm_page_pool_free()
    400  page_nr = (1 << pool->order);  in ttm_pool_shrink_scan()
    402  nr_free_pool = roundup(nr_free, page_nr) >> pool->order;  in ttm_pool_shrink_scan()
    [all …]

/drivers/xen/
D | swiotlb-xen.c |
    180  unsigned long bytes, order;  in xen_swiotlb_init() local
    188  order = get_order(xen_io_tlb_nslabs << IO_TLB_SHIFT);  in xen_swiotlb_init()
    210  while ((SLABS_PER_PAGE << order) > IO_TLB_MIN_SLABS) {  in xen_swiotlb_init()
    211  xen_io_tlb_start = (void *)xen_get_swiotlb_free_pages(order);  in xen_swiotlb_init()
    214  order--;  in xen_swiotlb_init()
    216  if (order != get_order(bytes)) {  in xen_swiotlb_init()
    218  (PAGE_SIZE << order) >> 20);  in xen_swiotlb_init()
    219  xen_io_tlb_nslabs = SLABS_PER_PAGE << order;  in xen_swiotlb_init()
    238  free_pages((unsigned long)xen_io_tlb_start, order);  in xen_swiotlb_init()
    271  free_pages((unsigned long)xen_io_tlb_start, order);  in xen_swiotlb_init()
    [all …]
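
Note: xen_swiotlb_init() wants a large physically contiguous bounce buffer but can live with less: it keeps halving the request (order--) until an allocation succeeds or a floor (IO_TLB_MIN_SLABS) is reached, then warns if it got less than asked for. i915_gem_internal.c and ipu3-dmamap.c below use the same decreasing-order fallback. A sketch of the loop (try_alloc_block() is a hypothetical stand-in for the underlying page allocator):

    void *try_alloc_block(unsigned int order);      /* hypothetical allocator */

    static void *alloc_with_fallback(unsigned int order, unsigned int min_order,
                                     unsigned int *got_order)
    {
        for (;;) {
            void *buf = try_alloc_block(order);

            if (buf) {
                *got_order = order;     /* may be smaller than requested */
                return buf;
            }
            if (order == min_order)     /* reached the floor; give up */
                return NULL;
            order--;                    /* halve the request and retry */
        }
    }
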
/drivers/gpu/drm/i915/gem/
D | i915_gem_internal.c |
     82  int order = min(fls(npages) - 1, max_order);  in i915_gem_object_get_pages_internal() local
     86  page = alloc_pages(gfp | (order ? QUIET : MAYFAIL),  in i915_gem_object_get_pages_internal()
     87  order);  in i915_gem_object_get_pages_internal()
     90  if (!order--)  in i915_gem_object_get_pages_internal()
     94  max_order = order;  in i915_gem_object_get_pages_internal()
     97  sg_set_page(sg, page, PAGE_SIZE << order, 0);  in i915_gem_object_get_pages_internal()
     98  sg_page_sizes |= PAGE_SIZE << order;  in i915_gem_object_get_pages_internal()
    101  npages -= 1 << order;  in i915_gem_object_get_pages_internal()

/drivers/staging/media/ipu3/
D | ipu3-dmamap.c |
     55  unsigned int order = __fls(order_mask);  in imgu_dmamap_alloc_buffer() local
     57  order_size = 1U << order;  in imgu_dmamap_alloc_buffer()
     59  gfp | high_order_gfp : gfp, order);  in imgu_dmamap_alloc_buffer()
     62  if (!order)  in imgu_dmamap_alloc_buffer()
     65  split_page(page, order);  in imgu_dmamap_alloc_buffer()
     69  __free_pages(page, order);  in imgu_dmamap_alloc_buffer()
    247  unsigned long order, base_pfn;  in imgu_dmamap_init() local
    253  order = __ffs(IPU3_PAGE_SIZE);  in imgu_dmamap_init()
    254  base_pfn = max_t(unsigned long, 1, imgu->mmu->aperture_start >> order);  in imgu_dmamap_init()
    255  init_iova_domain(&imgu->iova_domain, 1UL << order, base_pfn);  in imgu_dmamap_init()

/drivers/gpu/drm/
D | drm_hashtab.c |
     45  int drm_ht_create(struct drm_open_hash *ht, unsigned int order)  in drm_ht_create() argument
     47  unsigned int size = 1 << order;  in drm_ht_create()
     49  ht->order = order;  in drm_ht_create()
     70  hashed_key = hash_long(key, ht->order);  in drm_ht_verbose_list()
     84  hashed_key = hash_long(key, ht->order);  in drm_ht_find_key()
    102  hashed_key = hash_long(key, ht->order);  in drm_ht_find_key_rcu()
    121  hashed_key = hash_long(key, ht->order);  in drm_ht_insert_item()
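
Note: drm_hashtab sizes its table to 1 << order buckets so that hash_long(key, order) can return a bucket index of exactly 'order' bits. A sketch of that multiplicative power-of-two hashing (GOLDEN_RATIO_64 matches the kernel's hash.h constant; hash_u64() is my stand-in for hash_long() and assumes 1 <= order <= 63):

    #include <stdint.h>

    #define GOLDEN_RATIO_64 0x61C8864680B583EBULL

    /* multiply, then keep the top 'order' bits as the bucket index */
    static inline uint32_t hash_u64(uint64_t key, unsigned int order)
    {
        return (uint32_t)((key * GOLDEN_RATIO_64) >> (64 - order));
    }

With order = 7 the table has 128 buckets and hash_u64() returns values in 0..127; growing the table is just order + 1, with no change to the hash function.
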
D | drm_bufs.c |
    720  int order;  in drm_legacy_addbufs_agp() local
    733  order = order_base_2(request->size);  in drm_legacy_addbufs_agp()
    734  size = 1 << order;  in drm_legacy_addbufs_agp()
    738  page_order = order - PAGE_SHIFT > 0 ? order - PAGE_SHIFT : 0;  in drm_legacy_addbufs_agp()
    745  DRM_DEBUG("order: %d\n", order);  in drm_legacy_addbufs_agp()
    752  if (order < DRM_MIN_ORDER || order > DRM_MAX_ORDER)  in drm_legacy_addbufs_agp()
    777  entry = &dma->bufs[order];  in drm_legacy_addbufs_agp()
    806  buf->order = order;  in drm_legacy_addbufs_agp()
    879  int order;  in drm_legacy_addbufs_pci() local
    904  order = order_base_2(request->size);  in drm_legacy_addbufs_pci()
    [all …]

/drivers/media/platform/vimc/
D | vimc-debayer.c |
     38  enum vimc_deb_rgb_colors order[2][2];  member
     67  .order = { { VIMC_DEB_BLUE, VIMC_DEB_GREEN },
     72  .order = { { VIMC_DEB_GREEN, VIMC_DEB_BLUE },
     77  .order = { { VIMC_DEB_GREEN, VIMC_DEB_RED },
     82  .order = { { VIMC_DEB_RED, VIMC_DEB_GREEN },
     87  .order = { { VIMC_DEB_BLUE, VIMC_DEB_GREEN },
     92  .order = { { VIMC_DEB_GREEN, VIMC_DEB_BLUE },
     97  .order = { { VIMC_DEB_GREEN, VIMC_DEB_RED },
    102  .order = { { VIMC_DEB_RED, VIMC_DEB_GREEN },
    107  .order = { { VIMC_DEB_BLUE, VIMC_DEB_GREEN },
    [all …]
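
Note: the order[2][2] member encodes a Bayer mosaic: the color of the sensor pixel at (line, column) is the table entry selected by the parity of each coordinate, which is all the debayer core needs to know about the pixel format. A sketch of that lookup (the enum values and the BGGR table are illustrative, not the vimc definitions):

    enum rgb_color { RED, GREEN, BLUE };

    /* BGGR: blue/green on even lines, green/red on odd lines */
    static const enum rgb_color bggr_order[2][2] = {
        { BLUE,  GREEN },
        { GREEN, RED   },
    };

    static enum rgb_color pixel_color(unsigned int line, unsigned int col)
    {
        return bggr_order[line & 1][col & 1];
    }
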
/drivers/gpu/drm/i915/gt/
D | selftest_timeline.c |
    209  int order, offset;  in igt_sync() local
    214  for (order = 1; order < 64; order++) {  in igt_sync()
    215  for (offset = -1; offset <= (order > 1); offset++) {  in igt_sync()
    216  u64 ctx = BIT_ULL(order) + offset;  in igt_sync()
    227  for (order = 1; order < 64; order++) {  in igt_sync()
    228  for (offset = -1; offset <= (order > 1); offset++) {  in igt_sync()
    229  u64 ctx = BIT_ULL(order) + offset;  in igt_sync()
    256  int order, last_order;  in bench_sync() local
    373  for (last_order = 1, order = 1; order < 32;  in bench_sync()
    374  ({ int tmp = last_order; last_order = order; order += tmp; })) {  in bench_sync()
    [all …]

/drivers/atm/
D | eni.c |
    174  1 << eni_dev->free_list[i].order);  in dump_mem()
    206  int len,order;  in eni_put_free() local
    218  for (order = 0; !(((unsigned long)start | size) & (1 << order)); order++);  in eni_put_free()
    219  if (MID_MIN_BUF_SIZE > (1 << order)) {  in eni_put_free()
    221  order);  in eni_put_free()
    225  list[len].order = order;  in eni_put_free()
    227  start += 1 << order;  in eni_put_free()
    228  size -= 1 << order;  in eni_put_free()
    239  int len,i,order,best_order,index;  in eni_alloc_mem() local
    245  for (order = 0; (1 << order) < *size; order++)  in eni_alloc_mem()
    [all …]
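
Note: eni_put_free() carves an arbitrary free region into maximal power-of-two chunks. The trick on its line 218: the largest block that may start at 'start' is limited by both the address alignment and the remaining size, so its order is the lowest set bit of (start | size). A standalone sketch of that carving loop (carve_free_region() is illustrative; the driver additionally enforces MID_MIN_BUF_SIZE):

    #include <stdio.h>

    static void carve_free_region(unsigned long start, unsigned long size)
    {
        while (size) {
            int order;

            /* lowest bit set in either bound limits the chunk */
            for (order = 0; !((start | size) & (1UL << order)); order++)
                ;
            printf("chunk: start=0x%lx order=%d (%lu bytes)\n",
                   start, order, 1UL << order);
            start += 1UL << order;
            size -= 1UL << order;
        }
    }

carve_free_region(0x1000, 0x3000), for instance, yields a 4 KiB chunk at 0x1000 followed by an 8 KiB chunk at 0x2000, each naturally aligned.
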
/drivers/s390/char/
D | vmcp.c |
     63  int nr_pages, order;  in vmcp_response_alloc() local
     65  order = get_order(session->bufsize);  in vmcp_response_alloc()
     72  if (order > 2)  in vmcp_response_alloc()
     79  session->response = (char *)__get_free_pages(GFP_KERNEL | __GFP_RETRY_MAYFAIL, order);  in vmcp_response_alloc()
     84  int nr_pages, order;  in vmcp_response_free() local
     89  order = get_order(session->bufsize);  in vmcp_response_free()
     96  free_pages((unsigned long)session->response, order);  in vmcp_response_free()

/drivers/infiniband/hw/mthca/
D | mthca_mr.c |
     43  int order;  member
     84  static u32 mthca_buddy_alloc(struct mthca_buddy *buddy, int order)  in mthca_buddy_alloc() argument
     92  for (o = order; o <= buddy->max_order; ++o)  in mthca_buddy_alloc()
    107  while (o > order) {  in mthca_buddy_alloc()
    116  seg <<= order;  in mthca_buddy_alloc()
    121  static void mthca_buddy_free(struct mthca_buddy *buddy, u32 seg, int order)  in mthca_buddy_free() argument
    123  seg >>= order;  in mthca_buddy_free()
    127  while (test_bit(seg ^ 1, buddy->bits[order])) {  in mthca_buddy_free()
    128  clear_bit(seg ^ 1, buddy->bits[order]);  in mthca_buddy_free()
    129  --buddy->num_free[order];  in mthca_buddy_free()
    [all …]