/lib/

kasprintf.c

    15   char *kvasprintf(gfp_t gfp, const char *fmt, va_list ap)
    25           p = kmalloc_track_caller(first+1, gfp);
    43   const char *kvasprintf_const(gfp_t gfp, const char *fmt, va_list ap)
    46           return kstrdup_const(fmt, gfp);
    48           return kstrdup_const(va_arg(ap, const char*), gfp);
    49           return kvasprintf(gfp, fmt, ap);
    53   char *kasprintf(gfp_t gfp, const char *fmt, ...)
    59           p = kvasprintf(gfp, fmt, ap);

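These helpers size the result with a dry-run vsnprintf() before allocating, so the caller only supplies format arguments and gfp flags. A minimal sketch of typical use (make_label() is a hypothetical caller):

```c
#include <linux/kernel.h>
#include <linux/slab.h>

/* kasprintf() sizes and allocates the buffer in one call;
 * the result must be released with kfree(). */
static char *make_label(int id)
{
	return kasprintf(GFP_KERNEL, "sensor-%d", id);	/* NULL on failure */
}
```
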
fortify_kunit.c

    203          gfp_t gfp = GFP_KERNEL | __GFP_NOWARN;                    \
    207          checker(expected_size, kmalloc(alloc_size, gfp),          \
    210                  kmalloc_node(alloc_size, gfp, NUMA_NO_NODE),      \
    212          checker(expected_size, kzalloc(alloc_size, gfp),          \
    215                  kzalloc_node(alloc_size, gfp, NUMA_NO_NODE),      \
    217          checker(expected_size, kcalloc(1, alloc_size, gfp),       \
    219          checker(expected_size, kcalloc(alloc_size, 1, gfp),       \
    222                  kcalloc_node(1, alloc_size, gfp, NUMA_NO_NODE),   \
    225                  kcalloc_node(alloc_size, 1, gfp, NUMA_NO_NODE),   \
    227          checker(expected_size, kmalloc_array(1, alloc_size, gfp), \
    [all …]

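The test walks the whole kmalloc() family with GFP_KERNEL | __GFP_NOWARN so that deliberately odd sizes do not spam the log with allocation-failure warnings. A minimal sketch of the same combination outside the test (alloc_quiet() is hypothetical):

```c
#include <linux/slab.h>

/* Try the allocation quietly; the caller handles NULL itself,
 * so the kernel's allocation-failure warning is suppressed. */
static void *alloc_quiet(size_t size)
{
	return kmalloc(size, GFP_KERNEL | __GFP_NOWARN);
}
```
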
objpool.c

    83           if ((pool->gfp & (GFP_ATOMIC | GFP_KERNEL)) != GFP_ATOMIC)    (in objpool_init_percpu_slots)
    84                   slot = __vmalloc_node(size, sizeof(void *), pool->gfp,
    88                   slot = kmalloc_node(size, pool->gfp, cpu_to_node(i));
    119                  gfp_t gfp, void *context, objpool_init_obj_cb objinit,    (in objpool_init)
    142          pool->gfp = gfp & ~__GFP_ZERO;
    146          pool->cpu_slots = kzalloc(slot_size, pool->gfp);

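objpool_init() stores the caller's flags in pool->gfp (stripping __GFP_ZERO) and later chooses kmalloc_node() for atomic pools and __vmalloc_node() otherwise, per line 83. A sketch assuming the signature visible at line 119; struct my_obj, my_objinit(), and setup_pool() are hypothetical:

```c
#include <linux/objpool.h>

struct my_obj {
	int value;
};

static int my_objinit(void *obj, void *context)
{
	((struct my_obj *)obj)->value = 0;	/* pre-initialize each pooled object */
	return 0;
}

static int setup_pool(struct objpool_head *pool)
{
	/* GFP_KERNEL permits the vmalloc path for the per-CPU slots;
	 * GFP_ATOMIC would force kmalloc_node() instead. */
	return objpool_init(pool, 128, sizeof(struct my_obj),
			    GFP_KERNEL, NULL, my_objinit, NULL);
}
```
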
string_helpers.c

    648  char *kstrdup_quotable(const char *src, gfp_t gfp)
    660          dst = kmalloc(dlen + 1, gfp);
    676  char *kstrdup_quotable_cmdline(struct task_struct *task, gfp_t gfp)
    698          quoted = kstrdup_quotable(buffer, gfp);
    709  char *kstrdup_quotable_file(struct file *file, gfp_t gfp)
    714          return kstrdup("<unknown>", gfp);
    719          return kstrdup("<no_memory>", gfp);
    723                  pathname = kstrdup("<too_long>", gfp);
    725                  pathname = kstrdup_quotable(pathname, gfp);
    735  char *kstrdup_and_replace(const char *src, char old, char new, gfp_t gfp)
    [all …]

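Each helper duplicates into freshly allocated memory, so all of them thread the caller's gfp through. A sketch of kstrdup_and_replace() from line 735 (normalize_name() is a hypothetical caller):

```c
#include <linux/gfp.h>
#include <linux/string_helpers.h>

/* Duplicate @name, turning every '-' into '_' in the copy. */
static char *normalize_name(const char *name)
{
	return kstrdup_and_replace(name, '-', '_', GFP_KERNEL);
}
```
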
idr.c

    34                   unsigned long max, gfp_t gfp)    (in idr_alloc_u32)
    46           slot = idr_get_free(&idr->idr_rt, &iter, gfp, max - base);
    79   int idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
    87           ret = idr_alloc_u32(idr, ptr, &id, end > 0 ? end - 1 : INT_MAX, gfp);
    117  int idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end, gfp_t gfp)
    125          err = idr_alloc_u32(idr, ptr, &id, max, gfp);
    128          err = idr_alloc_u32(idr, ptr, &id, max, gfp);
    381                   gfp_t gfp)    (in ida_alloc_range)
    454          if (xas_nomem(&xas, gfp)) {
    466                  alloc = kzalloc(sizeof(*bitmap), gfp);

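idr_alloc() and idr_alloc_cyclic() are thin wrappers over idr_alloc_u32(), and ida_alloc_range() handles plain ID allocation on top of the XArray. A minimal idr_alloc() sketch (my_idr and register_thing() are hypothetical):

```c
#include <linux/idr.h>

static DEFINE_IDR(my_idr);

static int register_thing(void *thing)
{
	/* Lowest free ID >= 0 (end == 0 means "no upper bound"), or a
	 * negative errno; GFP_KERNEL may sleep, so don't call this
	 * from atomic context. */
	return idr_alloc(&my_idr, thing, 0, 0, GFP_KERNEL);
}
```
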
test_printf.c

    649          gfp_t gfp;    (local in flags)
    670          gfp = GFP_TRANSHUGE;
    671          test("GFP_TRANSHUGE", "%pGg", &gfp);
    673          gfp = GFP_ATOMIC|__GFP_DMA;
    674          test("GFP_ATOMIC|GFP_DMA", "%pGg", &gfp);
    676          gfp = __GFP_HIGH;
    677          test("__GFP_HIGH", "%pGg", &gfp);
    680          gfp = ~__GFP_BITS_MASK;
    681          snprintf(cmp_buffer, BUF_SIZE, "%#lx", (unsigned long) gfp);
    682          test(cmp_buffer, "%pGg", &gfp);
    [all …]

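The flags() test exercises the %pGg printk extension, which decodes a gfp_t into symbolic flag names and falls back to hex for unknown bits, as the ~__GFP_BITS_MASK case shows. A sketch (dump_gfp() is hypothetical):

```c
#include <linux/gfp.h>
#include <linux/printk.h>

static void dump_gfp(gfp_t gfp)
{
	/* %pGg takes a *pointer* to the mask and prints e.g.
	 * "GFP_KERNEL|__GFP_NOWARN". */
	pr_info("allocating with %pGg\n", &gfp);
}
```
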
argv_split.c

    60   char **argv_split(gfp_t gfp, const char *str, int *argcp)
    67           argv_str = kstrndup(str, KMALLOC_MAX_SIZE - 1, gfp);
    72           argv = kmalloc_array(argc + 2, sizeof(*argv), gfp);

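argv_split() copies the string with kstrndup() and then builds a NULL-terminated pointer array, both with the caller's gfp. A sketch (demo_split() is hypothetical):

```c
#include <linux/gfp.h>
#include <linux/string.h>

static void demo_split(void)
{
	int argc;
	char **argv = argv_split(GFP_KERNEL, "mount -o ro /dev/sda1", &argc);

	if (argv)
		argv_free(argv);	/* frees the array and the copied string */
}
```
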
memregion.c

    9    int memregion_alloc(gfp_t gfp)
    11           return ida_alloc(&memregion_ids, gfp);

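memregion_alloc() is a one-line wrapper around a global IDA. A sketch of the alloc/free pairing (grab_region_id() is hypothetical):

```c
#include <linux/gfp.h>
#include <linux/memregion.h>

static int grab_region_id(void)
{
	int id = memregion_alloc(GFP_KERNEL);	/* just ida_alloc() underneath */

	if (id >= 0)
		memregion_free(id);
	return id;
}
```
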
flex_proportions.c

    38   int fprop_global_init(struct fprop_global *p, gfp_t gfp)
    44           err = percpu_counter_init(&p->events, 1, gfp);
    91   int fprop_local_init_percpu(struct fprop_local_percpu *pl, gfp_t gfp)
    95           err = percpu_counter_init(&pl->events, 0, gfp);

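Both initializers just seed embedded per-CPU counters, so the gfp flags propagate straight to percpu_counter_init(). A sketch of paired setup with cleanup on failure (init_props() is hypothetical):

```c
#include <linux/flex_proportions.h>

static int init_props(struct fprop_global *fg, struct fprop_local_percpu *fl)
{
	int err = fprop_global_init(fg, GFP_KERNEL);

	if (err)
		return err;
	err = fprop_local_init_percpu(fl, GFP_KERNEL);
	if (err)
		fprop_global_destroy(fg);	/* undo on partial failure */
	return err;
}
```
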
test_free_pages.c

    14   static void test_free_pages(gfp_t gfp)
    19           unsigned long addr = __get_free_pages(gfp, 3);

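The test grabs an order-3 block, i.e. eight contiguous pages. A sketch of the same round trip (order3_roundtrip() is hypothetical):

```c
#include <linux/gfp.h>

static void order3_roundtrip(void)
{
	/* 2^3 = 8 physically contiguous pages, returned as a kernel
	 * virtual address (0 on failure). */
	unsigned long addr = __get_free_pages(GFP_KERNEL, 3);

	if (addr)
		free_pages(addr, 3);
}
```
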
xarray.c

    299  bool xas_nomem(struct xa_state *xas, gfp_t gfp)
    306                  gfp |= __GFP_ACCOUNT;
    307          xas->xa_alloc = kmem_cache_alloc_lru(radix_tree_node_cachep, xas->xa_lru, gfp);
    326  static bool __xas_nomem(struct xa_state *xas, gfp_t gfp)
    336                  gfp |= __GFP_ACCOUNT;
    337          if (gfpflags_allow_blocking(gfp)) {
    339                  xas->xa_alloc = kmem_cache_alloc_lru(radix_tree_node_cachep, xas->xa_lru, gfp);
    342                  xas->xa_alloc = kmem_cache_alloc_lru(radix_tree_node_cachep, xas->xa_lru, gfp);
    371          gfp_t gfp = GFP_NOWAIT | __GFP_NOWARN;    (local in xas_alloc)
    374                  gfp |= __GFP_ACCOUNT;
    [all …]

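xas_nomem() is the heart of the XArray's drop-locks-and-retry allocation scheme: a store that ran out of memory under the lock preallocates a node with the caller's gfp and asks to be retried. A sketch of the documented pattern (store_entry() is hypothetical):

```c
#include <linux/xarray.h>

static int store_entry(struct xarray *xa, unsigned long index, void *entry)
{
	XA_STATE(xas, xa, index);

	do {
		xas_lock(&xas);
		xas_store(&xas, entry);
		xas_unlock(&xas);
	} while (xas_nomem(&xas, GFP_KERNEL));	/* preallocate and retry */

	return xas_error(&xas);
}
```
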
bucket_locks.c

    15                   size_t max_size, unsigned int cpu_mult, gfp_t gfp,    (in __alloc_bucket_spinlocks)
    34           tlocks = kvmalloc_array(size, sizeof(spinlock_t), gfp);

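The helper sizes a spinlock array by CPU count (cpu_mult) capped at max_size; callers pick a lock by hashing into it. A sketch, assuming the alloc_bucket_spinlocks() wrapper around the function shown above (init_bucket_locks(), locks, and lock_mask are hypothetical):

```c
#include <linux/spinlock.h>

static spinlock_t *locks;
static unsigned int lock_mask;

static int init_bucket_locks(void)
{
	/* At most 1024 locks, scaled by CPU count (cpu_mult = 1);
	 * pick a lock for an object as locks[hash & lock_mask]. */
	return alloc_bucket_spinlocks(&locks, &lock_mask, 1024, 1, GFP_KERNEL);
}
```
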
btree.c

    92   static unsigned long *btree_node_alloc(struct btree_head *head, gfp_t gfp)
    96           node = mempool_alloc(head->mempool, gfp);
    409                  gfp_t gfp)    (in btree_grow)
    414          node = btree_node_alloc(head, gfp);
    445                  gfp_t gfp)    (in btree_insert_level)
    452                  err = btree_grow(head, geo, gfp);
    468                  new = btree_node_alloc(head, gfp);
    473                              new, level + 1, gfp);
    506                  unsigned long *key, void *val, gfp_t gfp)    (in btree_insert)
    509          return btree_insert_level(head, geo, key, val, 1, gfp);
    [all …]

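Every node comes from the head's mempool, so inserts can sleep or not depending on the gfp the caller passes down. A sketch (btree_demo() is hypothetical):

```c
#include <linux/btree.h>
#include <linux/gfp.h>

static int btree_demo(struct btree_head *head, unsigned long key, void *val)
{
	int err = btree_init(head);	/* sets up the node mempool */

	if (err)
		return err;
	/* btree_geo64: one unsigned long per key. */
	return btree_insert(head, &btree_geo64, &key, val, GFP_KERNEL);
}
```
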
ref_tracker.c

    187                  gfp_t gfp)    (in ref_tracker_alloc)
    192          gfp_t gfp_mask = gfp | __GFP_NOWARN;
    201          if (gfp & __GFP_DIRECT_RECLAIM)
    210          tracker->alloc_stack_handle = stack_depot_save(entries, nr_entries, gfp);

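ref_tracker_alloc() records the acquiring call stack in the stack depot, quietly (it ORs in __GFP_NOWARN at line 192). A sketch of the get/put pairing, assuming an already-initialized ref_tracker_dir (track_get() and track_put() are hypothetical):

```c
#include <linux/ref_tracker.h>

static int track_get(struct ref_tracker_dir *dir, struct ref_tracker **t)
{
	/* Saves the acquiring stack trace keyed by *t. */
	return ref_tracker_alloc(dir, t, GFP_KERNEL);
}

static int track_put(struct ref_tracker_dir *dir, struct ref_tracker **t)
{
	return ref_tracker_free(dir, t);
}
```
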
test_objpool.c

    289          gfp_t gfp = GFP_KERNEL;    (local in ot_init_sync_m0)
    296                  gfp = GFP_ATOMIC;
    299                      gfp, sop, ot_init_node, NULL)) {
    459          gfp_t gfp = GFP_KERNEL;    (local in ot_init_async_m0)
    466                  gfp = GFP_ATOMIC;
    468          if (objpool_init(&sop->pool, max, test->objsz, gfp, sop,

percpu-refcount.c

    64                   unsigned int flags, gfp_t gfp)    (in percpu_ref_init)
    72                   __alloc_percpu_gfp(sizeof(unsigned long), align, gfp);
    76           data = kzalloc(sizeof(*ref->data), gfp);

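percpu_ref_init() allocates both the per-CPU counters and the backing ref->data with the caller's gfp. A sketch (my_release() and init_ref() are hypothetical):

```c
#include <linux/percpu-refcount.h>

static void my_release(struct percpu_ref *ref)
{
	/* hypothetical: free the object that embeds @ref */
}

static int init_ref(struct percpu_ref *ref)
{
	/* gfp covers both the per-CPU counters and ref->data. */
	return percpu_ref_init(ref, my_release, 0, GFP_KERNEL);
}
```
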
maple_tree.c

    160   static inline struct maple_node *mt_alloc_one(gfp_t gfp)
    162           return kmem_cache_alloc(maple_node_cache, gfp);
    165   static inline int mt_alloc_bulk(gfp_t gfp, size_t size, void **nodes)
    167           return kmem_cache_alloc_bulk(maple_node_cache, gfp, size, nodes);
    1220  static inline void mas_alloc_nodes(struct ma_state *mas, gfp_t gfp)
    1241          node = (struct maple_alloc *)mt_alloc_one(gfp);
    1263          count = mt_alloc_bulk(gfp, max_req, slots);
    1315  static void mas_node_count_gfp(struct ma_state *mas, int count, gfp_t gfp)
    1321          mas_alloc_nodes(mas, gfp);
    4347                  unsigned long *next, gfp_t gfp)    (in mas_alloc_cyclic)
    [all …]

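All maple-tree node allocation funnels through maple_node_cache with the caller's gfp, singly or in bulk. A sketch using the public store API (mt and map_range() are hypothetical):

```c
#include <linux/maple_tree.h>

static DEFINE_MTREE(mt);

static int map_range(unsigned long first, unsigned long last, void *entry)
{
	/* Nodes are preallocated from maple_node_cache with these
	 * gfp flags before the store proper. */
	return mtree_store_range(&mt, first, last, entry, GFP_KERNEL);
}
```
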
radix-tree.c

    408   static int radix_tree_extend(struct radix_tree_root *root, gfp_t gfp,
    425           struct radix_tree_node *node = radix_tree_node_alloc(gfp, NULL,
    607           gfp_t gfp = root_gfp_mask(root);    (local in __radix_tree_create)
    613                   int error = radix_tree_extend(root, gfp, max, shift);
    624                   child = radix_tree_node_alloc(gfp, node, root, shift,
    1477                  struct radix_tree_iter *iter, gfp_t gfp,    (in idr_get_free)
    1493                  int error = radix_tree_extend(root, gfp, start, shift);
    1506                  child = radix_tree_node_alloc(gfp, node, root, shift,

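Unlike most APIs here, the radix tree fixes its gfp mask when the root is declared and reads it back with root_gfp_mask() on every insertion. A sketch (tree and add_item() are hypothetical):

```c
#include <linux/radix-tree.h>

/* The gfp mask lives in the root, not in the insert call. */
static RADIX_TREE(tree, GFP_KERNEL);

static int add_item(unsigned long index, void *item)
{
	return radix_tree_insert(&tree, index, item);
}
```
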
rhashtable.c

    150                  gfp_t gfp)    (in nested_bucket_table_alloc)
    162                  kmalloc_noprof(size, gfp|__GFP_ZERO));
    179                  gfp_t gfp)    (in bucket_table_alloc)
    188                  gfp|__GFP_ZERO, NUMA_NO_NODE));
    192          if (tbl == NULL && !gfpflags_allow_blocking(gfp)) {
    193                  tbl = nested_bucket_table_alloc(ht, nbuckets, gfp);

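bucket_table_alloc() tries a flat kvmalloc'd table and, when the flags forbid blocking, falls back to the nested layout at line 193. A sketch of normal table setup (struct my_entry, my_params, and my_table_init() are hypothetical):

```c
#include <linux/rhashtable.h>
#include <linux/stddef.h>

struct my_entry {
	u32 key;
	struct rhash_head node;
};

static const struct rhashtable_params my_params = {
	.key_len     = sizeof(u32),
	.key_offset  = offsetof(struct my_entry, key),
	.head_offset = offsetof(struct my_entry, node),
};

static int my_table_init(struct rhashtable *ht)
{
	/* The initial bucket table is allocated with GFP_KERNEL;
	 * atomic-context growth takes the nested-table path above. */
	return rhashtable_init(ht, &my_params);
}
```
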
percpu_counter.c

    189                  gfp_t gfp, u32 nr_counters,    (in __percpu_counter_init_many)
    199                  __alignof__(*counters), gfp);

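The gfp flags cover only the per-CPU area allocated at init time; subsequent counter updates never allocate. A sketch (stats_init() is hypothetical):

```c
#include <linux/percpu_counter.h>

static int stats_init(struct percpu_counter *events)
{
	int err = percpu_counter_init(events, 0, GFP_KERNEL);

	if (!err)
		percpu_counter_add(events, 1);	/* never allocates */
	return err;
}
```
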
scatterlist.c

    614                  gfp_t gfp, unsigned int *nent_p)    (in sgl_alloc_order)
    633                  gfp & ~GFP_DMA);
    641                  page = alloc_pages(gfp, order);
    666   struct scatterlist *sgl_alloc(unsigned long long length, gfp_t gfp,
    669           return sgl_alloc_order(length, 0, false, gfp, nent_p);

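sgl_alloc() is sgl_alloc_order() with order 0: one scatterlist entry per page, all allocated with the caller's gfp. A sketch (make_sgl() is hypothetical):

```c
#include <linux/scatterlist.h>

static struct scatterlist *make_sgl(unsigned long long bytes,
				    unsigned int *nents)
{
	/* Order-0 pages chained into one scatterlist; release the
	 * whole thing with sgl_free(). */
	return sgl_alloc(bytes, GFP_KERNEL, nents);
}
```
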
/lib/kunit/

string-stream.c

    17   static struct string_stream_fragment *alloc_string_stream_fragment(int len, gfp_t gfp)
    21           frag = kzalloc(sizeof(*frag), gfp);
    25           frag->fragment = kmalloc(len, gfp);
    67           frag_container = alloc_string_stream_fragment(buf_len, stream->gfp);    (in string_stream_vadd)
    123          buf = kzalloc(buf_len, stream->gfp);    (in string_stream_get_string)
    157  struct string_stream *alloc_string_stream(gfp_t gfp)
    161          stream = kzalloc(sizeof(*stream), gfp);
    165          stream->gfp = gfp;
    190  struct string_stream *kunit_alloc_string_stream(struct kunit *test, gfp_t gfp)
    194          stream = alloc_string_stream(gfp);

string-stream.h

    26           gfp_t gfp;    (member)
    32   struct string_stream *kunit_alloc_string_stream(struct kunit *test, gfp_t gfp);
    35   struct string_stream *alloc_string_stream(gfp_t gfp);

test.c

    852  void *kunit_kmalloc_array(struct kunit *test, size_t n, size_t size, gfp_t gfp)    (in KUNIT_DEFINE_ACTION_WRAPPER)
    856          data = kmalloc_array(n, size, gfp);
    886  const char *kunit_kstrdup_const(struct kunit *test, const char *str, gfp_t gfp)
    892          return kunit_kstrdup(test, str, gfp);

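KUnit's allocators tie the buffer's lifetime to the test, so nothing needs an explicit kfree(). A sketch (example_test() is hypothetical):

```c
#include <kunit/test.h>

static void example_test(struct kunit *test)
{
	/* Test-managed: freed automatically when the case finishes. */
	char *buf = kunit_kmalloc(test, 64, GFP_KERNEL);

	KUNIT_ASSERT_NOT_ERR_OR_NULL(test, buf);
}
```
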
/lib/reed_solomon/

reed_solomon.c

    71                   int fcr, int prim, int nroots, gfp_t gfp)    (in codec_init)
    76           rs = kzalloc(sizeof(*rs), gfp);
    91           rs->alpha_to = kmalloc_array(rs->nn + 1, sizeof(uint16_t), gfp);
    95           rs->index_of = kmalloc_array(rs->nn + 1, sizeof(uint16_t), gfp);
    99           rs->genpoly = kmalloc_array(rs->nroots + 1, sizeof(uint16_t), gfp);
    215                  int prim, int nroots, gfp_t gfp)    (in init_rs_internal)
    237          rs = kzalloc(sizeof(*rs) + bsize, gfp);
    266          rs->codec = codec_init(symsize, gfpoly, gffunc, fcr, prim, nroots, gfp);
    289                  int nroots, gfp_t gfp)    (in init_rs_gfp)
    291          return init_rs_internal(symsize, gfpoly, NULL, fcr, prim, nroots, gfp);

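init_rs_gfp() lets a caller pick the allocation context for the codec tables built in codec_init(). A sketch using the classic 10-bit, six-root configuration from the rslib documentation (rs and rs_setup() are hypothetical):

```c
#include <linux/errno.h>
#include <linux/rslib.h>

static struct rs_control *rs;

static int rs_setup(void)
{
	/* 10-bit symbols over polynomial 0x409, fcr 0, prim 1, six
	 * roots; release with free_rs(rs). */
	rs = init_rs_gfp(10, 0x409, 0, 1, 6, GFP_KERNEL);
	return rs ? 0 : -ENOMEM;
}
```
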