/lib/ |
D | generic-radix-tree.c |
     79  static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)  in genradix_alloc_node() argument
     83  node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO);  in genradix_alloc_node()
     90  kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);  in genradix_alloc_node()
    105  gfp_t gfp_mask)  in __genradix_ptr_alloc() argument
    122  new_node = genradix_alloc_node(gfp_mask);  in __genradix_ptr_alloc()
    145  new_node = genradix_alloc_node(gfp_mask);  in __genradix_ptr_alloc()
    218  gfp_t gfp_mask)  in __genradix_prealloc() argument
    223  if (!__genradix_ptr_alloc(radix, offset, gfp_mask))  in __genradix_prealloc()
|
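These hits are the allocation path behind the genradix_ptr_alloc() helper from <linux/generic-radix-tree.h>, which forwards its gfp argument down through __genradix_ptr_alloc() to genradix_alloc_node(). A minimal caller sketch; the struct entry type and the record() wrapper are made up for illustration:

#include <linux/generic-radix-tree.h>
#include <linux/types.h>

struct entry {
	u64 seq;
};

static GENRADIX(struct entry) entries;

static int record(size_t idx, u64 seq)
{
	struct entry *e;

	/* any missing intermediate nodes are allocated with GFP_KERNEL */
	e = genradix_ptr_alloc(&entries, idx, GFP_KERNEL);
	if (!e)
		return -ENOMEM;

	e->seq = seq;
	return 0;
}

genradix_init() and genradix_free() bracket the lifetime of the tree around calls like the one above.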
D | scatterlist.c |
    149  static struct scatterlist *sg_kmalloc(unsigned int nents, gfp_t gfp_mask)  in sg_kmalloc() argument
    161  void *ptr = (void *) __get_free_page(gfp_mask);  in sg_kmalloc()
    162  kmemleak_alloc(ptr, PAGE_SIZE, 1, gfp_mask);  in sg_kmalloc()
    166  gfp_mask);  in sg_kmalloc()
    268  unsigned int nents_first_chunk, gfp_t gfp_mask,  in __sg_alloc_table() argument
    302  sg = alloc_fn(alloc_size, gfp_mask);  in __sg_alloc_table()
    355  int sg_alloc_table(struct sg_table *table, unsigned int nents, gfp_t gfp_mask)  in sg_alloc_table() argument
    360  NULL, 0, gfp_mask, sg_kmalloc);  in sg_alloc_table()
    392  gfp_t gfp_mask)  in __sg_alloc_table_from_pages() argument
    413  ret = sg_alloc_table(sgt, chunks, gfp_mask);  in __sg_alloc_table_from_pages()
    [all …]
|
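sg_alloc_table() passes gfp_mask through __sg_alloc_table() to sg_kmalloc(), which allocates the scatterlist array itself with that mask. A minimal caller sketch; build_table() and its buffer are hypothetical:

#include <linux/scatterlist.h>

static int build_table(void *buf, unsigned int len)
{
	struct sg_table table;
	int ret;

	/* gfp_mask is forwarded to sg_kmalloc() for the sg entry array */
	ret = sg_alloc_table(&table, 1, GFP_KERNEL);
	if (ret)
		return ret;

	sg_set_buf(table.sgl, buf, len);

	/* ... map and use the entries, e.g. for DMA ... */

	sg_free_table(&table);
	return 0;
}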
D | radix-tree.c |
    242  radix_tree_node_alloc(gfp_t gfp_mask, struct radix_tree_node *parent,  in radix_tree_node_alloc() argument
    254  if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {  in radix_tree_node_alloc()
    263  gfp_mask | __GFP_NOWARN);  in radix_tree_node_alloc()
    285  ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);  in radix_tree_node_alloc()
    331  static __must_check int __radix_tree_preload(gfp_t gfp_mask, unsigned nr)  in __radix_tree_preload() argument
    341  gfp_mask &= ~__GFP_ACCOUNT;  in __radix_tree_preload()
    347  node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);  in __radix_tree_preload()
    374  int radix_tree_preload(gfp_t gfp_mask)  in radix_tree_preload() argument
    377  WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));  in radix_tree_preload()
    378  return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);  in radix_tree_preload()
    [all …]
|
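radix_tree_preload() lets a caller pre-allocate nodes with a sleeping gfp_mask before taking a spinlock, so the later radix_tree_insert() never has to allocate atomically. A sketch of that idiom; my_tree, my_lock and store_item() are illustrative names:

#include <linux/radix-tree.h>
#include <linux/spinlock.h>

static RADIX_TREE(my_tree, GFP_KERNEL);
static DEFINE_SPINLOCK(my_lock);

static int store_item(unsigned long index, void *item)
{
	int err;

	/* preallocate nodes while sleeping is still allowed */
	err = radix_tree_preload(GFP_KERNEL);
	if (err)
		return err;

	spin_lock(&my_lock);
	err = radix_tree_insert(&my_tree, index, item);
	spin_unlock(&my_lock);

	radix_tree_preload_end();
	return err;
}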
D | sg_split.c |
    152  gfp_t gfp_mask)  in sg_split() argument
    157  splitters = kcalloc(nb_splits, sizeof(*splitters), gfp_mask);  in sg_split()
    170  gfp_mask);  in sg_split()
|
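sg_split() uses gfp_mask for its own bookkeeping (the kcalloc() of the splitters array and the output scatterlists). A hedged sketch, assuming the argument order declared in <linux/scatterlist.h> (in, in_mapped_nents, skip, nb_splits, split_sizes, out, out_mapped_nents, gfp_mask); the split_two() wrapper is illustrative:

#include <linux/scatterlist.h>

/* split 'in' (already mapped, in_mapped_nents entries) into two lists */
static int split_two(struct scatterlist *in, int in_mapped_nents,
		     size_t first_bytes, size_t second_bytes)
{
	size_t sizes[2] = { first_bytes, second_bytes };
	struct scatterlist *out[2];
	int out_nents[2];

	/* gfp_mask covers the temporary splitters array and the output lists */
	return sg_split(in, in_mapped_nents, 0 /* skip */, 2, sizes,
			out, out_nents, GFP_KERNEL);
}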
D | sg_pool.c |
     62  static struct scatterlist *sg_pool_alloc(unsigned int nents, gfp_t gfp_mask)  in sg_pool_alloc() argument
     67  return mempool_alloc(sgp->pool, gfp_mask);  in sg_pool_alloc()
|
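sg_pool_alloc() simply forwards gfp_mask to mempool_alloc() on one of the pre-sized scatterlist pools. The same mempool pattern in isolation; obj_cache, obj_pool and the helpers are illustrative:

#include <linux/mempool.h>
#include <linux/slab.h>

static struct kmem_cache *obj_cache;
static mempool_t *obj_pool;

static int pool_setup(void)
{
	obj_cache = kmem_cache_create("obj", 64, 0, 0, NULL);
	if (!obj_cache)
		return -ENOMEM;

	/* keep at least 16 objects in reserve for low-memory situations */
	obj_pool = mempool_create_slab_pool(16, obj_cache);
	if (!obj_pool) {
		kmem_cache_destroy(obj_cache);
		return -ENOMEM;
	}
	return 0;
}

static void *pool_get(gfp_t gfp_mask)
{
	/* as in sg_pool_alloc(): the caller's gfp mask decides whether we may sleep */
	return mempool_alloc(obj_pool, gfp_mask);
}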
D | textsearch.c |
    264  unsigned int len, gfp_t gfp_mask, int flags)  in textsearch_prepare() argument
    289  conf = ops->init(pattern, len, gfp_mask, flags);  in textsearch_prepare()
|
D | ts_kmp.c |
     92  gfp_t gfp_mask, int flags)  in kmp_init() argument
    100  conf = alloc_ts_config(priv_size, gfp_mask);  in kmp_init()
|
D | ts_bm.c |
    142  gfp_t gfp_mask, int flags)  in bm_init() argument
    150  conf = alloc_ts_config(priv_size, gfp_mask);  in bm_init()
|
D | ts_fsm.c |
    256  gfp_t gfp_mask, int flags)  in fsm_init() argument
    282  conf = alloc_ts_config(priv_size, gfp_mask);  in fsm_init()
|
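The four textsearch entries above share one path: textsearch_prepare() forwards gfp_mask and flags to the selected algorithm's init hook (kmp_init(), bm_init() or fsm_init()), each of which calls alloc_ts_config() with that mask. A minimal caller sketch; find_pattern() and the literal pattern are illustrative:

#include <linux/textsearch.h>
#include <linux/err.h>
#include <linux/kernel.h>

static int find_pattern(const void *data, unsigned int len)
{
	struct ts_config *conf;
	struct ts_state state;
	unsigned int pos;

	/* "kmp" selects ts_kmp; "bm" and "fsm" name the other two backends */
	conf = textsearch_prepare("kmp", "needle", 6, GFP_KERNEL, TS_AUTOLOAD);
	if (IS_ERR(conf))
		return PTR_ERR(conf);

	pos = textsearch_find_continuous(conf, &state, data, len);
	textsearch_destroy(conf);

	return pos == UINT_MAX ? -ENOENT : pos;
}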
D | kfifo.c |
     25  size_t esize, gfp_t gfp_mask)  in __kfifo_alloc() argument
     43  fifo->data = kmalloc_array(esize, size, gfp_mask);  in __kfifo_alloc()
|
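__kfifo_alloc() is normally reached through the kfifo_alloc() macro, which sizes the buffer and hands the gfp mask to kmalloc_array(). A small sketch; the 128-byte size is arbitrary:

#include <linux/kfifo.h>

static struct kfifo fifo;

static int fifo_setup(void)
{
	/* size is rounded up to a power of two; GFP_KERNEL reaches kmalloc_array() */
	return kfifo_alloc(&fifo, 128, GFP_KERNEL);
}

static void fifo_teardown(void)
{
	kfifo_free(&fifo);
}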
D | kobject.c |
    171  char *kobject_get_path(struct kobject *kobj, gfp_t gfp_mask)  in kobject_get_path() argument
    179  path = kzalloc(len, gfp_mask);  in kobject_get_path()
|
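kobject_get_path() kzalloc()s the returned string with the caller's gfp_mask, so the caller owns it and must kfree() it. A sketch; log_path() is an illustrative helper and the kobject is assumed to come from the caller:

#include <linux/kobject.h>
#include <linux/slab.h>
#include <linux/printk.h>

static void log_path(struct kobject *kobj)
{
	char *path = kobject_get_path(kobj, GFP_KERNEL);

	if (!path)
		return;

	pr_info("kobject path: %s\n", path);
	kfree(path);	/* the string was allocated with our gfp mask */
}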
D | btree.c |
     81  void *btree_alloc(gfp_t gfp_mask, void *pool_data)  in btree_alloc() argument
     83  return kmem_cache_alloc(btree_cachep, gfp_mask);  in btree_alloc()
|
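btree_alloc() is the mempool allocation callback for the in-kernel B+tree; the gfp mask passed to an insertion ends up here whenever a new node is needed. A hedged sketch using the btree_geo64 geometry exported by lib/btree.c; the store()/fetch() wrappers are illustrative:

#include <linux/btree.h>

static struct btree_head head;

static int store(unsigned long key, void *val)
{
	/* GFP_KERNEL reaches btree_alloc() through the node mempool on growth */
	return btree_insert(&head, &btree_geo64, &key, val, GFP_KERNEL);
}

static void *fetch(unsigned long key)
{
	return btree_lookup(&head, &btree_geo64, &key);
}

btree_init(&head) must have succeeded before the first insertion, and btree_destroy(&head) releases the tree.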