
Searched refs:gfp_mask (Results 1 – 11 of 11) sorted by relevance

/lib/
idr.c  94 static struct idr_layer *idr_layer_alloc(gfp_t gfp_mask, struct idr *layer_idr) in idr_layer_alloc() argument
109 new = kmem_cache_zalloc(idr_layer_cache, gfp_mask | __GFP_NOWARN); in idr_layer_alloc()
134 return kmem_cache_zalloc(idr_layer_cache, gfp_mask); in idr_layer_alloc()
192 static int __idr_pre_get(struct idr *idp, gfp_t gfp_mask) in __idr_pre_get() argument
196 new = kmem_cache_zalloc(idr_layer_cache, gfp_mask); in __idr_pre_get()
221 gfp_t gfp_mask, struct idr *layer_idr) in sub_alloc() argument
273 new = idr_layer_alloc(gfp_mask, layer_idr); in sub_alloc()
290 struct idr_layer **pa, gfp_t gfp_mask, in idr_get_empty_slot() argument
302 if (!(p = idr_layer_alloc(gfp_mask, layer_idr))) in idr_get_empty_slot()
322 if (!(new = idr_layer_alloc(gfp_mask, layer_idr))) { in idr_get_empty_slot()
[all …]
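
    A minimal caller-side sketch (not part of the search results; example_store() and the GFP_KERNEL context are assumptions) showing how a caller's gfp_mask reaches the idr.c layer allocations listed above:

    /* idr_alloc() forwards the caller's gfp mask down to idr_layer_alloc()
     * and idr_get_empty_slot() shown above. Assumes sleepable context. */
    #include <linux/idr.h>
    #include <linux/gfp.h>

    static DEFINE_IDR(example_idr);

    static int example_store(void *ptr)
    {
            /* returns the new id (>= 0) or a negative errno */
            return idr_alloc(&example_idr, ptr, 0, 0, GFP_KERNEL);
    }
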
scatterlist.c  164 static struct scatterlist *sg_kmalloc(unsigned int nents, gfp_t gfp_mask) in sg_kmalloc() argument
176 void *ptr = (void *) __get_free_page(gfp_mask); in sg_kmalloc()
177 kmemleak_alloc(ptr, PAGE_SIZE, 1, gfp_mask); in sg_kmalloc()
180 return kmalloc(nents * sizeof(struct scatterlist), gfp_mask); in sg_kmalloc()
277 gfp_t gfp_mask, sg_alloc_fn *alloc_fn) in __sg_alloc_table() argument
308 sg = alloc_fn(alloc_size, gfp_mask); in __sg_alloc_table()
359 int sg_alloc_table(struct sg_table *table, unsigned int nents, gfp_t gfp_mask) in sg_alloc_table() argument
364 NULL, gfp_mask, sg_kmalloc); in sg_alloc_table()
395 gfp_t gfp_mask) in sg_alloc_table_from_pages() argument
409 ret = sg_alloc_table(sgt, chunks, gfp_mask); in sg_alloc_table_from_pages()
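
    A caller-side sketch for the scatterlist hits above (illustrative only; example_build_table() is an assumed name): sg_alloc_table() passes gfp_mask through __sg_alloc_table() into sg_kmalloc().

    #include <linux/scatterlist.h>
    #include <linux/gfp.h>

    static int example_build_table(struct sg_table *sgt, unsigned int nents)
    {
            /* GFP_KERNEL assumes process context; sg_kmalloc() gets this mask */
            int ret = sg_alloc_table(sgt, nents, GFP_KERNEL);

            if (ret)
                    return ret;     /* typically -ENOMEM */
            /* ... populate entries with sg_set_page()/sg_set_buf() ... */
            sg_free_table(sgt);
            return 0;
    }
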
radix-tree.c  85 return root->gfp_mask & __GFP_BITS_MASK; in root_gfp_mask()
108 root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT)); in root_tag_set()
113 root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT)); in root_tag_clear()
118 root->gfp_mask &= __GFP_BITS_MASK; in root_tag_clear_all()
123 return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT)); in root_tag_get()
184 gfp_t gfp_mask = root_gfp_mask(root); in radix_tree_node_alloc() local
191 if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) { in radix_tree_node_alloc()
213 ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); in radix_tree_node_alloc()
254 static int __radix_tree_preload(gfp_t gfp_mask) in __radix_tree_preload() argument
264 node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); in __radix_tree_preload()
[all …]
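
    The radix-tree hits show the root's gfp_mask doubling as tag storage above __GFP_BITS_SHIFT. A hedged sketch of the usual preload-then-insert caller pattern (example_insert(), the tree and the lock are assumptions):

    #include <linux/radix-tree.h>
    #include <linux/gfp.h>
    #include <linux/spinlock.h>

    static RADIX_TREE(example_tree, GFP_ATOMIC);
    static DEFINE_SPINLOCK(example_lock);

    static int example_insert(unsigned long index, void *item)
    {
            /* preload per-CPU nodes with a sleepable mask ... */
            int err = radix_tree_preload(GFP_KERNEL);

            if (err)
                    return err;
            /* ... then insert under the lock; radix_tree_node_alloc() above
             * checks gfpflags_allow_blocking() on the root's mask and falls
             * back to the preloaded nodes when it cannot block. */
            spin_lock(&example_lock);
            err = radix_tree_insert(&example_tree, index, item);
            spin_unlock(&example_lock);
            radix_tree_preload_end();
            return err;
    }
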
sg_split.c  154 gfp_t gfp_mask) in sg_split() argument
159 splitters = kcalloc(nb_splits, sizeof(*splitters), gfp_mask); in sg_split()
172 gfp_mask); in sg_split()
textsearch.c  262 unsigned int len, gfp_t gfp_mask, int flags) in textsearch_prepare() argument
287 conf = ops->init(pattern, len, gfp_mask, flags); in textsearch_prepare()
ts_kmp.c  96 gfp_t gfp_mask, int flags) in kmp_init() argument
104 conf = alloc_ts_config(priv_size, gfp_mask); in kmp_init()
ts_bm.c  146 gfp_t gfp_mask, int flags) in bm_init() argument
154 conf = alloc_ts_config(priv_size, gfp_mask); in bm_init()
ts_fsm.c  260 gfp_t gfp_mask, int flags) in fsm_init() argument
286 conf = alloc_ts_config(priv_size, gfp_mask); in fsm_init()
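
    The textsearch_prepare() hit and the three ts_*.c init hits are one path: textsearch_prepare() looks up the algorithm and forwards gfp_mask to its init callback, which calls alloc_ts_config(). An illustrative caller (example_find() and the "kmp"/needle choice are assumptions):

    #include <linux/kernel.h>
    #include <linux/textsearch.h>
    #include <linux/gfp.h>
    #include <linux/err.h>
    #include <linux/errno.h>

    static int example_find(const void *haystack, unsigned int len)
    {
            struct ts_config *conf;
            struct ts_state state;
            unsigned int pos;

            conf = textsearch_prepare("kmp", "needle", 6, GFP_KERNEL,
                                      TS_AUTOLOAD);
            if (IS_ERR(conf))
                    return PTR_ERR(conf);
            pos = textsearch_find_continuous(conf, &state, haystack, len);
            textsearch_destroy(conf);
            return pos == UINT_MAX ? -ENOENT : (int)pos;
    }
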
kfifo.c  39 size_t esize, gfp_t gfp_mask) in __kfifo_alloc() argument
57 fifo->data = kmalloc(size * esize, gfp_mask); in __kfifo_alloc()
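
    __kfifo_alloc() is normally reached through the kfifo_alloc() macro; the gfp_mask goes straight to the kmalloc() of the ring buffer (the requested size is rounded up to a power of two). A sketch with assumed names:

    #include <linux/kfifo.h>
    #include <linux/gfp.h>

    static int example_fifo_setup(void)
    {
            DECLARE_KFIFO_PTR(fifo, unsigned char);
            int ret = kfifo_alloc(&fifo, 128, GFP_KERNEL);

            if (ret)
                    return ret;
            /* ... kfifo_in()/kfifo_out() as usual ... */
            kfifo_free(&fifo);
            return 0;
    }
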
kobject.c  146 char *kobject_get_path(struct kobject *kobj, gfp_t gfp_mask) in kobject_get_path() argument
154 path = kzalloc(len, gfp_mask); in kobject_get_path()
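
    kobject_get_path() kzalloc()s the returned string with the caller's gfp_mask, so the caller must kfree() it. A small hedged example (example_log_path() is an assumed helper):

    #include <linux/kobject.h>
    #include <linux/slab.h>
    #include <linux/printk.h>

    static void example_log_path(struct kobject *kobj)
    {
            char *path = kobject_get_path(kobj, GFP_KERNEL);

            if (path) {
                    pr_info("kobject path: %s\n", path);
                    kfree(path);
            }
    }
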
btree.c  81 void *btree_alloc(gfp_t gfp_mask, void *pool_data) in btree_alloc() argument
83 return kmem_cache_alloc(btree_cachep, gfp_mask); in btree_alloc()
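
    btree_alloc() is the mempool allocation callback that btree_init() wires up; callers supply their own mask through APIs such as btree_insert(). A hedged sketch assuming 64-bit keys (btree_geo64 on a 64-bit build) and process context:

    #include <linux/btree.h>
    #include <linux/gfp.h>

    static struct btree_head example_head;

    static int example_btree_store(unsigned long key, void *val)
    {
            int err = btree_init(&example_head);

            if (err)
                    return err;
            err = btree_insert(&example_head, &btree_geo64, &key, val,
                               GFP_KERNEL);
            btree_destroy(&example_head);
            return err;
    }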