Home
last modified time | relevance | path

Searched refs:gfp_mask (Results 1 – 11 of 11) sorted by relevance

/lib/
idr.c:101 static struct idr_layer *idr_layer_alloc(gfp_t gfp_mask, struct idr *layer_idr) in idr_layer_alloc() argument
116 new = kmem_cache_zalloc(idr_layer_cache, gfp_mask | __GFP_NOWARN); in idr_layer_alloc()
141 return kmem_cache_zalloc(idr_layer_cache, gfp_mask); in idr_layer_alloc()
199 int __idr_pre_get(struct idr *idp, gfp_t gfp_mask) in __idr_pre_get() argument
203 new = kmem_cache_zalloc(idr_layer_cache, gfp_mask); in __idr_pre_get()
229 gfp_t gfp_mask, struct idr *layer_idr) in sub_alloc() argument
281 new = idr_layer_alloc(gfp_mask, layer_idr); in sub_alloc()
298 struct idr_layer **pa, gfp_t gfp_mask, in idr_get_empty_slot() argument
310 if (!(p = idr_layer_alloc(gfp_mask, layer_idr))) in idr_get_empty_slot()
330 if (!(new = idr_layer_alloc(gfp_mask, layer_idr))) { in idr_get_empty_slot()
[all …]
scatterlist.c:136 static struct scatterlist *sg_kmalloc(unsigned int nents, gfp_t gfp_mask) in sg_kmalloc() argument
148 void *ptr = (void *) __get_free_page(gfp_mask); in sg_kmalloc()
149 kmemleak_alloc(ptr, PAGE_SIZE, 1, gfp_mask); in sg_kmalloc()
152 return kmalloc(nents * sizeof(struct scatterlist), gfp_mask); in sg_kmalloc()
244 unsigned int max_ents, gfp_t gfp_mask, in __sg_alloc_table() argument
270 sg = alloc_fn(alloc_size, gfp_mask); in __sg_alloc_table()
320 int sg_alloc_table(struct sg_table *table, unsigned int nents, gfp_t gfp_mask) in sg_alloc_table() argument
325 gfp_mask, sg_kmalloc); in sg_alloc_table()
356 gfp_t gfp_mask) in sg_alloc_table_from_pages() argument
370 ret = sg_alloc_table(sgt, chunks, gfp_mask); in sg_alloc_table_from_pages()
prio_heap.c:9 int heap_init(struct ptr_heap *heap, size_t size, gfp_t gfp_mask, in heap_init() argument
12 heap->ptrs = kmalloc(size, gfp_mask); in heap_init()
radix-tree.c:109 return root->gfp_mask & __GFP_BITS_MASK; in root_gfp_mask()
132 root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT)); in root_tag_set()
137 root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT)); in root_tag_clear()
142 root->gfp_mask &= __GFP_BITS_MASK; in root_tag_clear_all()
147 return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT)); in root_tag_get()
208 gfp_t gfp_mask = root_gfp_mask(root); in radix_tree_node_alloc() local
210 if (!(gfp_mask & __GFP_WAIT)) { in radix_tree_node_alloc()
226 ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); in radix_tree_node_alloc()
267 int radix_tree_preload(gfp_t gfp_mask) in radix_tree_preload() argument
277 node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); in radix_tree_preload()
textsearch.c:261 unsigned int len, gfp_t gfp_mask, int flags) in textsearch_prepare() argument
286 conf = ops->init(pattern, len, gfp_mask, flags); in textsearch_prepare()
ts_kmp.c:96 gfp_t gfp_mask, int flags) in kmp_init() argument
104 conf = alloc_ts_config(priv_size, gfp_mask); in kmp_init()
ts_bm.c:146 gfp_t gfp_mask, int flags) in bm_init() argument
154 conf = alloc_ts_config(priv_size, gfp_mask); in bm_init()
ts_fsm.c:260 gfp_t gfp_mask, int flags) in fsm_init() argument
286 conf = alloc_ts_config(priv_size, gfp_mask); in fsm_init()
kfifo.c:39 size_t esize, gfp_t gfp_mask) in __kfifo_alloc() argument
57 fifo->data = kmalloc(size * esize, gfp_mask); in __kfifo_alloc()
kobject.c:102 char *kobject_get_path(struct kobject *kobj, gfp_t gfp_mask) in kobject_get_path() argument
110 path = kzalloc(len, gfp_mask); in kobject_get_path()
btree.c:81 void *btree_alloc(gfp_t gfp_mask, void *pool_data) in btree_alloc() argument
83 return kmem_cache_alloc(btree_cachep, gfp_mask); in btree_alloc()