Lines matching +full:1 +full:ma (all hits below are from kernel/bpf/memalloc.c in the Linux kernel; each entry is: source line, matching code, enclosing function)
48 1, /* 72 */
49 1, /* 80 */
50 1, /* 88 */
51 1, /* 96 */
69 return -1; in bpf_mem_cache_idx()
72 return size_index[(size - 1) / 8] - 1; in bpf_mem_cache_idx()
74 return fls(size - 1) - 2; in bpf_mem_cache_idx()
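
The hits at 48-51 and 69-74 are the size-to-bucket mapping: the size_index[] entries for 72..96 bytes all hold 1, which after the "- 1" adjustment in bpf_mem_cache_idx() selects bucket 0, the allocator's 96-byte cache, while requests above 192 bytes fall through to the fls()-based power-of-two path and anything over 4096 gets -1. Below is a minimal user-space sketch of the same index math, assuming the upstream bucket table {96, 192, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096}; fls() is rebuilt from __builtin_clz() since the kernel helper is not available here:

    #include <stdio.h>

    /* Bucket sizes as in kernel/bpf/memalloc.c (assumed current upstream order). */
    static const int sizes[] = {96, 192, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096};

    /* size_index excerpt: entry (size - 1) / 8 holds bucket index + 1. */
    static const unsigned char size_index[24] = {
        3, 3, 4, 4, 5, 5, 5, 5,    /* 8 .. 64 */
        1, 1, 1, 1,                /* 72 .. 96  -> bucket 0 (96 bytes) */
        6, 6, 6, 6,                /* 104 .. 128 */
        2, 2, 2, 2, 2, 2, 2, 2,    /* 136 .. 192 */
    };

    /* fls(x): 1-based index of the most significant set bit, 0 for x == 0. */
    static int fls(unsigned int x)
    {
        return x ? 32 - __builtin_clz(x) : 0;
    }

    static int bpf_mem_cache_idx(size_t size)
    {
        if (!size || size > 4096)
            return -1;                              /* hit at line 69 */
        if (size <= 192)
            return size_index[(size - 1) / 8] - 1;  /* hit at line 72 */
        return fls(size - 1) - 2;                   /* hit at line 74 */
    }

    int main(void)
    {
        const size_t probes[] = {8, 72, 96, 192, 193, 256, 4096, 4097};

        for (unsigned int i = 0; i < sizeof(probes) / sizeof(probes[0]); i++) {
            int idx = bpf_mem_cache_idx(probes[i]);

            printf("size %4zu -> idx %2d (%d-byte bucket)\n", probes[i],
                   idx, idx >= 0 ? sizes[idx] : 0);
        }
        return 0;
    }

Running it shows, for example, size 96 landing in idx 0 (the 96-byte bucket) and size 193 jumping to idx 6 (the 256-byte bucket).
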
147 obj[1] = pptr; in __alloc()
183 WARN_ON_ONCE(local_inc_return(&c->active) != 1); in inc_active()
257 free_percpu(((void **)obj)[1]); in free_one()
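
Hits 147 and 257 are two ends of the same trick: for percpu allocators, __alloc() hands back a small kmalloc'ed "box" whose second word (obj[1]) stores the real __alloc_percpu_gfp() pointer, and free_one() later pulls that pointer back out with ((void **)obj)[1] before calling free_percpu(). A hedged user-space sketch of the layout (the names box_alloc/box_free are illustrative, not the kernel's):

    #include <stdio.h>
    #include <stdlib.h>

    /* Illustrative stand-in for the two-word box __alloc() builds when
     * c->percpu_size is set: word 0 is used as an llist node / cache link,
     * word 1 holds the per-cpu data pointer.
     */
    static void **box_alloc(size_t unit_size)
    {
        void **obj = calloc(2, sizeof(void *)); /* the box itself */
        void *pptr = malloc(unit_size);         /* stand-in for __alloc_percpu_gfp() */

        if (!obj || !pptr) {
            free(pptr);
            free(obj);
            return NULL;
        }
        obj[1] = pptr;      /* mirrors "obj[1] = pptr;" at memalloc.c:147 */
        return obj;
    }

    static void box_free(void *obj)
    {
        free(((void **)obj)[1]);    /* mirrors free_percpu(((void **)obj)[1]) at :257 */
        free(obj);
    }

    int main(void)
    {
        void *o = box_alloc(64);

        printf("box %p carries payload %p\n", o, ((void **)o)[1]);
        box_free(o);
        return 0;
    }
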
310 if (atomic_xchg(&c->call_rcu_ttrace_in_progress, 1)) { in do_call_rcu_ttrace()
397 if (atomic_xchg(&c->call_rcu_in_progress, 1)) { in check_free_by_rcu()
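
Hits 310 and 397 show the same guard in do_call_rcu_ttrace() and check_free_by_rcu(): atomic_xchg(&flag, 1) returns the old value, so exactly one caller wins the right to start an RCU batch and everyone else bails out until the RCU callback clears the flag. A minimal sketch of the pattern using C11 atomics as a user-space stand-in for the kernel's atomic_t:

    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_int call_rcu_in_progress;

    /* Returns 1 if this caller started the batch, 0 if one was already pending. */
    static int try_start_rcu_batch(void)
    {
        /* Same shape as "if (atomic_xchg(&c->call_rcu_in_progress, 1))":
         * the old value tells us whether a batch is already in flight.
         */
        if (atomic_exchange(&call_rcu_in_progress, 1))
            return 0;
        /* ... the kernel would call_rcu() here; its callback stores 0 again ... */
        return 1;
    }

    int main(void)
    {
        printf("first:  %d\n", try_start_rcu_batch());  /* 1: we started it */
        printf("second: %d\n", try_start_rcu_batch());  /* 0: already pending */
        atomic_store(&call_rcu_in_progress, 0);         /* what the callback does */
        printf("third:  %d\n", try_start_rcu_batch());  /* 1 again */
        return 0;
    }
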
472 * 8k allocs and above low == 1, high == 3, batch == 1. in init_refill_work()
474 c->low_watermark = max(32 * 256 / c->unit_size, 1); in init_refill_work()
477 c->batch = max((c->high_watermark - c->low_watermark) / 4 * 3, 1); in init_refill_work()
482 /* To avoid consuming memory assume that 1st run of bpf in prefill_mem_cache()
486 alloc_bulk(c, c->unit_size <= 256 ? 4 : 1, cpu_to_node(cpu), false); in prefill_mem_cache()
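
Hits 472-486 are the cache tuning. init_refill_work() scales the refill watermarks inversely with unit_size (the listing shows the low watermark at 474 and the batch at 477; the high watermark, max(96 * 256 / c->unit_size, 3), sits in the surrounding code and is assumed here), which is exactly why the comment at 472 says 8k-and-above units end up with low == 1, high == 3, batch == 1. prefill_mem_cache() then seeds each CPU with 4 objects for units up to 256 bytes or 1 otherwise, per the comment truncated at 482: the first run of a BPF prog is assumed to do at most a handful of map updates from an IRQ-disabled region. The integer arithmetic, checked in user space (in the branch not shown here, upstream pins units of 256 bytes and below to fixed 32/96 watermarks, so the formula is only exercised for larger units):

    #include <stdio.h>

    #define MAX(a, b) ((a) > (b) ? (a) : (b))

    int main(void)
    {
        const int unit_sizes[] = {512, 1024, 4096, 8192};

        for (unsigned int i = 0; i < sizeof(unit_sizes) / sizeof(unit_sizes[0]); i++) {
            int u = unit_sizes[i];
            int low = MAX(32 * 256 / u, 1);           /* memalloc.c:474 */
            int high = MAX(96 * 256 / u, 3);          /* assumed from context */
            int batch = MAX((high - low) / 4 * 3, 1); /* memalloc.c:477 */
            int prefill = u <= 256 ? 4 : 1;           /* memalloc.c:486 */

            printf("unit %5d: low %2d high %2d batch %2d prefill %d\n",
                   u, low, high, batch, prefill);
        }
        return 0;
    }

For 8192-byte units this prints low 1, high 3, batch 1, matching the comment: (3 - 1) / 4 truncates to 0 and the max() clamps the batch back up to 1.
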
496 int bpf_mem_alloc_init(struct bpf_mem_alloc *ma, int size, bool percpu) in bpf_mem_alloc_init() argument
504 ma->percpu = percpu; in bpf_mem_alloc_init()
531 ma->cache = pc; in bpf_mem_alloc_init()
558 ma->caches = pcc; in bpf_mem_alloc_init()
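
Hits 496-558 are bpf_mem_alloc_init()'s two modes: with size > 0 it builds a single per-cpu cache for one unit size (ma->cache = pc at 531), with size == 0 it builds the full per-bucket array (ma->caches = pcc at 558), and percpu selects the two-word-box scheme shown earlier. A hedged usage sketch in kernel style; struct my_elem, ma_any, and the error handling are illustrative caller-side names, not from the file:

    /* Fixed-size mode: one bucket sized for my_elem; init accounts for the
     * allocator's hidden llist_node header itself, so callers pass the raw
     * element size.
     */
    struct bpf_mem_alloc ma;
    int err = bpf_mem_alloc_init(&ma, sizeof(struct my_elem), false);
    if (err)
        return err;

    /* Any-size mode: size == 0 builds every bucket; allocations then go
     * through bpf_mem_alloc(&ma_any, size) instead of the _cache_ variants.
     */
    err = bpf_mem_alloc_init(&ma_any, 0, false);
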
593 static void check_leaked_objs(struct bpf_mem_alloc *ma) in check_leaked_objs() argument
599 if (ma->cache) { in check_leaked_objs()
601 c = per_cpu_ptr(ma->cache, cpu); in check_leaked_objs()
605 if (ma->caches) { in check_leaked_objs()
607 cc = per_cpu_ptr(ma->caches, cpu); in check_leaked_objs()
616 static void free_mem_alloc_no_barrier(struct bpf_mem_alloc *ma) in free_mem_alloc_no_barrier() argument
618 check_leaked_objs(ma); in free_mem_alloc_no_barrier()
619 free_percpu(ma->cache); in free_mem_alloc_no_barrier()
620 free_percpu(ma->caches); in free_mem_alloc_no_barrier()
621 ma->cache = NULL; in free_mem_alloc_no_barrier()
622 ma->caches = NULL; in free_mem_alloc_no_barrier()
625 static void free_mem_alloc(struct bpf_mem_alloc *ma) in free_mem_alloc() argument
641 free_mem_alloc_no_barrier(ma); in free_mem_alloc()
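
Hits 593-641 are the teardown helpers: check_leaked_objs() walks every CPU in whichever layout is populated (ma->cache or ma->caches) to warn about objects still outstanding, free_mem_alloc_no_barrier() releases the per-cpu arrays and NULLs the pointers, and free_mem_alloc() at 641 only calls it after waiting out pending RCU callbacks. A hedged sketch of the wrapper's shape; upstream additionally issues rcu_barrier_tasks_trace() for the tasks-trace RCU flavor, which is simplified away here:

    static void free_mem_alloc(struct bpf_mem_alloc *ma)
    {
        /* Deferred __free_rcu() callbacks may still be queued; rcu_barrier()
         * waits for every pending callback to run before we free the per-cpu
         * structures those callbacks would touch.
         */
        rcu_barrier();
        free_mem_alloc_no_barrier(ma);    /* hit at line 641 */
    }
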
646 struct bpf_mem_alloc *ma = container_of(work, struct bpf_mem_alloc, work); in free_mem_alloc_deferred() local
648 free_mem_alloc(ma); in free_mem_alloc_deferred()
649 kfree(ma); in free_mem_alloc_deferred()
652 static void destroy_mem_alloc(struct bpf_mem_alloc *ma, int rcu_in_progress) in destroy_mem_alloc() argument
660 free_mem_alloc_no_barrier(ma); in destroy_mem_alloc()
664 copy = kmemdup(ma, sizeof(*ma), GFP_KERNEL); in destroy_mem_alloc()
667 free_mem_alloc(ma); in destroy_mem_alloc()
672 memset(ma, 0, sizeof(*ma)); in destroy_mem_alloc()
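
Hits 646-672: when RCU callbacks are still pending, destroy_mem_alloc() must not block its caller on RCU barriers, so it kmemdup()s the bpf_mem_alloc into a heap copy (664), zeroes the original (672) so the embedding map can be freed immediately, and defers the expensive free_mem_alloc() to a workqueue; free_mem_alloc_deferred() (646-649) later runs it and kfree()s the copy. A hedged sketch of that hand-off; the workqueue name reflects my reading of upstream and should be checked against the file:

    /* Slow path when rcu_in_progress != 0: detach the allocator state into
     * 'copy' so the RCU barriers run from a workqueue, not in the caller.
     */
    copy = kmemdup(ma, sizeof(*ma), GFP_KERNEL);    /* hit at line 664 */
    if (!copy) {
        free_mem_alloc(ma);    /* no memory: fall back to blocking here */
        return;
    }
    memset(ma, 0, sizeof(*ma));    /* caller's copy is inert now (line 672) */
    INIT_WORK(&copy->work, free_mem_alloc_deferred);
    queue_work(system_unbound_wq, &copy->work);
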
677 void bpf_mem_alloc_destroy(struct bpf_mem_alloc *ma) in bpf_mem_alloc_destroy() argument
683 if (ma->cache) { in bpf_mem_alloc_destroy()
686 c = per_cpu_ptr(ma->cache, cpu); in bpf_mem_alloc_destroy()
696 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
698 if (ma->caches) { in bpf_mem_alloc_destroy()
701 cc = per_cpu_ptr(ma->caches, cpu); in bpf_mem_alloc_destroy()
713 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
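
Hits 677-713 show bpf_mem_alloc_destroy() treating the two layouts symmetrically: for each possible CPU it drains the cache(s), sums the call_rcu*_in_progress flags into rcu_in_progress, and hands that count to destroy_mem_alloc(), which then picks the fast, deferred, or blocking teardown accordingly. A hedged sketch of the single-cache loop; the drain and irq_work shutdown steps are summarized as a comment rather than reproduced:

    int cpu, rcu_in_progress = 0;
    struct bpf_mem_cache *c;

    for_each_possible_cpu(cpu) {
        c = per_cpu_ptr(ma->cache, cpu);    /* hit at line 686 */
        /* upstream also stops the refill irq_work and drains the free
         * lists here before sampling the flags
         */
        rcu_in_progress += atomic_read(&c->call_rcu_ttrace_in_progress);
        rcu_in_progress += atomic_read(&c->call_rcu_in_progress);
    }
    destroy_mem_alloc(ma, rcu_in_progress);    /* hit at line 696 */
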
737 if (local_inc_return(&c->active) == 1) { in unit_alloc()
773 if (local_inc_return(&c->active) == 1) { in unit_free()
801 if (local_inc_return(&c->active) == 1) { in unit_free_rcu()
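
Hits 183, 737, 773, and 801 are all the same reentrancy guard: c->active is a per-cpu local_t, and unit_alloc()/unit_free()/unit_free_rcu() only touch the per-cpu freelist when local_inc_return(&c->active) == 1, i.e. when they did not interrupt another user of the same cache (an NMI or a tracing prog can preempt mid-operation); inc_active() at 183 WARN()s if that ever fails in a context where it must not. A hedged sketch of the guard's shape in unit_alloc(), with the freelist bookkeeping simplified:

    struct llist_node *llnode = NULL;
    unsigned long flags;

    local_irq_save(flags);
    if (local_inc_return(&c->active) == 1) {
        /* sole owner of this CPU's cache: safe to pop the freelist */
        llnode = __llist_del_first(&c->free_llist);
    }
    local_dec(&c->active);
    local_irq_restore(flags);
    /* a counter value != 1 means we interrupted another user of the same
     * cache (e.g. an NMI-attached prog); return NULL rather than touch a
     * freelist that is mid-mutation
     */
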
817 void notrace *bpf_mem_alloc(struct bpf_mem_alloc *ma, size_t size) in bpf_mem_alloc() argument
829 ret = unit_alloc(this_cpu_ptr(ma->caches)->cache + idx); in bpf_mem_alloc()
833 void notrace bpf_mem_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_free() argument
846 unit_free(this_cpu_ptr(ma->caches)->cache + idx, ptr); in bpf_mem_free()
849 void notrace bpf_mem_free_rcu(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_free_rcu() argument
862 unit_free_rcu(this_cpu_ptr(ma->caches)->cache + idx, ptr); in bpf_mem_free_rcu()
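
Hits 817-862 are the variable-size API: bpf_mem_alloc() rounds the request up to a bucket via bpf_mem_cache_idx() and pops from this CPU's cache at that index (829), while bpf_mem_free() and bpf_mem_free_rcu() recover the owning cache from the object's hidden llist-node-sized header, so callers pass only the pointer (846, 862). A hedged usage sketch; map->ma and elem_size are illustrative caller-side names:

    /* allocate a variable-sized element; the size is rounded up to one of
     * the allocator's buckets internally
     */
    void *elem = bpf_mem_alloc(&map->ma, elem_size);
    if (!elem)
        return -ENOMEM;
    /* ... use elem ... */
    bpf_mem_free(&map->ma, elem);        /* immediate-reuse free */
    /* or, when BPF progs may still hold RCU-protected pointers to it: */
    bpf_mem_free_rcu(&map->ma, elem);    /* reuse only after an RCU GP */
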
865 void notrace *bpf_mem_cache_alloc(struct bpf_mem_alloc *ma) in bpf_mem_cache_alloc() argument
869 ret = unit_alloc(this_cpu_ptr(ma->cache)); in bpf_mem_cache_alloc()
873 void notrace bpf_mem_cache_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_cache_free() argument
878 unit_free(this_cpu_ptr(ma->cache), ptr); in bpf_mem_cache_free()
881 void notrace bpf_mem_cache_free_rcu(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_cache_free_rcu() argument
886 unit_free_rcu(this_cpu_ptr(ma->cache), ptr); in bpf_mem_cache_free_rcu()
911 void notrace *bpf_mem_cache_alloc_flags(struct bpf_mem_alloc *ma, gfp_t flags) in bpf_mem_cache_alloc_flags() argument
916 c = this_cpu_ptr(ma->cache); in bpf_mem_cache_alloc_flags()
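
Hits 865-916 are the fixed-size counterparts: bpf_mem_cache_alloc/free/free_rcu() skip the index step and use this CPU's single cache directly, and bpf_mem_cache_alloc_flags() (916) adds what I read from the surrounding code as a fallback: when the per-cpu freelist is empty and the gfp flags allow sleeping, it allocates directly rather than returning NULL. A hedged usage sketch; the error handling is illustrative:

    /* the unit size was fixed at bpf_mem_alloc_init() time, so no size arg */
    void *elem = bpf_mem_cache_alloc(&ma);
    if (!elem) {
        /* sleepable context: let the allocator go to slab directly */
        elem = bpf_mem_cache_alloc_flags(&ma, GFP_KERNEL);
        if (!elem)
            return -ENOMEM;
    }
    /* ... use elem ... */
    bpf_mem_cache_free(&ma, elem);
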