
Searched refs:arena_ind (Results 1 – 25 of 33) sorted by relevance

/external/jemalloc_new/test/unit/
arena_reset.c
89 unsigned arena_ind; in do_arena_create() local
91 assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz, in do_arena_create()
94 return arena_ind; in do_arena_create()
98 do_arena_reset_pre(unsigned arena_ind, void ***ptrs, unsigned *nptrs) { in do_arena_reset_pre() argument
105 flags = MALLOCX_ARENA(arena_ind) | MALLOCX_TCACHE_NONE; in do_arena_reset_pre()
137 do_arena_reset_post(void **ptrs, unsigned nptrs, unsigned arena_ind) { in do_arena_reset_post() argument
145 &background_thread_info[arena_ind % ncpus].mtx); in do_arena_reset_post()
154 &background_thread_info[arena_ind % ncpus].mtx); in do_arena_reset_post()
161 do_arena_reset_destroy(const char *name, unsigned arena_ind) { in do_arena_reset_destroy() argument
168 mib[1] = (size_t)arena_ind; in do_arena_reset_destroy()
[all …]
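
The arena_reset.c hits above follow a create-then-allocate pattern: ask jemalloc for a fresh arena index through the "arenas.create" mallctl, then direct allocations at that arena with MALLOCX_ARENA. Below is a minimal sketch of that pattern, assuming the un-prefixed jemalloc public API (mallctl/mallocx/dallocx); it is illustrative, not the test code itself.

#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void) {
	unsigned arena_ind;
	size_t sz = sizeof(arena_ind);

	/* Create a new arena; jemalloc returns its index through oldp. */
	if (mallctl("arenas.create", (void *)&arena_ind, &sz, NULL, 0) != 0) {
		return 1;
	}

	/* Allocate from that arena only, bypassing the thread cache. */
	int flags = MALLOCX_ARENA(arena_ind) | MALLOCX_TCACHE_NONE;
	void *p = mallocx(4096, flags);
	if (p == NULL) {
		return 1;
	}
	printf("allocated from arena %u\n", arena_ind);
	dallocx(p, flags);
	return 0;
}
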
decay.c
40 unsigned arena_ind; in do_arena_create() local
42 assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz, NULL, 0), in do_arena_create()
49 mib[1] = (size_t)arena_ind; in do_arena_create()
56 mib[1] = (size_t)arena_ind; in do_arena_create()
61 return arena_ind; in do_arena_create()
65 do_arena_destroy(unsigned arena_ind) { in do_arena_destroy() argument
70 mib[1] = (size_t)arena_ind; in do_arena_destroy()
83 do_purge(unsigned arena_ind) { in do_purge() argument
88 mib[1] = (size_t)arena_ind; in do_purge()
94 do_decay(unsigned arena_ind) { in do_decay() argument
[all …]
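
do_purge()/do_decay() in decay.c use the MIB form of the mallctl interface: the name "arena.0.purge" is translated to a MIB once, the arena index is patched into component 1, and the control is invoked by MIB. A sketch of that pattern, assuming the un-prefixed mallctlnametomib()/mallctlbymib() API:

#include <stddef.h>
#include <jemalloc/jemalloc.h>

static int
do_purge(unsigned arena_ind) {
	size_t mib[3];
	size_t miblen = sizeof(mib) / sizeof(size_t);

	/* Resolve the name once; the MIB can then be reused cheaply. */
	if (mallctlnametomib("arena.0.purge", mib, &miblen) != 0) {
		return -1;
	}
	mib[1] = (size_t)arena_ind;	/* retarget the MIB at our arena */
	return mallctlbymib(mib, miblen, NULL, NULL, NULL, 0);
}
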
retained.c
5 static unsigned arena_ind; variable
15 unsigned arena_ind; in do_arena_create() local
17 assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz, in do_arena_create()
20 return arena_ind; in do_arena_create()
24 do_arena_destroy(unsigned arena_ind) { in do_arena_destroy() argument
31 mib[1] = (size_t)arena_ind; in do_arena_destroy()
44 do_get_size_impl(const char *cmd, unsigned arena_ind) { in do_get_size_impl() argument
51 mib[2] = arena_ind; in do_get_size_impl()
60 do_get_active(unsigned arena_ind) { in do_get_active() argument
61 return do_get_size_impl("stats.arenas.0.pactive", arena_ind) * PAGE; in do_get_active()
[all …]
pack.c
68 unsigned arena_ind; in arenas_create_mallctl() local
71 sz = sizeof(arena_ind); in arenas_create_mallctl()
72 assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz, NULL, 0), in arenas_create_mallctl()
75 return arena_ind; in arenas_create_mallctl()
79 arena_reset_mallctl(unsigned arena_ind) { in arena_reset_mallctl() argument
85 mib[1] = (size_t)arena_ind; in arena_reset_mallctl()
97 unsigned arena_ind = arenas_create_mallctl(); in TEST_BEGIN() local
106 void *p = mallocx(SZ, MALLOCX_ARENA(arena_ind) | in TEST_BEGIN()
111 SZ, arena_ind, i, j); in TEST_BEGIN()
130 dallocx(p, MALLOCX_ARENA(arena_ind) | in TEST_BEGIN()
[all …]
stats.c
224 gen_mallctl_str(char *cmd, char *name, unsigned arena_ind) { in gen_mallctl_str() argument
225 sprintf(cmd, "stats.arenas.%u.bins.0.%s", arena_ind, name); in gen_mallctl_str()
239 unsigned arena_ind, old_arena_ind; in TEST_BEGIN() local
241 assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz, NULL, 0), in TEST_BEGIN()
243 sz = sizeof(arena_ind); in TEST_BEGIN()
245 (void *)&arena_ind, sizeof(arena_ind)), 0, in TEST_BEGIN()
259 gen_mallctl_str(cmd, "nmalloc", arena_ind); in TEST_BEGIN()
262 gen_mallctl_str(cmd, "ndalloc", arena_ind); in TEST_BEGIN()
265 gen_mallctl_str(cmd, "nrequests", arena_ind); in TEST_BEGIN()
269 gen_mallctl_str(cmd, "curregs", arena_ind); in TEST_BEGIN()
[all …]
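
gen_mallctl_str() in stats.c builds per-arena stats names of the form "stats.arenas.<i>.bins.0.<counter>". A sketch of reading such a counter, assuming the un-prefixed API; jemalloc stats are snapshotted, so the "epoch" control is advanced before reading, and the bin counters are read as uint64_t:

#include <stdio.h>
#include <stdint.h>
#include <jemalloc/jemalloc.h>

static uint64_t
read_bin_counter(unsigned arena_ind, const char *name) {
	char cmd[128];
	uint64_t epoch = 1, val = 0;
	size_t sz = sizeof(epoch);

	/* Refresh the stats snapshot before reading any stats.* control. */
	mallctl("epoch", NULL, NULL, (void *)&epoch, sz);

	snprintf(cmd, sizeof(cmd), "stats.arenas.%u.bins.0.%s", arena_ind, name);
	sz = sizeof(val);
	if (mallctl(cmd, (void *)&val, &sz, NULL, 0) != 0) {
		return 0;
	}
	return val;
}

Typical use would be read_bin_counter(arena_ind, "nmalloc") before and after an allocation burst, as the test does.
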
fork.c
37 unsigned arena_ind; in TEST_BEGIN() local
39 assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz, NULL, 0), in TEST_BEGIN()
46 (void *)&arena_ind, sizeof(arena_ind)), 0, in TEST_BEGIN()
/external/jemalloc/test/integration/
chunk.c
29 bool *commit, unsigned arena_ind) in chunk_alloc() argument
34 *zero ? "true" : "false", *commit ? "true" : "false", arena_ind); in chunk_alloc()
37 arena_ind)); in chunk_alloc()
41 chunk_dalloc(void *chunk, size_t size, bool committed, unsigned arena_ind) in chunk_dalloc() argument
45 __func__, chunk, size, committed ? "true" : "false", arena_ind); in chunk_dalloc()
49 return (old_hooks.dalloc(chunk, size, committed, arena_ind)); in chunk_dalloc()
54 unsigned arena_ind) in chunk_commit() argument
60 arena_ind); in chunk_commit()
61 err = old_hooks.commit(chunk, size, offset, length, arena_ind); in chunk_commit()
68 unsigned arena_ind) in chunk_decommit() argument
[all …]
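
These chunk.c hits wrap the legacy (jemalloc 4.x) chunk hooks: the test reads the arena's chunk_hooks_t, logs each call, and delegates to old_hooks. Below is a sketch of that wrapping for the dalloc hook only; the struct fields follow the chunk_hooks_t of that era, and install_logging_hooks() is a hypothetical helper, not part of the test:

#include <stdbool.h>
#include <stdio.h>
#include <jemalloc/jemalloc.h>

static chunk_hooks_t old_hooks;	/* saved so the wrapper can delegate */

static bool
logging_chunk_dalloc(void *chunk, size_t size, bool committed, unsigned arena_ind) {
	fprintf(stderr, "dalloc(%p, %zu, %s, %u)\n", chunk, size,
	    committed ? "true" : "false", arena_ind);
	return old_hooks.dalloc(chunk, size, committed, arena_ind);
}

static int
install_logging_hooks(unsigned arena_ind) {
	char cmd[64];
	chunk_hooks_t new_hooks;
	size_t sz = sizeof(chunk_hooks_t);

	snprintf(cmd, sizeof(cmd), "arena.%u.chunk_hooks", arena_ind);
	/* Read the current hooks, then write back a copy with dalloc wrapped. */
	if (mallctl(cmd, (void *)&old_hooks, &sz, NULL, 0) != 0) {
		return -1;
	}
	new_hooks = old_hooks;
	new_hooks.dalloc = logging_chunk_dalloc;
	return mallctl(cmd, NULL, NULL, (void *)&new_hooks, sz);
}
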
thread_arena.c
10 unsigned arena_ind; in thd_start() local
18 size = sizeof(arena_ind); in thd_start()
19 if ((err = mallctl("thread.arena", (void *)&arena_ind, &size, in thd_start()
27 size = sizeof(arena_ind); in thd_start()
28 if ((err = mallctl("thread.arena", (void *)&arena_ind, &size, NULL, in thd_start()
35 assert_u_eq(arena_ind, main_arena_ind, in thd_start()
44 unsigned arena_ind; in TEST_BEGIN() local
53 size = sizeof(arena_ind); in TEST_BEGIN()
54 if ((err = mallctl("thread.arena", (void *)&arena_ind, &size, NULL, in TEST_BEGIN()
64 (void *)&arena_ind); in TEST_BEGIN()
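
thread_arena.c reads and writes the "thread.arena" mallctl to query and rebind the calling thread's arena. A minimal sketch of both directions, assuming the un-prefixed API:

#include <stdio.h>
#include <jemalloc/jemalloc.h>

static int
bind_thread_to_arena(unsigned arena_ind) {
	unsigned current;
	size_t sz = sizeof(current);

	/* Read the arena this thread currently allocates from. */
	if (mallctl("thread.arena", (void *)&current, &sz, NULL, 0) != 0) {
		return -1;
	}
	printf("was using arena %u\n", current);

	/* Rebind the thread by passing the new index as the "new" value. */
	return mallctl("thread.arena", NULL, NULL, (void *)&arena_ind,
	    sizeof(arena_ind));
}
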
MALLOCX_ARENA.c
17 unsigned arena_ind; in thd_start() local
21 sz = sizeof(arena_ind); in thd_start()
22 assert_d_eq(mallctl("arenas.extend", (void *)&arena_ind, &sz, NULL, 0), in thd_start()
35 mib[1] = arena_ind; in thd_start()
41 p = mallocx(1, MALLOCX_ARENA(arena_ind)); in thd_start()
xallocx.c
13 arena_ind(void) in arena_ind() function
244 int flags = MALLOCX_ARENA(arena_ind()); in TEST_BEGIN()
311 int flags = MALLOCX_ARENA(arena_ind()); in TEST_BEGIN()
416 int flags = MALLOCX_ARENA(arena_ind()) | MALLOCX_ZERO; in test_zero()
/external/jemalloc_new/test/include/test/
extent_hooks.h
8 unsigned arena_ind);
10 size_t size, bool committed, unsigned arena_ind);
12 size_t size, bool committed, unsigned arena_ind);
14 size_t size, size_t offset, size_t length, unsigned arena_ind);
16 size_t size, size_t offset, size_t length, unsigned arena_ind);
18 size_t size, size_t offset, size_t length, unsigned arena_ind);
20 void *addr, size_t size, size_t offset, size_t length, unsigned arena_ind);
23 unsigned arena_ind);
26 unsigned arena_ind);
82 size_t alignment, bool *zero, bool *commit, unsigned arena_ind) { in extent_alloc_hook() argument
[all …]
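
extent_hooks.h declares wrappers with the jemalloc 5 extent-hook signatures, each taking the owning arena_ind as its last argument. The sketch below shows the general shape: a logging alloc hook with the extent_alloc_t signature that delegates to the previously installed hooks, installed through the "arena.<i>.extent_hooks" mallctl. The helper names (logging_extent_alloc, install_extent_hooks) are illustrative only:

#include <stdbool.h>
#include <stdio.h>
#include <jemalloc/jemalloc.h>

static extent_hooks_t *orig_hooks;	/* saved when the wrapper is installed */
static extent_hooks_t wrapped_hooks;

static void *
logging_extent_alloc(extent_hooks_t *extent_hooks, void *new_addr, size_t size,
    size_t alignment, bool *zero, bool *commit, unsigned arena_ind) {
	fprintf(stderr, "extent alloc of %zu bytes for arena %u\n", size,
	    arena_ind);
	return orig_hooks->alloc(orig_hooks, new_addr, size, alignment, zero,
	    commit, arena_ind);
}

static int
install_extent_hooks(unsigned arena_ind) {
	char cmd[64];
	size_t sz = sizeof(extent_hooks_t *);
	extent_hooks_t *new_hooks = &wrapped_hooks;

	snprintf(cmd, sizeof(cmd), "arena.%u.extent_hooks", arena_ind);
	/* Read the current hooks pointer, copy the table, wrap alloc only. */
	if (mallctl(cmd, (void *)&orig_hooks, &sz, NULL, 0) != 0) {
		return -1;
	}
	wrapped_hooks = *orig_hooks;
	wrapped_hooks.alloc = logging_extent_alloc;
	return mallctl(cmd, NULL, NULL, (void *)&new_hooks, sizeof(new_hooks));
}
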
/external/jemalloc_new/test/integration/
thread_arena.c
9 unsigned arena_ind; in thd_start() local
17 size = sizeof(arena_ind); in thd_start()
18 if ((err = mallctl("thread.arena", (void *)&arena_ind, &size, in thd_start()
26 size = sizeof(arena_ind); in thd_start()
27 if ((err = mallctl("thread.arena", (void *)&arena_ind, &size, NULL, in thd_start()
34 assert_u_eq(arena_ind, main_arena_ind, in thd_start()
57 unsigned arena_ind, old_arena_ind; in TEST_BEGIN() local
59 assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz, NULL, 0), in TEST_BEGIN()
62 size_t size = sizeof(arena_ind); in TEST_BEGIN()
64 (void *)&arena_ind, sizeof(arena_ind))) != 0) { in TEST_BEGIN()
[all …]
extent.c
18 test_extent_body(unsigned arena_ind) { in test_extent_body() argument
26 flags = MALLOCX_ARENA(arena_ind) | MALLOCX_TCACHE_NONE; in test_extent_body()
41 purge_mib[1] = (size_t)arena_ind; in test_extent_body()
56 0, "Unexpected arena.%u.purge error", arena_ind); in test_extent_body()
79 0, "Unexpected arena.%u.purge error", arena_ind); in test_extent_body()
140 unsigned arena_ind; in test_manual_hook_body() local
149 assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz, NULL, 0), in test_manual_hook_body()
156 hooks_mib[1] = (size_t)arena_ind; in test_manual_hook_body()
181 test_extent_body(arena_ind); in test_manual_hook_body()
226 unsigned arena_ind; in TEST_BEGIN() local
[all …]
MALLOCX_ARENA.c
16 unsigned arena_ind; in thd_start() local
20 sz = sizeof(arena_ind); in thd_start()
21 assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz, NULL, 0), in thd_start()
34 mib[1] = arena_ind; in thd_start()
40 p = mallocx(1, MALLOCX_ARENA(arena_ind)); in thd_start()
/external/jemalloc/test/unit/
pack.c
110 unsigned arena_ind; in arenas_extend_mallctl() local
113 sz = sizeof(arena_ind); in arenas_extend_mallctl()
114 assert_d_eq(mallctl("arenas.extend", (void *)&arena_ind, &sz, NULL, 0), in arenas_extend_mallctl()
117 return (arena_ind); in arenas_extend_mallctl()
121 arena_reset_mallctl(unsigned arena_ind) in arena_reset_mallctl() argument
128 mib[1] = (size_t)arena_ind; in arena_reset_mallctl()
135 unsigned arena_ind = arenas_extend_mallctl(); in TEST_BEGIN() local
146 void *p = mallocx(SZ, MALLOCX_ARENA(arena_ind) | in TEST_BEGIN()
151 SZ, arena_ind, i, j); in TEST_BEGIN()
169 dallocx(p, MALLOCX_ARENA(arena_ind) | in TEST_BEGIN()
[all …]
arena_reset.c
84 unsigned arena_ind, nsmall, nlarge, nhuge, nptrs, i; in TEST_BEGIN() local
95 assert_d_eq(mallctl("arenas.extend", (void *)&arena_ind, &sz, NULL, 0), in TEST_BEGIN()
98 flags = MALLOCX_ARENA(arena_ind) | MALLOCX_TCACHE_NONE; in TEST_BEGIN()
139 mib[1] = (size_t)arena_ind; in TEST_BEGIN()
/external/jemalloc_new/include/jemalloc/
jemalloc_typedefs.h.in
6 * size_t alignment, bool *zero, bool *commit, unsigned arena_ind);
14 * bool committed, unsigned arena_ind);
22 * bool committed, unsigned arena_ind);
30 * size_t offset, size_t length, unsigned arena_ind);
38 * size_t offset, size_t length, unsigned arena_ind);
46 * size_t offset, size_t length, unsigned arena_ind);
54 * size_t size_a, size_t size_b, bool committed, unsigned arena_ind);
62 * void *addr_b, size_t size_b, bool committed, unsigned arena_ind);
/external/jemalloc/include/jemalloc/
jemalloc_typedefs.h.in
4 * bool *commit, unsigned arena_ind);
10 * chunk_dalloc(void *chunk, size_t size, bool committed, unsigned arena_ind);
17 * unsigned arena_ind);
24 * unsigned arena_ind);
31 * unsigned arena_ind);
38 * bool committed, unsigned arena_ind);
45 * bool committed, unsigned arena_ind);
/external/jemalloc_new/src/
ctl.c
669 ret->arena_ind = (unsigned)i; in arenas_i_impl()
673 assert(ret == NULL || arenas_i2a(ret->arena_ind) == arenas_i2a(i)); in arenas_i_impl()
826 if (ctl_arena->arena_ind == 0) { in ctl_arena_stats_sdmerge()
886 unsigned arena_ind; in ctl_arena_init() local
892 arena_ind = ctl_arena->arena_ind; in ctl_arena_init()
894 arena_ind = ctl_arenas->narenas; in ctl_arena_init()
898 if (arenas_i_impl(tsd, arena_ind, false, true) == NULL) { in ctl_arena_init()
903 if (arena_init(tsd_tsdn(tsd), arena_ind, extent_hooks) == NULL) { in ctl_arena_init()
907 if (arena_ind == ctl_arenas->narenas) { in ctl_arena_init()
911 return arena_ind; in ctl_arena_init()
[all …]
extent.c
27 unsigned arena_ind);
29 size_t size, bool committed, unsigned arena_ind);
31 size_t size, bool committed, unsigned arena_ind);
33 size_t size, size_t offset, size_t length, unsigned arena_ind);
38 void *addr, size_t size, size_t offset, size_t length, unsigned arena_ind);
41 size_t size, size_t offset, size_t length, unsigned arena_ind);
48 void *addr, size_t size, size_t offset, size_t length, unsigned arena_ind);
56 unsigned arena_ind);
65 unsigned arena_ind);
1197 size_t alignment, bool *zero, bool *commit, unsigned arena_ind) { in extent_alloc_default() argument
[all …]
/external/jemalloc/src/
chunk.c
22 size_t alignment, bool *zero, bool *commit, unsigned arena_ind);
24 unsigned arena_ind);
26 size_t length, unsigned arena_ind);
28 size_t length, unsigned arena_ind);
30 size_t length, unsigned arena_ind);
32 size_t size_b, bool committed, unsigned arena_ind);
34 size_t size_b, bool committed, unsigned arena_ind);
411 chunk_arena_get(tsdn_t *tsdn, unsigned arena_ind) in chunk_arena_get() argument
415 arena = arena_get(tsdn, arena_ind, false); in chunk_arena_get()
442 bool *commit, unsigned arena_ind) in chunk_alloc_default() argument
[all …]
ctl.c
120 static void arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all);
1542 arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all) in arena_i_purge() argument
1549 if (arena_ind == narenas) { in arena_i_purge()
1569 assert(arena_ind < narenas); in arena_i_purge()
1571 tarena = arena_get(tsdn, arena_ind, false); in arena_i_purge()
1617 unsigned arena_ind; in arena_i_reset_ctl() local
1629 arena_ind = (unsigned)mib[1]; in arena_i_reset_ctl()
1632 assert(arena_ind < ctl_stats.narenas); in arena_i_reset_ctl()
1635 assert(arena_ind >= opt_narenas); in arena_i_reset_ctl()
1637 arena = arena_get(tsd_tsdn(tsd), arena_ind, false); in arena_i_reset_ctl()
[all …]
/external/jemalloc_new/include/jemalloc/internal/
jemalloc_internal_inlines_a.h
29 unsigned arena_ind; in percpu_arena_choose() local
32 arena_ind = cpuid; in percpu_arena_choose()
36 arena_ind = cpuid - ncpus / 2; in percpu_arena_choose()
39 return arena_ind; in percpu_arena_choose()
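
percpu_arena_choose() maps a CPU id to an arena index: one arena per CPU, except that in per-physical-CPU mode the upper half of CPU ids (hyperthread siblings) folds back onto the lower half. A standalone sketch of that mapping, without jemalloc's internal mode enum:

static unsigned
percpu_arena_index(unsigned cpuid, unsigned ncpus, int per_phys_cpu) {
	unsigned arena_ind = cpuid;
	if (per_phys_cpu && cpuid >= ncpus / 2) {
		/* Hyperthread sibling maps to its physical core's arena. */
		arena_ind = cpuid - ncpus / 2;
	}
	return arena_ind;
}
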
extent_inlines.h
39 unsigned arena_ind = (unsigned)((extent->e_bits & in extent_arena_get() local
45 if (false && arena_ind >= MALLOCX_ARENA_LIMIT) { in extent_arena_get()
48 assert(arena_ind < MALLOCX_ARENA_LIMIT); in extent_arena_get()
49 return (arena_t *)atomic_load_p(&arenas[arena_ind], ATOMIC_ACQUIRE); in extent_arena_get()
181 unsigned arena_ind = (arena != NULL) ? arena_ind_get(arena) : ((1U << in extent_arena_set() local
184 ((uint64_t)arena_ind << EXTENT_BITS_ARENA_SHIFT); in extent_arena_set()
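
extent_arena_get()/extent_arena_set() pack the owning arena's index into a bit field of the extent's e_bits word and shift it back out on read. A self-contained sketch of that packing; the shift and width constants here are placeholders, not jemalloc's actual EXTENT_BITS_ARENA_* values:

#include <stdint.h>

#define ARENA_SHIFT 0u
#define ARENA_WIDTH 12u
#define ARENA_MASK  ((((uint64_t)1 << ARENA_WIDTH) - 1) << ARENA_SHIFT)

static unsigned
bits_arena_get(uint64_t e_bits) {
	/* Extract the arena index field. */
	return (unsigned)((e_bits & ARENA_MASK) >> ARENA_SHIFT);
}

static uint64_t
bits_arena_set(uint64_t e_bits, unsigned arena_ind) {
	/* Clear the field, then store the new index. */
	return (e_bits & ~ARENA_MASK) | ((uint64_t)arena_ind << ARENA_SHIFT);
}
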
background_thread_inlines.h
17 unsigned arena_ind = arena_ind_get(arena); in arena_background_thread_info_get() local
18 return &background_thread_info[arena_ind % ncpus]; in arena_background_thread_info_get()
