
Searched refs:binind (Results 1 – 17 of 17) sorted by relevance

/external/jemalloc_new/include/jemalloc/internal/
tcache_inlines.h
43 UNUSED size_t size, szind_t binind, bool zero, bool slow_path) { in tcache_alloc_small() argument
49 assert(binind < NBINS); in tcache_alloc_small()
50 bin = tcache_small_bin_get(tcache, binind); in tcache_alloc_small()
61 bin, binind, &tcache_hard_success); in tcache_alloc_small()
73 usize = sz_index2size(binind); in tcache_alloc_small()
80 arena_alloc_junk_small(ret, &bin_infos[binind], in tcache_alloc_small()
88 arena_alloc_junk_small(ret, &bin_infos[binind], true); in tcache_alloc_small()
107 szind_t binind, bool zero, bool slow_path) { in tcache_alloc_large() argument
112 assert(binind >= NBINS && binind < nhbins); in tcache_alloc_large()
113 bin = tcache_large_bin_get(tcache, binind); in tcache_alloc_large()
[all …]
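
Only fragments of the two tcache fast paths are visible above, but the asserts imply a simple dispatch on binind: indices below NBINS take the small-bin path, indices in [NBINS, nhbins) take the large-bin path. A minimal sketch of such a dispatcher follows; it is a hypothetical helper, not jemalloc code, and it assumes jemalloc 5's leading tsd/arena/tcache parameters, which do not appear in the hits.

/* Hypothetical helper: route an allocation to the small or large tcache
 * path based on the size-class index binind. */
static void *
tcache_alloc_dispatch(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
    size_t size, szind_t binind, bool zero, bool slow_path) {
	if (binind < NBINS) {
		/* Small size class: served from tcache->bins_small. */
		return tcache_alloc_small(tsd, arena, tcache, size, binind,
		    zero, slow_path);
	}
	/* Large size class cached in the tcache: tcache->bins_large. */
	assert(binind < nhbins);
	return tcache_alloc_large(tsd, arena, tcache, size, binind, zero,
	    slow_path);
}
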
jemalloc_internal_inlines_a.h
110 tcache_small_bin_get(tcache_t *tcache, szind_t binind) { in tcache_small_bin_get() argument
111 assert(binind < NBINS); in tcache_small_bin_get()
112 return &tcache->bins_small[binind]; in tcache_small_bin_get()
116 tcache_large_bin_get(tcache_t *tcache, szind_t binind) { in tcache_large_bin_get() argument
117 assert(binind >= NBINS && binind < nhbins); in tcache_large_bin_get()
118 return &tcache->bins_large[binind - NBINS]; in tcache_large_bin_get()
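
Read together, these two getters show how one binind namespace covers both bin arrays: small bins are indexed directly, large bins after subtracting NBINS. Reassembled from the hits above (the cache_bin_t return type is taken from the tcache_externs.h hit below; the inline qualifier is a simplification):

static inline cache_bin_t *
tcache_small_bin_get(tcache_t *tcache, szind_t binind) {
	assert(binind < NBINS);
	return &tcache->bins_small[binind];
}

static inline cache_bin_t *
tcache_large_bin_get(tcache_t *tcache, szind_t binind) {
	assert(binind >= NBINS && binind < nhbins);
	return &tcache->bins_large[binind - NBINS];
}
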
tcache_externs.h
33 cache_bin_t *tbin, szind_t binind, bool *tcache_success);
35 szind_t binind, unsigned rem);
36 void tcache_bin_flush_large(tsd_t *tsd, cache_bin_t *tbin, szind_t binind,
arena_externs.h
29 size_t arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr);
48 cache_bin_t *tbin, szind_t binind, uint64_t prof_accumbytes);
/external/jemalloc/include/jemalloc/internal/
tcache.h
148 tcache_bin_t *tbin, szind_t binind, bool *tcache_success);
150 szind_t binind, unsigned rem);
151 void tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
184 szind_t binind, bool slow_path);
296 szind_t binind, bool zero, bool slow_path) in tcache_alloc_small() argument
303 assert(binind < NBINS); in tcache_alloc_small()
304 tbin = &tcache->tbins[binind]; in tcache_alloc_small()
314 tbin, binind, &tcache_hard_success); in tcache_alloc_small()
325 usize = index2size(binind); in tcache_alloc_small()
333 &arena_bin_info[binind], false); in tcache_alloc_small()
[all …]
arena.h
72 szind_t binind; member
568 tcache_bin_t *tbin, szind_t binind, uint64_t prof_accumbytes);
686 szind_t binind);
688 size_t runind, szind_t binind, size_t flags);
878 szind_t binind; in arena_mapbits_binind_get() local
881 binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT; in arena_mapbits_binind_get()
882 assert(binind < NBINS || binind == BININD_INVALID); in arena_mapbits_binind_get()
883 return (binind); in arena_mapbits_binind_get()
1014 szind_t binind) in arena_mapbits_large_binind_set() argument
1019 assert(binind <= BININD_INVALID); in arena_mapbits_large_binind_set()
[all …]
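
In the legacy (jemalloc 4) layout, binind is also packed into each page's map bits; the arena.h hits show it being extracted with a mask and shift and validated against BININD_INVALID. A reconstruction of that getter from the hit at lines 878–883 (the signature and the arena_mapbits_get() call are assumptions; only the body appears in the hits):

static inline szind_t
arena_mapbits_binind_get(const arena_chunk_t *chunk, size_t pageind) {
	/* arena_mapbits_get() is assumed to fetch the page's map bits. */
	size_t mapbits = arena_mapbits_get(chunk, pageind);
	szind_t binind = (mapbits & CHUNK_MAP_BININD_MASK) >>
	    CHUNK_MAP_BININD_SHIFT;
	assert(binind < NBINS || binind == BININD_INVALID);
	return (binind);
}
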
jemalloc_internal.h
1135 szind_t binind = size2index(usize); in u2rz() local
1136 ret = arena_bin_info[binind].redzone_size; in u2rz()
jemalloc_internal.h.in
1135 szind_t binind = size2index(usize); local
1136 ret = arena_bin_info[binind].redzone_size;
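
Both headers use binind the same way in u2rz(): map the usable size to its size-class index, then look up that bin's red-zone size. A sketch of the whole function, assuming that only small size classes carry red zones (the large branch and the SMALL_MAXCLASS bound are not visible in the hits):

static inline size_t
u2rz(size_t usize) {
	size_t ret;

	if (usize <= SMALL_MAXCLASS) {
		szind_t binind = size2index(usize);
		ret = arena_bin_info[binind].redzone_size;
	} else {
		/* Assumed: large allocations have no per-bin red zone. */
		ret = 0;
	}
	return (ret);
}
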
/external/jemalloc_new/test/unit/
slab.c
4 szind_t binind; in TEST_BEGIN() local
6 for (binind = 0; binind < NBINS; binind++) { in TEST_BEGIN()
9 const bin_info_t *bin_info = &bin_infos[binind]; in TEST_BEGIN()
12 binind, 0, extent_state_active, false, true, true); in TEST_BEGIN()
18 assert_zu_eq(arena_slab_regind(&slab, binind, reg), in TEST_BEGIN()
pack.c
53 unsigned binind = binind_compute(); in nregs_per_run_compute() local
59 mib[2] = (size_t)binind; in nregs_per_run_compute()
/external/jemalloc/src/
android_je_iterate.c
108 szind_t binind; in je_iterate_chunk() local
112 binind = arena_mapbits_binind_get(chunk, pageind); in je_iterate_chunk()
114 assert(run->binind == binind); in je_iterate_chunk()
115 size = arena_bin_info[binind].run_size; in je_iterate_chunk()
130 szind_t binind; in je_iterate_small() local
136 binind = run->binind; in je_iterate_small()
137 bin_info = &arena_bin_info[binind]; in je_iterate_small()
tcache.c
39 szind_t binind = tcache->next_gc_bin; in tcache_event_hard() local
40 tcache_bin_t *tbin = &tcache->tbins[binind]; in tcache_event_hard()
41 tcache_bin_info_t *tbin_info = &tcache_bin_info[binind]; in tcache_event_hard()
47 if (binind < NBINS) { in tcache_event_hard()
48 tcache_bin_flush_small(tsd, tcache, tbin, binind, in tcache_event_hard()
52 tcache_bin_flush_large(tsd, tbin, binind, tbin->ncached in tcache_event_hard()
78 tcache_bin_t *tbin, szind_t binind, bool *tcache_success) in tcache_alloc_small_hard() argument
82 arena_tcache_fill_small(tsdn, arena, tbin, binind, config_prof ? in tcache_alloc_small_hard()
93 szind_t binind, unsigned rem) in tcache_bin_flush_small() argument
100 assert(binind < NBINS); in tcache_bin_flush_small()
[all …]
arena.c
305 szind_t binind = arena_ptr_small_binind_get(ptr, mapbits); in arena_run_reg_dalloc() local
306 arena_bin_info_t *bin_info = &arena_bin_info[binind]; in arena_run_reg_dalloc()
512 szind_t binind) in arena_run_split_small() argument
518 assert(binind != BININD_INVALID); in arena_run_split_small()
538 arena_mapbits_small_set(chunk, run_ind+i, i, binind, in arena_run_split_small()
1166 arena_run_alloc_small_helper(arena_t *arena, size_t size, szind_t binind) in arena_run_alloc_small_helper() argument
1170 if (arena_run_split_small(arena, run, size, binind)) in arena_run_alloc_small_helper()
1177 arena_run_alloc_small(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t binind) in arena_run_alloc_small() argument
1184 assert(binind != BININD_INVALID); in arena_run_alloc_small()
1187 run = arena_run_alloc_small_helper(arena, size, binind); in arena_run_alloc_small()
[all …]
/external/jemalloc_new/src/
tcache.c
45 szind_t binind = tcache->next_gc_bin; in tcache_event_hard() local
48 if (binind < NBINS) { in tcache_event_hard()
49 tbin = tcache_small_bin_get(tcache, binind); in tcache_event_hard()
51 tbin = tcache_large_bin_get(tcache, binind); in tcache_event_hard()
57 if (binind < NBINS) { in tcache_event_hard()
58 tcache_bin_flush_small(tsd, tcache, tbin, binind, in tcache_event_hard()
65 cache_bin_info_t *tbin_info = &tcache_bin_info[binind]; in tcache_event_hard()
67 (tcache->lg_fill_div[binind] + 1)) >= 1) { in tcache_event_hard()
68 tcache->lg_fill_div[binind]++; in tcache_event_hard()
71 tcache_bin_flush_large(tsd, tbin, binind, tbin->ncached in tcache_event_hard()
[all …]
arena.c
243 arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr) { in arena_slab_regind() argument
251 (uintptr_t)bin_infos[binind].reg_size == 0); in arena_slab_regind()
256 regind = div_compute(&arena_binind_div_info[binind], diff); in arena_slab_regind()
258 assert(regind < bin_infos[binind].nregs); in arena_slab_regind()
265 szind_t binind = extent_szind_get(slab); in arena_slab_reg_dalloc() local
266 const bin_info_t *bin_info = &bin_infos[binind]; in arena_slab_reg_dalloc()
267 size_t regind = arena_slab_regind(slab, binind, ptr); in arena_slab_reg_dalloc()
1115 arena_slab_alloc(tsdn_t *tsdn, arena_t *arena, szind_t binind, in arena_slab_alloc() argument
1126 binind, &zero, &commit); in arena_slab_alloc()
1130 true, binind, &zero, &commit); in arena_slab_alloc()
[all …]
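
In jemalloc 5 the run-based mapbits lookup is gone; arena_slab_regind() instead turns a pointer's byte offset within the slab into a region index using a precomputed per-bin divisor (arena_binind_div_info), as the hits at lines 243–258 show. A sketch of the function with the unshown offset computation filled in (extent_addr_get() is assumed to return the slab's base address):

static size_t
arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr) {
	/* Byte offset of ptr within the slab; extent_addr_get() assumed. */
	size_t diff = (size_t)((uintptr_t)ptr -
	    (uintptr_t)extent_addr_get(slab));
	/* Divide by reg_size via the precomputed per-bin divisor. */
	size_t regind = div_compute(&arena_binind_div_info[binind], diff);
	assert(regind < bin_infos[binind].nregs);
	return regind;
}
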
android_je_iterate.c
48 szind_t binind = extent_szind_get(extent); in je_iterate() local
49 const bin_info_t* bin_info = &bin_infos[binind]; in je_iterate()
/external/jemalloc/test/unit/
pack.c
62 unsigned binind = binind_compute(); in nregs_per_run_compute() local
68 mib[2] = (size_t)binind; in nregs_per_run_compute()
79 unsigned binind = binind_compute(); in npages_per_run_compute() local
86 mib[2] = (size_t)binind; in npages_per_run_compute()
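
Both pack.c tests patch a binind into slot 2 of a mallctl MIB, which is the usual way to query per-bin metadata such as arenas.bin.<i>.nregs. A sketch of that pattern using jemalloc's public mallctl interface (which statistic the tests actually query is an assumption; the hits only show the MIB slot being patched, and error handling is omitted):

#include <jemalloc/jemalloc.h>

/* Query the number of regions per run/slab for a given bin index. */
static uint32_t
bin_nregs(unsigned binind) {
	size_t mib[4];
	size_t miblen = sizeof(mib) / sizeof(size_t);
	uint32_t nregs;
	size_t sz = sizeof(nregs);

	mallctlnametomib("arenas.bin.0.nregs", mib, &miblen);
	mib[2] = (size_t)binind;	/* Select the bin, as in pack.c above. */
	mallctlbymib(mib, miblen, (void *)&nregs, &sz, NULL, 0);
	return nregs;
}
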