Searched refs:tcache (Results 1 – 25 of 44) sorted by relevance

/external/jemalloc_new/src/
tcache.c
44 tcache_event_hard(tsd_t *tsd, tcache_t *tcache) { in tcache_event_hard() argument
45 szind_t binind = tcache->next_gc_bin; in tcache_event_hard()
49 tbin = tcache_small_bin_get(tcache, binind); in tcache_event_hard()
51 tbin = tcache_large_bin_get(tcache, binind); in tcache_event_hard()
58 tcache_bin_flush_small(tsd, tcache, tbin, binind, in tcache_event_hard()
67 (tcache->lg_fill_div[binind] + 1)) >= 1) { in tcache_event_hard()
68 tcache->lg_fill_div[binind]++; in tcache_event_hard()
72 - tbin->low_water + (tbin->low_water >> 2), tcache); in tcache_event_hard()
79 if (binind < NBINS && tcache->lg_fill_div[binind] > 1) { in tcache_event_hard()
80 tcache->lg_fill_div[binind]--; in tcache_event_hard()
[all …]
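
The tcache.c excerpt above is the tcache's incremental garbage collection: each pass visits one bin, flushes it down toward its low-water mark, and adjusts that bin's fill divisor. A minimal sketch of the flush arithmetic visible in the excerpt, using made-up bin values (this is not jemalloc code):

    #include <stdio.h>

    int main(void) {
        unsigned ncached = 64;    /* hypothetical: objects currently cached in the bin */
        unsigned low_water = 32;  /* hypothetical: low-water mark since the last GC pass */

        /* Mirrors "ncached - low_water + (low_water >> 2)" from the excerpt:
         * rem is the number of objects kept, so the GC flushes roughly 3/4 of
         * the objects that went unused since the last pass. */
        unsigned rem = ncached - low_water + (low_water >> 2);
        printf("keep %u, flush %u of %u cached objects\n",
               rem, ncached - rem, ncached);
        return 0;
    }
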
jemalloc.c
650 tcache_t *tcache; in stats_print_atexit() local
653 ql_foreach(tcache, &arena->tcache_ql, link) { in stats_print_atexit()
654 tcache_stats_merge(tsdn, tcache, arena); in stats_print_atexit()
1690 tcache_t *tcache; local
1697 tcache = tsd_tcachep_get(tsd);
1698 assert(tcache == tcache_get(tsd));
1700 tcache = tcache_get(tsd);
1703 tcache = NULL;
1705 tcache = tcaches_get(tsd, dopts->tcache_ind);
1722 dopts->zero, tcache, arena);
[all …]
arena.c
1250 arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache, in arena_tcache_fill_small() argument
1263 tcache->lg_fill_div[binind]); i < nfill; i++) { in arena_tcache_fill_small()
1394 bool zero, tcache_t *tcache) { in arena_palloc() argument
1401 zero, tcache, true); in arena_palloc()
1453 arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache, in arena_dalloc_promoted() argument
1461 tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr, in arena_dalloc_promoted()
1623 size_t alignment, bool zero, tcache_t *tcache) { in arena_ralloc_move_helper() argument
1626 zero, tcache, true); in arena_ralloc_move_helper()
1632 return ipalloct(tsdn, usize, alignment, zero, tcache, arena); in arena_ralloc_move_helper()
1637 size_t size, size_t alignment, bool zero, tcache_t *tcache) { in arena_ralloc() argument
[all …]
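
arena_tcache_fill_small() in the excerpt above refills an empty small bin from the arena, and the fill count depends on the bin's lg_fill_div, which tcache_event_hard() raises for idle bins and lowers for heavily flushed ones. A sketch of that relationship, assuming the fill count is the bin capacity shifted right by lg_fill_div (the capacity value is made up):

    #include <stdio.h>

    int main(void) {
        unsigned ncached_max = 128;  /* hypothetical bin capacity */

        /* A larger lg_fill_div means fewer objects are pulled from the arena
         * per refill, so rarely used bins stay small. */
        for (unsigned lg_fill_div = 1; lg_fill_div <= 4; lg_fill_div++) {
            printf("lg_fill_div=%u -> refill %u objects\n",
                   lg_fill_div, ncached_max >> lg_fill_div);
        }
        return 0;
    }
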
/external/jemalloc/src/
tcache.c
37 tcache_event_hard(tsd_t *tsd, tcache_t *tcache) in tcache_event_hard() argument
39 szind_t binind = tcache->next_gc_bin; in tcache_event_hard()
40 tcache_bin_t *tbin = &tcache->tbins[binind]; in tcache_event_hard()
48 tcache_bin_flush_small(tsd, tcache, tbin, binind, in tcache_event_hard()
53 - tbin->low_water + (tbin->low_water >> 2), tcache); in tcache_event_hard()
71 tcache->next_gc_bin++; in tcache_event_hard()
72 if (tcache->next_gc_bin == nhbins) in tcache_event_hard()
73 tcache->next_gc_bin = 0; in tcache_event_hard()
77 tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache, in tcache_alloc_small_hard() argument
83 tcache->prof_accumbytes : 0); in tcache_alloc_small_hard()
[all …]
jemalloc.c
766 tcache_t *tcache; in stats_print_atexit() local
775 ql_foreach(tcache, &arena->tcache_ql, link) { in stats_print_atexit()
776 tcache_stats_merge(tsdn, tcache, arena); in stats_print_atexit()
1883 ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) argument
1902 iqalloc(tsd, ptr, tcache, false);
1906 iqalloc(tsd, ptr, tcache, true);
1912 isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) argument
1927 isqalloc(tsd, ptr, usize, tcache, slow_path);
2123 size_t *alignment, bool *zero, tcache_t **tcache, arena_t **arena) argument
2138 *tcache = NULL;
[all …]
/external/jemalloc/include/jemalloc/internal/
tcache.h
112 tcache_t *tcache; member
146 void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
147 void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
149 void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
152 unsigned rem, tcache_t *tcache);
153 void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
159 void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
173 void tcache_event(tsd_t *tsd, tcache_t *tcache);
179 void *tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
181 void *tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
[all …]
jemalloc_internal.h
994 tcache_t *tcache, bool is_metadata, arena_t *arena, bool slow_path);
998 tcache_t *tcache, bool is_metadata, arena_t *arena);
1000 tcache_t *tcache, arena_t *arena);
1005 void idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_metadata,
1008 void iqalloc(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path);
1009 void isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
1011 void isqalloc(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache,
1014 size_t extra, size_t alignment, bool zero, tcache_t *tcache,
1017 size_t alignment, bool zero, tcache_t *tcache, arena_t *arena);
1052 iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache, in iallocztm() argument
[all …]
jemalloc_internal.h.in
173 * jemalloc can conceptually be broken into components (arena, tcache, etc.),
200 * t: tcache
392 #include "jemalloc/internal/tcache.h"
429 #include "jemalloc/internal/tcache.h"
527 #include "jemalloc/internal/tcache.h"
977 * Include portions of arena.h interleaved with tcache.h in order to resolve
983 #include "jemalloc/internal/tcache.h"
994 tcache_t *tcache, bool is_metadata, arena_t *arena, bool slow_path);
998 tcache_t *tcache, bool is_metadata, arena_t *arena);
1000 tcache_t *tcache, arena_t *arena);
[all …]
arena.h
586 size_t alignment, bool zero, tcache_t *tcache);
611 size_t size, size_t alignment, bool zero, tcache_t *tcache);
707 bool zero, tcache_t *tcache, bool slow_path);
710 void arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path);
711 void arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
1352 tcache_t *tcache, bool slow_path) in arena_malloc() argument
1355 assert(!tsdn_null(tsdn) || tcache == NULL); in arena_malloc()
1358 if (likely(tcache != NULL)) { in arena_malloc()
1361 tcache, size, ind, zero, slow_path)); in arena_malloc()
1365 tcache, size, ind, zero, slow_path)); in arena_malloc()
[all …]
/external/jemalloc_new/include/jemalloc/internal/
tcache_inlines.h
31 tcache_event(tsd_t *tsd, tcache_t *tcache) { in tcache_event() argument
36 if (unlikely(ticker_tick(&tcache->gc_ticker))) { in tcache_event()
37 tcache_event_hard(tsd, tcache); in tcache_event()
42 tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, in tcache_alloc_small() argument
50 bin = tcache_small_bin_get(tcache, binind); in tcache_alloc_small()
60 ret = tcache_alloc_small_hard(tsd_tsdn(tsd), arena, tcache, in tcache_alloc_small()
99 tcache->prof_accumbytes += usize; in tcache_alloc_small()
101 tcache_event(tsd, tcache); in tcache_alloc_small()
106 tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size, in tcache_alloc_large() argument
113 bin = tcache_large_bin_get(tcache, binind); in tcache_alloc_large()
[all …]
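
tcache_alloc_small() in the excerpt above is the allocation fast path: pop a cached pointer from the thread-local bin, track the bin's low-water mark for the GC, and call tcache_alloc_small_hard() only when the bin is empty. A toy model of that shape, with stub types and malloc() standing in for the slow path (none of this is jemalloc code):

    #include <stdlib.h>

    typedef struct {
        void **avail;     /* stack of cached object pointers */
        int    ncached;   /* objects currently in the bin */
        int    low_water; /* minimum ncached seen since the last GC pass */
    } toy_cache_bin_t;

    /* Stands in for tcache_alloc_small_hard(): the real code refills the bin
     * from the arena; here we just fall back to malloc(). */
    static void *toy_alloc_hard(toy_cache_bin_t *bin, size_t size) {
        (void)bin;
        return malloc(size);
    }

    static void *toy_alloc_small(toy_cache_bin_t *bin, size_t size) {
        if (bin->ncached == 0) {
            return toy_alloc_hard(bin, size);    /* slow path: bin is empty */
        }
        void *ret = bin->avail[--bin->ncached];  /* fast path: pop */
        if (bin->ncached < bin->low_water) {
            bin->low_water = bin->ncached;       /* feeds the GC heuristic above */
        }
        return ret;
    }

    int main(void) {
        toy_cache_bin_t bin = { NULL, 0, 0 };
        void *p = toy_alloc_small(&bin, 64);     /* empty bin, takes the slow path */
        free(p);
        return 0;
    }
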
jemalloc_internal_inlines_c.h
41 iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache, in iallocztm() argument
46 assert(!is_internal || tcache == NULL); in iallocztm()
53 ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path); in iallocztm()
68 tcache_t *tcache, bool is_internal, arena_t *arena) { in ipallocztm() argument
73 assert(!is_internal || tcache == NULL); in ipallocztm()
78 ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache); in ipallocztm()
88 tcache_t *tcache, arena_t *arena) { in ipalloct() argument
89 return ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena); in ipalloct()
104 idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx, in idalloctm() argument
107 assert(!is_internal || tcache == NULL); in idalloctm()
[all …]
arena_inlines_b.h
88 tcache_t *tcache, bool slow_path) { in arena_malloc() argument
89 assert(!tsdn_null(tsdn) || tcache == NULL); in arena_malloc()
92 if (likely(tcache != NULL)) { in arena_malloc()
95 tcache, size, ind, zero, slow_path); in arena_malloc()
99 tcache, size, ind, zero, slow_path); in arena_malloc()
190 arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, in arena_dalloc() argument
192 assert(!tsdn_null(tsdn) || tcache == NULL); in arena_dalloc()
195 if (unlikely(tcache == NULL)) { in arena_dalloc()
224 tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind, in arena_dalloc()
229 arena_dalloc_promoted(tsdn, ptr, tcache, in arena_dalloc()
[all …]
tcache_externs.h
31 void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
32 void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
34 void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
37 unsigned rem, tcache_t *tcache);
38 void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
42 void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
47 void tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
jemalloc_internal_inlines_b.h
25 tcache_t *tcache = tcache_get(tsd); in arena_choose_impl() local
26 if (tcache->arena != NULL) { in arena_choose_impl()
28 assert(tcache->arena == in arena_choose_impl()
30 if (tcache->arena != ret) { in arena_choose_impl()
32 tcache, ret); in arena_choose_impl()
35 tcache_arena_associate(tsd_tsdn(tsd), tcache, in arena_choose_impl()
jemalloc_internal_inlines_a.h
110 tcache_small_bin_get(tcache_t *tcache, szind_t binind) { in tcache_small_bin_get() argument
112 return &tcache->bins_small[binind]; in tcache_small_bin_get()
116 tcache_large_bin_get(tcache_t *tcache, szind_t binind) { in tcache_large_bin_get() argument
118 return &tcache->bins_large[binind - NBINS]; in tcache_large_bin_get()
arena_inlines_a.h
49 tcache_t *tcache = tcache_get(tsd); in percpu_arena_update() local
50 if (tcache != NULL) { in percpu_arena_update()
51 tcache_arena_reassociate(tsd_tsdn(tsd), tcache, in percpu_arena_update()
arena_externs.h
47 void arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
58 size_t alignment, bool zero, tcache_t *tcache);
60 void arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
68 size_t size, size_t alignment, bool zero, tcache_t *tcache);
tcache_structs.h
56 tcache_t *tcache; member
/external/strace/tests-mx32/
getcpu.c
53 long *tcache = tail_alloc(128); in main() local
62 res = syscall(__NR_getcpu, cpu, node, tcache); in main()
66 printf("getcpu([%u], [%u], %p) = 0\n", *cpu, *node, tcache); in main()
/external/strace/tests-m32/
getcpu.c
53 long *tcache = tail_alloc(128); in main() local
62 res = syscall(__NR_getcpu, cpu, node, tcache); in main()
66 printf("getcpu([%u], [%u], %p) = 0\n", *cpu, *node, tcache); in main()
/external/strace/tests/
getcpu.c
53 long *tcache = tail_alloc(128); in main() local
62 res = syscall(__NR_getcpu, cpu, node, tcache); in main()
66 printf("getcpu([%u], [%u], %p) = 0\n", *cpu, *node, tcache); in main()
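
All three copies of the strace test above pass a 128-byte buffer named tcache as the third argument of getcpu(2). That argument is the old per-thread cache pointer, which the kernel has ignored since Linux 2.6.24, so a minimal caller can pass NULL instead. A sketch (Linux only):

    #include <stdio.h>
    #include <unistd.h>
    #include <sys/syscall.h>

    int main(void) {
        unsigned cpu = 0, node = 0;

        /* Third argument is the unused getcpu_cache pointer (the "tcache" in
         * the tests above); NULL works on any kernel since 2.6.24. */
        long res = syscall(__NR_getcpu, &cpu, &node, NULL);
        if (res != 0) {
            perror("getcpu");
            return 1;
        }
        printf("running on cpu %u, node %u\n", cpu, node);
        return 0;
    }
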
/external/jemalloc_new/
Android.bp
48 // The minimum number of small slots held in the tcache. This must be
51 // The number of small slots held in the tcache. The higher this number
55 // The number of large slots held in the tcache. The higher this number
59 // 1 << XX is the maximum sized allocation that will be in the tcache.
72 // Only enable the tcache on non-svelte configurations, to save PSS.
137 "src/tcache.c",
/external/jemalloc/
Android.bp
33 // The number of small slots held in the tcache. The higher this number
37 // The number of large slots held in the tcache. The higher this number
41 // 1 << XX is the maximum sized allocation that will be in the tcache.
61 // Only enable the tcache on non-svelte configurations, to save PSS.
140 "src/tcache.c",
/external/jemalloc/include/jemalloc/
jemalloc_macros.h.in
24 * Bias tcache index bits so that 0 encodes "automatic tcache management", and 1
/external/jemalloc_new/include/jemalloc/
jemalloc_macros.h.in
24 * Bias tcache index bits so that 0 encodes "automatic tcache management", and 1
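
The truncated comment appears in both jemalloc copies and describes the public MALLOCX_TCACHE() flag encoding: the explicit tcache index is biased so that 0 in those flag bits means automatic tcache management and 1 means MALLOCX_TCACHE_NONE. A sketch of how a caller uses that encoding, assuming the standard jemalloc public API (mallctl, mallocx, dallocx):

    #include <stddef.h>
    #include <jemalloc/jemalloc.h>

    int main(void) {
        unsigned tci;
        size_t sz = sizeof(tci);

        /* Create an explicit thread-specific cache and get its index. */
        if (mallctl("tcache.create", &tci, &sz, NULL, 0) != 0) {
            return 1;
        }

        void *p = mallocx(64, MALLOCX_TCACHE(tci));  /* allocate via the explicit cache */
        void *q = mallocx(64, MALLOCX_TCACHE_NONE);  /* bypass the tcache entirely */

        dallocx(p, MALLOCX_TCACHE(tci));
        dallocx(q, MALLOCX_TCACHE_NONE);
        return 0;
    }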
