/external/jemalloc/src/ |
D | tcache.c |
      37  tcache_event_hard(tsd_t *tsd, tcache_t *tcache)  in tcache_event_hard() argument
      39  szind_t binind = tcache->next_gc_bin;  in tcache_event_hard()
      40  tcache_bin_t *tbin = &tcache->tbins[binind];  in tcache_event_hard()
      48  tcache_bin_flush_small(tsd, tcache, tbin, binind,  in tcache_event_hard()
      53  - tbin->low_water + (tbin->low_water >> 2), tcache);  in tcache_event_hard()
      71  tcache->next_gc_bin++;  in tcache_event_hard()
      72  if (tcache->next_gc_bin == nhbins)  in tcache_event_hard()
      73  tcache->next_gc_bin = 0;  in tcache_event_hard()
      77  tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,  in tcache_alloc_small_hard() argument
      83  tcache->prof_accumbytes : 0);  in tcache_alloc_small_hard()
      [all …]
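The excerpt above is the tcache's incremental garbage collection: each event advances next_gc_bin round-robin across the bins (wrapping at nhbins) and flushes part of the current bin based on its low-water mark. A minimal conceptual sketch of that pattern, in plain C with placeholder names (cache_t, bin_t, flush_bin are illustrative, not jemalloc's types):

    #include <stddef.h>

    typedef struct { unsigned ncached, low_water; } bin_t;
    typedef struct { bin_t *bins; unsigned nbins, next_gc_bin; } cache_t;

    /* Stand-in for returning the flushed objects to the central allocator. */
    static void flush_bin(bin_t *bin, unsigned keep)
    {
        if (bin->ncached > keep)
            bin->ncached = keep;
    }

    /* One GC step: trim the current bin toward its low-water mark, then
     * advance to the next bin, wrapping around (cf. lines 71-73 above). */
    static void cache_gc_step(cache_t *cache)
    {
        bin_t *bin = &cache->bins[cache->next_gc_bin];

        if (bin->low_water > 0) {
            /* Keep all but roughly 3/4 of the objects that went unused since
             * the last pass (the ncached - low_water + low_water/4 expression
             * visible at line 53 of the excerpt). */
            flush_bin(bin, bin->ncached - bin->low_water + (bin->low_water >> 2));
        }
        bin->low_water = bin->ncached;

        if (++cache->next_gc_bin == cache->nbins)
            cache->next_gc_bin = 0;
    }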
|
D | jemalloc.c |
      766  tcache_t *tcache;  in stats_print_atexit() local
      775  ql_foreach(tcache, &arena->tcache_ql, link) {  in stats_print_atexit()
      776  tcache_stats_merge(tsdn, tcache, arena);  in stats_print_atexit()
     1883  ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path)  argument
     1902  iqalloc(tsd, ptr, tcache, false);
     1906  iqalloc(tsd, ptr, tcache, true);
     1912  isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path)  argument
     1927  isqalloc(tsd, ptr, usize, tcache, slow_path);
     2123  size_t *alignment, bool *zero, tcache_t **tcache, arena_t **arena)  argument
     2138  *tcache = NULL;
      [all …]
|
D | huge.c |
      380  size_t usize, size_t alignment, bool zero, tcache_t *tcache)  in huge_ralloc() argument
      405  isqalloc(tsd, ptr, oldsize, tcache, true);  in huge_ralloc()
|
D | arena.c |
     2791  bool zero, tcache_t *tcache)  in arena_palloc() argument
     2799  tcache, true);  in arena_palloc()
     2808  tcache, true);  in arena_palloc()
     3300  size_t alignment, bool zero, tcache_t *tcache)  in arena_ralloc_move_helper() argument
     3305  zero, tcache, true));  in arena_ralloc_move_helper()
     3309  return (ipalloct(tsdn, usize, alignment, zero, tcache, arena));  in arena_ralloc_move_helper()
     3314  size_t alignment, bool zero, tcache_t *tcache)  in arena_ralloc() argument
     3337  alignment, zero, tcache);  in arena_ralloc()
     3349  isqalloc(tsd, ptr, oldsize, tcache, true);  in arena_ralloc()
     3352  zero, tcache);  in arena_ralloc()
|
D | stats.c |
      623  CONFIG_WRITE_BOOL_JSON(tcache, ",")  in stats_general_print()
      742  OPT_WRITE_BOOL(tcache, ",")  in stats_general_print()
|
D | ctl.c |
      500  {NAME("tcache"), CHILD(named, tcache)},
     1342  tcache_t *tcache = tsd_tcache_get(tsd);  in CTL_RO_CONFIG_GEN() local
     1343  if (tcache != NULL) {  in CTL_RO_CONFIG_GEN()
     1344  tcache_arena_reassociate(tsd_tsdn(tsd), tcache,  in CTL_RO_CONFIG_GEN()
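The ctl.c lines above (tsd_tcache_get() followed by tcache_arena_reassociate()) are the internal side of moving a thread's tcache when the thread is rebound to a different arena. From the public API the usual trigger is the "thread.arena" mallctl; a hedged usage sketch, error handling trimmed:

    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    int main(void)
    {
        unsigned cur, target = 0;
        size_t sz = sizeof(cur);

        /* Read the arena the calling thread is currently bound to. */
        mallctl("thread.arena", &cur, &sz, NULL, 0);
        printf("current arena: %u\n", cur);

        /* Writing a new index rebinds the thread; its tcache follows,
         * which is where the reassociation path above comes in. */
        return mallctl("thread.arena", NULL, NULL, &target, sizeof(target));
    }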
|
/external/jemalloc/include/jemalloc/internal/ |
D | tcache.h |
      112  tcache_t *tcache;  member
      146  void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
      147  void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
      149  void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
      152  unsigned rem, tcache_t *tcache);
      153  void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
      159  void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
      173  void tcache_event(tsd_t *tsd, tcache_t *tcache);
      179  void *tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
      181  void *tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache,
      [all …]
|
D | jemalloc_internal.h |
      994  tcache_t *tcache, bool is_metadata, arena_t *arena, bool slow_path);
      998  tcache_t *tcache, bool is_metadata, arena_t *arena);
     1000  tcache_t *tcache, arena_t *arena);
     1005  void idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_metadata,
     1008  void iqalloc(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path);
     1009  void isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
     1011  void isqalloc(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache,
     1014  size_t extra, size_t alignment, bool zero, tcache_t *tcache,
     1017  size_t alignment, bool zero, tcache_t *tcache, arena_t *arena);
     1052  iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,  in iallocztm() argument
      [all …]
|
D | jemalloc_internal.h.in |
      173  * jemalloc can conceptually be broken into components (arena, tcache, etc.),
      200  * t: tcache
      392  #include "jemalloc/internal/tcache.h"
      429  #include "jemalloc/internal/tcache.h"
      527  #include "jemalloc/internal/tcache.h"
      977  * Include portions of arena.h interleaved with tcache.h in order to resolve
      983  #include "jemalloc/internal/tcache.h"
      994  tcache_t *tcache, bool is_metadata, arena_t *arena, bool slow_path);
      998  tcache_t *tcache, bool is_metadata, arena_t *arena);
     1000  tcache_t *tcache, arena_t *arena);
      [all …]
|
D | arena.h |
      580  size_t alignment, bool zero, tcache_t *tcache);
      605  size_t size, size_t alignment, bool zero, tcache_t *tcache);
      701  bool zero, tcache_t *tcache, bool slow_path);
      704  void arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path);
      705  void arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
     1346  tcache_t *tcache, bool slow_path)  in arena_malloc() argument
     1349  assert(!tsdn_null(tsdn) || tcache == NULL);  in arena_malloc()
     1352  if (likely(tcache != NULL)) {  in arena_malloc()
     1355  tcache, size, ind, zero, slow_path));  in arena_malloc()
     1359  tcache, size, ind, zero, slow_path));  in arena_malloc()
      [all …]
|
D | huge.h | 18 size_t usize, size_t alignment, bool zero, tcache_t *tcache);
|
D | tsd.h | 593 O(tcache, tcache_t *) \
|
/external/strace/tests-m32/ |
D | getcpu.c |
       53  long *tcache = tail_alloc(128);  in main() local
       62  res = syscall(__NR_getcpu, cpu, node, tcache);  in main()
       66  printf("getcpu([%u], [%u], %p) = 0\n", *cpu, *node, tcache);  in main()
|
/external/strace/tests-mx32/ |
D | getcpu.c |
       53  long *tcache = tail_alloc(128);  in main() local
       62  res = syscall(__NR_getcpu, cpu, node, tcache);  in main()
       66  printf("getcpu([%u], [%u], %p) = 0\n", *cpu, *node, tcache);  in main()
|
/external/strace/tests/ |
D | getcpu.c |
       53  long *tcache = tail_alloc(128);  in main() local
       62  res = syscall(__NR_getcpu, cpu, node, tcache);  in main()
       66  printf("getcpu([%u], [%u], %p) = 0\n", *cpu, *node, tcache);  in main()
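Note that the tcache in these strace tests is getcpu(2)'s legacy third argument (a per-thread results cache the kernel has ignored since Linux 2.6.24), not jemalloc's thread cache; the identifier just happens to collide. A stand-alone sketch of the same raw syscall, passing NULL for that argument:

    #define _GNU_SOURCE
    #include <stdio.h>
    #include <unistd.h>
    #include <sys/syscall.h>

    int main(void)
    {
        unsigned cpu = 0, node = 0;

        /* Same call the tests above make, minus the tail_alloc'd buffer. */
        long res = syscall(SYS_getcpu, &cpu, &node, NULL);
        if (res != 0)
            return 1;
        printf("getcpu([%u], [%u], NULL) = %ld\n", cpu, node, res);
        return 0;
    }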
|
/external/jemalloc/ |
D | Android.bp |
       33  // The number of small slots held in the tcache. The higher this number
       37  // The number of large slots held in the tcache. The higher this number
       41  // 1 << XX is the maximum sized allocation that will be in the tcache.
       60  // Only enable the tcache on non-svelte configurations, to save PSS.
      125  "src/tcache.c",
|
D | ChangeLog |
      333  - Add support for explicit tcaches. The "tcache.create", "tcache.flush", and
      334  "tcache.destroy" mallctls control tcache lifetime and flushing, and the
      336  control which tcache is used for each operation.
      431  MALLOCX_TCACHE(tc) and MALLOCX_TCACHE_NONE flags to control tcache usage.
      572  + internal zero-initialized data structures (relevant to tcache and prof
      666  - Disable tcache by default if running inside Valgrind, in order to avoid
      676  - Fix error return value for "thread.tcache.enabled" mallctl.
      704  - Add the "thread.tcache.enabled" mallctl.
      716  - Rename the "tcache.flush" mallctl to "thread.tcache.flush".
      764  - Fix build issues for --disable-tcache.
      [all …]
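The ChangeLog entries above introduce jemalloc's explicit-tcache interface: the "tcache.create", "tcache.flush", and "tcache.destroy" mallctls manage a caller-owned tcache, and MALLOCX_TCACHE(tc) / MALLOCX_TCACHE_NONE select which cache (or none) a given *allocx() call uses. A hedged usage sketch against the non-prefixed public API, with most error handling trimmed:

    #include <jemalloc/jemalloc.h>

    int main(void)
    {
        unsigned tc;
        size_t sz = sizeof(tc);

        /* Create an explicit tcache; its index comes back through oldp. */
        if (mallctl("tcache.create", &tc, &sz, NULL, 0) != 0)
            return 1;

        /* Allocate and free through that tcache instead of the automatic one. */
        void *p = mallocx(4096, MALLOCX_TCACHE(tc));
        if (p != NULL)
            dallocx(p, MALLOCX_TCACHE(tc));

        /* Or bypass thread caching entirely for a single call. */
        void *q = mallocx(64, MALLOCX_TCACHE_NONE);
        if (q != NULL)
            dallocx(q, MALLOCX_TCACHE_NONE);

        /* Flush and tear down the explicit tcache when done with it. */
        mallctl("tcache.flush", NULL, NULL, &tc, sizeof(tc));
        mallctl("tcache.destroy", NULL, NULL, &tc, sizeof(tc));
        return 0;
    }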
|
D | INSTALL |
      155  --disable-tcache
      158  the "opt.tcache" option for usage details.
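The run-time counterpart to the --disable-tcache build option shown above is the opt.tcache option, settable through the MALLOC_CONF environment variable or a compiled-in malloc_conf string. A small sketch (assumes a non-prefixed jemalloc build):

    #include <stdlib.h>
    #include <jemalloc/jemalloc.h>

    /* Option string jemalloc reads at startup; equivalent to running the
     * program with MALLOC_CONF="tcache:false" in the environment. */
    const char *malloc_conf = "tcache:false";

    int main(void)
    {
        void *p = malloc(1);    /* served without going through a thread cache */
        free(p);
        return 0;
    }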
|
D | Makefile.in | 108 $(srcroot)src/tcache.c \
|
D | configure.ac |
      967  AC_ARG_ENABLE([tcache],  optenable
      968  [AS_HELP_STRING([--disable-tcache], [Disable per thread caches])],
     2018  AC_MSG_RESULT([tcache : ${enable_tcache}])
|
/external/jemalloc/include/jemalloc/ |
D | jemalloc_macros.h.in | 24 * Bias tcache index bits so that 0 encodes "automatic tcache management", and 1
|
/external/jemalloc/test/unit/ |
D | mallctl.c |
      144  TEST_MALLCTL_CONFIG(tcache, bool);  in TEST_BEGIN()
      183  TEST_MALLCTL_OPT(bool, tcache, tcache);  in TEST_BEGIN()
      184  TEST_MALLCTL_OPT(size_t, lg_tcache_max, tcache);  in TEST_BEGIN()
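The unit test above checks the read-only views of these settings; querying them directly from application code looks roughly like this (a sketch using the public mallctl interface):

    #include <stdbool.h>
    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    int main(void)
    {
        bool built_with_tcache = false, tcache_enabled = false;
        size_t lg_tcache_max = 0, sz;

        sz = sizeof(built_with_tcache);   /* config.tcache: compile-time setting */
        mallctl("config.tcache", &built_with_tcache, &sz, NULL, 0);

        sz = sizeof(tcache_enabled);      /* opt.tcache: run-time default */
        mallctl("opt.tcache", &tcache_enabled, &sz, NULL, 0);

        sz = sizeof(lg_tcache_max);       /* opt.lg_tcache_max: log2 of the largest cached size */
        mallctl("opt.lg_tcache_max", &lg_tcache_max, &sz, NULL, 0);

        printf("config.tcache=%d opt.tcache=%d opt.lg_tcache_max=%zu\n",
            built_with_tcache, tcache_enabled, lg_tcache_max);
        return 0;
    }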
|
/external/valgrind/coregrind/m_syswrap/ |
D | syswrap-linux.c | 3494 unsigned *, cpu, unsigned *, node, struct vki_getcpu_cache *, tcache); in PRE()
|