/external/jemalloc_new/src/ |
D | sz.c |
    4    JEMALLOC_ALIGNED(CACHELINE)
    18   JEMALLOC_ALIGNED(CACHELINE)
    26   JEMALLOC_ALIGNED(CACHELINE)
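The JEMALLOC_ALIGNED(CACHELINE) attribute on these file-scope objects pins them to a 64-byte boundary so hot tables do not straddle cache lines. A minimal sketch of the same technique, assuming a GCC/Clang-style aligned attribute (the macro name and table below are illustrative, not jemalloc's):

    #include <stdint.h>

    #define MY_CACHELINE 64  /* assumption: 64-byte cache lines, as in jemalloc */

    /* Hypothetical lookup table, pinned to a cache-line boundary so one line
     * fill brings in the whole table. */
    __attribute__((aligned(MY_CACHELINE)))
    static const uint8_t my_table[MY_CACHELINE] = {0};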
|
D | ckh.c |
    277  usize = sz_sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE); in ckh_grow()
    282  tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, in ckh_grow()
    322  usize = sz_sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE); in ckh_shrink()
    326  tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, NULL, in ckh_shrink()
    398  usize = sz_sa2u(sizeof(ckhc_t) << lg_mincells, CACHELINE); in ckh_new()
    403  ckh->tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, in ckh_new()
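These call sites follow a two-step pattern: sz_sa2u() pads the requested size to a usable size for the given alignment, then ipallocztm() performs the cache-line-aligned (and here zeroed) allocation. A rough standard-C analogue of that pattern, as a sketch only (cacheline_zalloc() is a hypothetical helper, not jemalloc's API):

    #include <stdlib.h>
    #include <string.h>

    #define CACHELINE 64

    /* Round the request up to a cache-line multiple, allocate it cache-line
     * aligned, and zero it -- roughly what the sz_sa2u()/ipallocztm() pairing
     * above achieves inside jemalloc. */
    static void *cacheline_zalloc(size_t size) {
        size_t usize = (size + CACHELINE - 1) & ~((size_t)CACHELINE - 1);
        void *p = aligned_alloc(CACHELINE, usize);  /* C11 */
        if (p != NULL) {
            memset(p, 0, usize);
        }
        return p;
    }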
|
D | large.c |
    17   return large_palloc(tsdn, arena, usize, CACHELINE, zero); in large_malloc()
    165  CACHELINE, false, NSIZES, &is_zeroed_trail, &commit)) != NULL in large_ralloc_no_move_expand()
    168  CACHELINE, false, NSIZES, &is_zeroed_trail, &commit)) != NULL) { in large_ralloc_no_move_expand()
    174  extent_past_get(extent), trailsize, 0, CACHELINE, false, in large_ralloc_no_move_expand()
    274  if (alignment <= CACHELINE) { in large_ralloc_move_helper()
|
D | tcache.c |
    409  size = sz_sa2u(size, CACHELINE); in tsd_tcache_data_init()
    411  void *avail_array = ipallocztm(tsd_tsdn(tsd), size, CACHELINE, true, in tsd_tcache_data_init()
    456  size = sz_sa2u(size, CACHELINE); in tcache_create_explicit()
    458  tcache = ipallocztm(tsd_tsdn(tsd), size, CACHELINE, true, NULL, true, in tcache_create_explicit()
    580  * (MALLOCX_TCACHE_MAX+1), CACHELINE); in tcaches_create_prep()
    690  * sizeof(cache_bin_info_t), CACHELINE); in tcache_boot()
|
D | rtree.c |
    33   sizeof(rtree_node_elm_t), CACHELINE); in rtree_node_alloc_impl()
    48   sizeof(rtree_leaf_elm_t), CACHELINE); in rtree_leaf_alloc_impl()
|
D | base.c |
    360  size_t base_alignment = CACHELINE; in base_new()
    472  CACHELINE, &esn); in base_alloc_extent()
|
D | background_thread.c |
    885  sizeof(background_thread_info_t), CACHELINE); in background_thread_boot1()
|
D | prof.c |
    2360  CACHELINE); in prof_boot2()
    2374  CACHELINE); in prof_boot2()
|
D | arena.c |
    1403  if (likely(alignment <= CACHELINE)) { in arena_palloc()
    1780  arena = (arena_t *)base_alloc(tsdn, base, sizeof(arena_t), CACHELINE); in arena_new()
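arena_palloc() (like large_ralloc_move_helper() above) branches on alignment <= CACHELINE: since the caller's usize was already padded for the requested alignment via sz_sa2u(), any alignment no stricter than a cache line can be served by the ordinary allocation path, and only larger alignments take the dedicated aligned path. A sketch of that dispatch with hypothetical helpers (not jemalloc's internal signatures):

    #include <stddef.h>

    #define CACHELINE 64

    void *my_alloc_sizeclass(size_t usize);              /* hypothetical fast path */
    void *my_alloc_aligned(size_t usize, size_t align);  /* hypothetical aligned path */

    /* Alignments up to a cache line are satisfied by the normal size-class
     * allocator; only stricter alignments pay for the aligned slow path. */
    static void *my_palloc(size_t usize, size_t alignment) {
        if (alignment <= CACHELINE) {
            return my_alloc_sizeclass(usize);
        }
        return my_alloc_aligned(usize, alignment);
    }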
|
D | jemalloc.c |
    85   JEMALLOC_ALIGNED(CACHELINE)
|
/external/jemalloc/src/ |
D | ckh.c |
    269  usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE); in ckh_grow()
    274  tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, in ckh_grow()
    315  usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE); in ckh_shrink()
    318  tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, NULL, in ckh_shrink()
    390  usize = sa2u(sizeof(ckhc_t) << lg_mincells, CACHELINE); in ckh_new()
    395  ckh->tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, in ckh_new()
|
D | tcache.c |
    333  size = sa2u(size, CACHELINE); in tcache_create()
    335  tcache = ipallocztm(tsdn, size, CACHELINE, true, NULL, true, in tcache_create()
|
D | huge.c |
    75   CACHELINE, false, NULL, true, iarena); in huge_palloc()
|
D | jemalloc.c |
    92   JEMALLOC_ALIGNED(CACHELINE)
    105  JEMALLOC_ALIGNED(CACHELINE)
    113  JEMALLOC_ALIGNED(CACHELINE)
|
/external/jemalloc_new/include/jemalloc/internal/ |
D | jemalloc_internal_types.h |
    148  #define CACHELINE 64 macro
    149  #define CACHELINE_MASK (CACHELINE - 1)
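CACHELINE_MASK is the usual power-of-two mask: with CACHELINE fixed at 64, (s + CACHELINE_MASK) & ~CACHELINE_MASK rounds a size up to the next cache-line multiple, and jemalloc's headers build a CACHELINE_CEILING helper on this mask. A small self-contained sketch, not taken verbatim from jemalloc:

    #include <stdio.h>
    #include <stddef.h>

    #define CACHELINE      64
    #define CACHELINE_MASK (CACHELINE - 1)

    /* Smallest cache-line multiple >= s, using the mask defined above. */
    static size_t cacheline_ceiling(size_t s) {
        return (s + CACHELINE_MASK) & ~(size_t)CACHELINE_MASK;
    }

    int main(void) {
        printf("%zu %zu %zu\n",
            cacheline_ceiling(1),    /* 64 */
            cacheline_ceiling(64),   /* 64 */
            cacheline_ceiling(65));  /* 128 */
        return 0;
    }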
|
/external/jemalloc_new/test/unit/ |
D | base.c |
    135  CACHELINE, in TEST_BEGIN()
    136  CACHELINE << 1, in TEST_BEGIN()
|
/external/jemalloc/include/jemalloc/internal/ |
D | jemalloc_internal.h |
    316  #define CACHELINE 64 macro
    317  #define CACHELINE_MASK (CACHELINE - 1)
|
D | jemalloc_internal.h.in |
    312  * CACHELINE cannot be based on LG_CACHELINE because __declspec(align()) can
    316  #define CACHELINE 64 macro
    317  #define CACHELINE_MASK (CACHELINE - 1)
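The comment matched at line 312 explains why CACHELINE is spelled as the literal 64 rather than derived from LG_CACHELINE: MSVC's __declspec(align(N)) only accepts a raw integer constant. A hedged sketch of the portable alignment macro this implies (macro and variable names here are illustrative, not jemalloc's):

    /* Illustrative only -- jemalloc's actual macro lives in its own headers. */
    #ifdef _MSC_VER
    #  define MY_ALIGNED(n) __declspec(align(n))        /* needs a raw constant */
    #else
    #  define MY_ALIGNED(n) __attribute__((aligned(n)))
    #endif

    MY_ALIGNED(64) static char cacheline_pad[64];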
|
/external/lz4/programs/ |
D | lz4io.c |
    82   #define CACHELINE 64 macro
|