
Searched refs:LG_PAGE (Results 1 – 25 of 39) sorted by relevance

/external/jemalloc_new/include/jemalloc/internal/
sz.h:57 pszind_t shift = (x < LG_SIZE_CLASS_GROUP + LG_PAGE) ? 0 : x - in sz_psz2ind()
58 (LG_SIZE_CLASS_GROUP + LG_PAGE); in sz_psz2ind()
61 pszind_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ? in sz_psz2ind()
62 LG_PAGE : x - LG_SIZE_CLASS_GROUP - 1; in sz_psz2ind()
83 size_t grp_size = ((ZU(1) << (LG_PAGE + in sz_pind2sz_compute()
87 size_t lg_delta = shift + (LG_PAGE-1); in sz_pind2sz_compute()
115 size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ? in sz_psz2u()
116 LG_PAGE : x - LG_SIZE_CLASS_GROUP - 1; in sz_psz2u()
arena_types.h:5 #define LG_SLAB_MAXREGS (LG_PAGE - LG_TINY_MIN)
jemalloc_internal_defs.h:211 #define LG_PAGE 12 macro
jemalloc_internal_defs_host.h:202 #define LG_PAGE 12 macro
pages.h:8 #define PAGE ((size_t)(1U << LG_PAGE))
extent_inlines.h:197 unsigned lg_range = LG_PAGE - in extent_addr_randomize()
209 uintptr_t random_offset = ((uintptr_t)r) << (LG_PAGE - in extent_addr_randomize()
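The sz.h hits above are jemalloc's mapping from a page-multiple size to a page-size-class index. The following is a minimal standalone sketch of that arithmetic, assuming LG_PAGE == 12 and LG_SIZE_CLASS_GROUP == 2 (the values in the defs headers above and jemalloc's default grouping); lg_floor() here is a stand-in for jemalloc's helper, built on a GCC/Clang builtin.

#include <stdio.h>
#include <stddef.h>

#define LG_PAGE 12            /* 4 KiB pages, as in jemalloc_internal_defs.h */
#define LG_SIZE_CLASS_GROUP 2 /* four size classes per size doubling */
#define PAGE ((size_t)(1U << LG_PAGE))   /* same shape as pages.h */

/* Stand-in for jemalloc's lg_floor(): index of the highest set bit. */
static unsigned lg_floor(size_t x) {
	return (unsigned)(63 - __builtin_clzll((unsigned long long)x));
}

/* Simplified sz_psz2ind(): shift selects the size-class group, lg_delta is
 * the spacing within the group, mod is the position inside the group. */
static unsigned psz2ind(size_t psz) {
	unsigned x = lg_floor((psz << 1) - 1);   /* ceil(log2(psz)) */
	unsigned shift = (x < LG_SIZE_CLASS_GROUP + LG_PAGE) ? 0 :
	    x - (LG_SIZE_CLASS_GROUP + LG_PAGE);
	unsigned grp = shift << LG_SIZE_CLASS_GROUP;
	unsigned lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ?
	    LG_PAGE : x - LG_SIZE_CLASS_GROUP - 1;
	size_t mod = (((psz - 1) & ((size_t)-1 << lg_delta)) >> lg_delta) &
	    ((1U << LG_SIZE_CLASS_GROUP) - 1);
	return grp + (unsigned)mod;
}

int main(void) {
	/* Prints 4096->0, 8192->1, 12288->2, 16384->3, 20480->4, ... */
	for (size_t pages = 1; pages <= 8; pages++) {
		size_t psz = pages << LG_PAGE;
		printf("%zu bytes -> psz index %u\n", psz, psz2ind(psz));
	}
	return 0;
}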
/external/jemalloc/src/
arena.c:192 LG_PAGE)); in arena_avail_insert()
193 assert((npages << LG_PAGE) < chunksize); in arena_avail_insert()
206 LG_PAGE)); in arena_avail_remove()
207 assert((npages << LG_PAGE) < chunksize); in arena_avail_remove()
221 LG_PAGE)); in arena_run_dirty_insert()
239 LG_PAGE)); in arena_run_dirty_remove()
253 return (extent_node_size_get(node) >> LG_PAGE); in arena_chunk_dirty_npages()
303 size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE; in arena_run_reg_dalloc()
330 (run_ind << LG_PAGE)), (npages << LG_PAGE)); in arena_run_zero()
331 memset((void *)((uintptr_t)chunk + (run_ind << LG_PAGE)), 0, in arena_run_zero()
[all …]
android_je_iterate.c:121 pageind += size >> LG_PAGE; in je_iterate_chunk()
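The arena.c matches above all rely on the same two conversions: a pointer's page index within its chunk is the byte offset shifted right by LG_PAGE, and a page count becomes a byte count by shifting left. A hedged illustration on a 64-bit build (chunk and ptr are made-up values, not jemalloc API):

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

#define LG_PAGE 12

int main(void) {
	uintptr_t chunk = 0x7f0000000000;          /* hypothetical chunk base */
	uintptr_t ptr   = chunk + 5 * 4096 + 123;  /* somewhere in page 5 */

	size_t pageind = (ptr - chunk) >> LG_PAGE; /* as in arena_run_reg_dalloc() */
	size_t npages  = 3;
	size_t nbytes  = npages << LG_PAGE;        /* as in arena_run_zero() */

	printf("pageind = %zu, %zu pages = %zu bytes\n", pageind, npages, nbytes);
	return 0;
}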
/external/jemalloc/test/unit/
pack.c:14 #if LG_PAGE <= 14
15 #define SZ (ZU(1) << (LG_PAGE - 2))
90 return (run_size >> LG_PAGE); in npages_per_run_compute()
97 return ((chunksize >> LG_PAGE) - map_bias); in npages_per_chunk_compute()
run_quantize.c:114 for (i = 1; i <= chunksize >> LG_PAGE; i++) { in TEST_BEGIN()
117 run_size = i << LG_PAGE; in TEST_BEGIN()
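The pack.c guard shows the usual pattern for page-size-portable tests: derive test sizes from LG_PAGE instead of hardcoding byte counts, and reject configurations the test was not written for. A sketch of the same pattern (the 14 cutoff mirrors pack.c; the rest is illustrative):

#include <stdio.h>
#include <stddef.h>

#define LG_PAGE 12
#define ZU(z) ((size_t)z)

#if LG_PAGE <= 14
#define SZ (ZU(1) << (LG_PAGE - 2))  /* quarter page: 1 KiB on 4 KiB pages */
#else
#error "test sized for pages of at most 16 KiB"
#endif

int main(void) {
	printf("test allocation size: %zu bytes\n", SZ);
	return 0;
}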
/external/jemalloc_new/src/
bin.c:14 (pgs << LG_PAGE), (pgs << LG_PAGE) / ((1U<<lg_grp) + \
arena.c:95 extents_npages_get(&arena->extents_retained) << LG_PAGE); in arena_stats_merge()
123 extents_npages_get(&arena->extents_muzzy)) << LG_PAGE))); in arena_stats_merge()
373 arena_nactive_add(arena, size >> LG_PAGE); in arena_extent_alloc_large()
387 arena_nactive_sub(arena, extent_size_get(extent) >> LG_PAGE); in arena_extent_dalloc_large_prep()
401 arena_nactive_sub(arena, udiff >> LG_PAGE); in arena_extent_ralloc_large_shrink()
415 arena_nactive_add(arena, udiff >> LG_PAGE); in arena_extent_ralloc_large_expand()
737 nstashed += extent_size_get(extent) >> LG_PAGE; in arena_stash_decayed()
761 size_t npages = extent_size_get(extent) >> LG_PAGE; in arena_decay_stashed()
801 nunmapped << LG_PAGE); in arena_decay_stashed()
906 arena_nactive_sub(arena, extent_size_get(slab) >> LG_PAGE); in arena_slab_dalloc()
[all …]
extent.c:322 size_t npages = size >> LG_PAGE; in extents_insert_locked()
348 size_t npages = size >> LG_PAGE; in extents_remove_locked()
357 cur_extents_npages - (size >> LG_PAGE), ATOMIC_RELAXED); in extents_remove_locked()
709 for (size_t i = 1; i < (extent_size_get(extent) >> LG_PAGE) - 1; i++) { in extent_interior_register()
712 LG_PAGE), extent, szind, true); in extent_interior_register()
724 size_t nadd = extent_size_get(extent) >> LG_PAGE; in extent_gdump_add()
747 size_t nsub = extent_size_get(extent) >> LG_PAGE; in extent_gdump_sub()
816 for (i = 1; i < (extent_size_get(extent) >> LG_PAGE) - 1; i++) { in extent_interior_deregister()
819 LG_PAGE)); in extent_interior_deregister()
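In the extent.c hits, each extents container keeps a page count that is adjusted by size >> LG_PAGE whenever an extent is inserted or removed, with relaxed atomics because the count is only a statistic. A minimal sketch of that accounting pattern (the struct and function names here are illustrative, not jemalloc's):

#include <stdatomic.h>
#include <stdio.h>
#include <stddef.h>

#define LG_PAGE 12

typedef struct {
	atomic_size_t npages;  /* pages held; mirrors extents' counter */
} extents_stats_t;

static void extents_stats_insert(extents_stats_t *s, size_t extent_size) {
	atomic_fetch_add_explicit(&s->npages, extent_size >> LG_PAGE,
	    memory_order_relaxed);
}

static void extents_stats_remove(extents_stats_t *s, size_t extent_size) {
	atomic_fetch_sub_explicit(&s->npages, extent_size >> LG_PAGE,
	    memory_order_relaxed);
}

int main(void) {
	extents_stats_t s;
	atomic_init(&s.npages, 0);
	extents_stats_insert(&s, 8 << LG_PAGE);   /* add an 8-page extent */
	extents_stats_remove(&s, 3 << LG_PAGE);   /* drop a 3-page extent */
	printf("npages = %zu\n",                  /* prints 5 */
	    atomic_load_explicit(&s.npages, memory_order_relaxed));
	return 0;
}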
/external/jemalloc/include/jemalloc/internal/
arena.h:7 #define LG_RUN_MAXREGS (LG_PAGE - LG_TINY_MIN)
157 #define CHUNK_MAP_SIZE_SHIFT (CHUNK_MAP_RUNIND_SHIFT - LG_PAGE)
771 return ((void *)((uintptr_t)chunk + (pageind << LG_PAGE))); in arena_miscelm_to_rpages()
1129 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE; in arena_ptr_small_binind_get()
1246 size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE; in arena_prof_tctx_get()
1273 size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE; in arena_prof_tctx_set()
1314 LG_PAGE; in arena_prof_tctx_reset()
1399 pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE; in arena_salloc()
1415 assert(pageind + ((ret+large_pad)>>LG_PAGE) <= in arena_salloc()
1419 pageind+((ret+large_pad)>>LG_PAGE)-1)); in arena_salloc()
[all …]
jemalloc_internal.h:327 #define PAGE ((size_t)(1U << LG_PAGE))
594 pszind_t shift = (x < LG_SIZE_CLASS_GROUP + LG_PAGE) ? 0 : x - in psz2ind()
595 (LG_SIZE_CLASS_GROUP + LG_PAGE); in psz2ind()
598 pszind_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ? in psz2ind()
599 LG_PAGE : x - LG_SIZE_CLASS_GROUP - 1; in psz2ind()
619 size_t grp_size = ((ZU(1) << (LG_PAGE + in pind2sz_compute()
623 size_t lg_delta = shift + (LG_PAGE-1); in pind2sz_compute()
655 size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ? in psz2u()
656 LG_PAGE : x - LG_SIZE_CLASS_GROUP - 1; in psz2u()
jemalloc_internal_defs.h:204 #define LG_PAGE 12 macro
jemalloc_internal_defs_host.h:205 #define LG_PAGE 12 macro
size_classes.sh:229 * This header requires LG_SIZEOF_PTR, LG_TINY_MIN, LG_QUANTUM, and LG_PAGE to
247 * NPSIZES: Number of size classes that are a multiple of (1U << LG_PAGE).
jemalloc_internal.h.in:323 /* Page size. LG_PAGE is determined by the configure script. */
327 #define PAGE ((size_t)(1U << LG_PAGE))
594 pszind_t shift = (x < LG_SIZE_CLASS_GROUP + LG_PAGE) ? 0 : x -
595 (LG_SIZE_CLASS_GROUP + LG_PAGE);
598 pszind_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ?
599 LG_PAGE : x - LG_SIZE_CLASS_GROUP - 1;
619 size_t grp_size = ((ZU(1) << (LG_PAGE +
623 size_t lg_delta = shift + (LG_PAGE-1);
655 size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ?
656 LG_PAGE : x - LG_SIZE_CLASS_GROUP - 1;
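The jemalloc_internal.h hits at 655–656 are the rounding step of psz2u(), which pads a request up to the next page size class: sizes up to 16 KiB round to a page multiple, and beyond that the spacing (delta) doubles with each doubling of magnitude. A sketch under the same assumptions as the psz2ind sketch above (LG_PAGE == 12, LG_SIZE_CLASS_GROUP == 2, stand-in lg_floor()):

#include <stdio.h>
#include <stddef.h>

#define LG_PAGE 12
#define LG_SIZE_CLASS_GROUP 2
#define ZU(z) ((size_t)z)

static unsigned lg_floor(size_t x) {
	return (unsigned)(63 - __builtin_clzll((unsigned long long)x));
}

/* Simplified psz2u(): round psz up to its page size class, per the
 * lg_delta selection shown in the hits above. */
static size_t psz2u(size_t psz) {
	size_t x = lg_floor((psz << 1) - 1);
	size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ?
	    LG_PAGE : x - LG_SIZE_CLASS_GROUP - 1;
	size_t delta_mask = (ZU(1) << lg_delta) - 1;
	return (psz + delta_mask) & ~delta_mask;
}

int main(void) {
	/* 5000 -> 8192 (page-spaced), 70000 -> 81920 (16 KiB-spaced class) */
	printf("%zu %zu\n", psz2u(5000), psz2u(70000));
	return 0;
}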
/external/jemalloc_new/test/unit/
extent_quantize.c:108 for (i = 1; i <= SZ_MAX >> LG_PAGE; i++) { in TEST_BEGIN()
111 extent_size = i << LG_PAGE; in TEST_BEGIN()
pack.c:6 #if LG_PAGE <= 14
7 #define SZ (ZU(1) << (LG_PAGE - 2))
slab.c:11 MALLOCX_LG_ALIGN(LG_PAGE)), bin_info->slab_size, true, in TEST_BEGIN()
rtree.c:125 PAGE + (((uintptr_t)1) << LG_PAGE) - 1}; in TEST_BEGIN()
149 (((uintptr_t)2) << LG_PAGE), false), in TEST_BEGIN()
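The slab.c hit uses jemalloc's non-standard mallocx() API with MALLOCX_LG_ALIGN(LG_PAGE) to request page-aligned memory. A small usage sketch, assuming a build linked against jemalloc with 4 KiB pages:

#include <stdio.h>
#include <stdint.h>
#include <jemalloc/jemalloc.h>

int main(void) {
	/* Request 100 bytes aligned to 2^12 = 4096, the page size here. */
	void *p = mallocx(100, MALLOCX_LG_ALIGN(12));
	if (p == NULL)
		return 1;
	printf("page-aligned: %d\n", ((uintptr_t)p & 4095) == 0);
	dallocx(p, 0);
	return 0;
}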
/external/jemalloc_new/
configure.ac:1347 [LG_PAGE="$with_lg_page"], [LG_PAGE="detect"])
1348 if test "x$LG_PAGE" = "xdetect"; then
1349 AC_CACHE_CHECK([LG_PAGE],
1391 LG_PAGE="${je_cv_lg_page}"
1393 if test "x${LG_PAGE}" != "xundefined" ; then
1394 AC_DEFINE_UNQUOTED([LG_PAGE], [$LG_PAGE]) definition
1396 AC_MSG_ERROR([cannot determine value for LG_PAGE])
1427 if test "x${LG_PAGE}" != "xundefined" -a \
1428 "${je_cv_lg_hugepage}" -lt "${LG_PAGE}" ; then
1429 AC_MSG_ERROR([Huge page size (2^${je_cv_lg_hugepage}) must be at least page size (2^${LG_PAGE})])
[all …]
/external/jemalloc/
configure.ac:1203 [LG_PAGE="$with_lg_page"], [LG_PAGE="detect"])
1204 if test "x$LG_PAGE" = "xdetect"; then
1205 AC_CACHE_CHECK([LG_PAGE],
1247 LG_PAGE="${je_cv_lg_page}"
1249 if test "x${LG_PAGE}" != "xundefined" ; then
1250 AC_DEFINE_UNQUOTED([LG_PAGE], [$LG_PAGE]) definition
1252 AC_MSG_ERROR([cannot determine value for LG_PAGE])
1258 [LG_PAGE_SIZES="$with_lg_page_sizes"], [LG_PAGE_SIZES="$LG_PAGE"])
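Both configure.ac fragments above detect LG_PAGE at build time, unless --with-lg-page overrides it, by probing the system page size and taking its base-2 log. The same computation at run time looks like this (a sketch using POSIX sysconf, not jemalloc code):

#include <stdio.h>
#include <unistd.h>

int main(void) {
	long page = sysconf(_SC_PAGESIZE);  /* e.g. 4096 */
	unsigned lg_page = 0;
	while ((1L << lg_page) < page)      /* lg_page = log2(page size) */
		lg_page++;
	printf("page size %ld -> LG_PAGE %u\n", page, lg_page);
	return 0;
}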
