Matches for the jemalloc ZU() size_t-cast macro, grouped by directory; each hit is listed as source line number, matched code, and the enclosing function where the indexer reported one.

/external/jemalloc_new/include/jemalloc/internal/

sz.h
    64:  size_t delta_inverse_mask = ZU(-1) << lg_delta;  in sz_psz2ind()
    66:  ((ZU(1) << LG_SIZE_CLASS_GROUP) - 1);  in sz_psz2ind()
    80:  size_t mod = pind & ((ZU(1) << LG_SIZE_CLASS_GROUP) - 1);  in sz_pind2sz_compute()
    83:  size_t grp_size = ((ZU(1) << (LG_PAGE +  in sz_pind2sz_compute()
    117: size_t delta = ZU(1) << lg_delta;  in sz_psz2u()
    130: if (size <= (ZU(1) << LG_TINY_MAXCLASS)) {  in sz_size2index_compute()
    145: size_t delta_inverse_mask = ZU(-1) << lg_delta;  in sz_size2index_compute()
    147: ((ZU(1) << LG_SIZE_CLASS_GROUP) - 1);  in sz_size2index_compute()
    177: return (ZU(1) << (LG_TINY_MAXCLASS - NTBINS + 1 + index));  in sz_index2size_compute()
    183: size_t mod = reduced_index & ((ZU(1) << LG_SIZE_CLASS_GROUP) -  in sz_index2size_compute()
    [all …]

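The sz.h matches above all implement the same size-class arithmetic: every size belongs to a power-of-two group, the group is split into 2^LG_SIZE_CLASS_GROUP classes spaced 2^lg_delta apart, and ZU() keeps each shift at full size_t width. A minimal standalone sketch of that rounding, assuming jemalloc's default of four classes per doubling (hypothetical names, not the sz.h API):

    #include <stddef.h>
    #include <stdio.h>

    #define ZU(z) ((size_t)z)

    /* Index of the highest set bit (portable stand-in for lg_floor()). */
    static unsigned lg_floor_zu(size_t x) {
        unsigned lg = 0;
        while (x >>= 1) {
            lg++;
        }
        return lg;
    }

    /* Round `size` up to its class boundary: a multiple of 2^lg_delta,
     * where each power-of-two group holds 2^lg_size_class_group classes. */
    static size_t class_round_up(size_t size, unsigned lg_size_class_group) {
        unsigned x = lg_floor_zu((size << 1) - 1);   /* lg of smallest power of two >= size */
        unsigned lg_delta = (x <= lg_size_class_group) ? 0 : x - lg_size_class_group - 1;
        size_t delta_mask = (ZU(1) << lg_delta) - 1;
        return (size + delta_mask) & ~delta_mask;
    }

    int main(void) {
        /* With four classes per doubling, sizes in (4096, 8192] round to
         * 5120, 6144, 7168, or 8192. */
        printf("%zu\n", class_round_up(5000, 2));    /* prints 5120 */
        return 0;
    }
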
bitmap.h
    19:  #define BITMAP_MAXBITS (ZU(1) << LG_BITMAP_MAXBITS)
    202: return !(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));  in bitmap_get()
    216: assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));  in bitmap_set()
    217: g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);  in bitmap_set()
    229: assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));  in bitmap_set()
    230: g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);  in bitmap_set()
    268: size_t sib_base = bit + (ZU(1) << lg_bits_per_group);  in bitmap_ffu()
    343: assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK))) == 0);  in bitmap_unset()
    344: g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);  in bitmap_unset()
    357: assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)))  in bitmap_unset()
    [all …]

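In bitmap.h, bits live in an array of size_t "groups": bit & BITMAP_GROUP_NBITS_MASK picks the position inside a group and ZU(1) << that position builds the single-bit mask (jemalloc additionally stores groups inverted, which is why bitmap_get() negates the masked value). A small sketch of the same group/bit indexing using the plainer 1-means-set convention (illustrative names, not the bitmap.h API):

    #include <limits.h>
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    #define ZU(z) ((size_t)z)
    #define GROUP_NBITS       (sizeof(size_t) * CHAR_BIT)
    #define GROUP_NBITS_MASK  (GROUP_NBITS - 1)

    /* Set bit `bit` in a bitmap packed into size_t groups. */
    static void bit_set(size_t *groups, size_t bit) {
        groups[bit / GROUP_NBITS] |= ZU(1) << (bit & GROUP_NBITS_MASK);
    }

    /* Test bit `bit`. */
    static bool bit_get(const size_t *groups, size_t bit) {
        return (groups[bit / GROUP_NBITS] >> (bit & GROUP_NBITS_MASK)) & 1;
    }

    int main(void) {
        size_t groups[2] = {0, 0};   /* room for 128 bits on a 64-bit build */
        bit_set(groups, 70);
        printf("%d %d\n", bit_get(groups, 70), bit_get(groups, 71));   /* 1 0 */
        return 0;
    }

The & mask only works because the group width is a power of two, which is the same property BITMAP_GROUP_NBITS_MASK relies on.
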
jemalloc_internal_macros.h
    15: #define ZU(z) ((size_t)z)
    20: #define KZU(z) ZU(z##ULL)

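These two macros are what every other match in this listing leans on: ZU() casts its argument to size_t so that shifts are evaluated at full size_t width, and KZU() does the same for a pasted ULL literal. A quick sketch of why the cast matters, assuming an LP64 target with 32-bit int:

    #include <stddef.h>
    #include <stdio.h>

    #define ZU(z) ((size_t)z)   /* same definition as line 15 above */

    int main(void) {
        /* Plain (1 << 40) would shift a 32-bit int, which is undefined
         * behavior; promoting to size_t first keeps the shift well defined. */
        size_t bytes = ZU(1) << 40;      /* 2^40, representable in a 64-bit size_t */
        printf("%zu\n", bytes);
        return 0;
    }
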
prng.h
    108: assert(lg_range <= ZU(1) << (3 + LG_SIZEOF_PTR));  in prng_lg_range_zu()
    121: ret = state1 >> ((ZU(1) << (3 + LG_SIZEOF_PTR)) - lg_range);  in prng_lg_range_zu()

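prng_lg_range_zu() keeps only the top lg_range bits of the updated state, and ZU(1) << (3 + LG_SIZEOF_PTR) is the bit width of size_t (jemalloc assumes size_t and void * have the same width). A hedged sketch of that high-bits extraction with LG_SIZEOF_PTR fixed at 3 for a 64-bit build (not the prng.h API):

    #include <assert.h>
    #include <stddef.h>
    #include <stdio.h>

    #define ZU(z) ((size_t)z)
    #define LG_SIZEOF_PTR 3                              /* assumption: 64-bit pointers */
    #define SIZE_T_BITS   (ZU(1) << (3 + LG_SIZEOF_PTR)) /* 8 * 8 = 64 */

    /* Keep the top lg_range bits of a full-width pseudo-random state. */
    static size_t top_bits(size_t state, unsigned lg_range) {
        assert(lg_range > 0 && lg_range <= SIZE_T_BITS);
        return state >> (SIZE_T_BITS - lg_range);
    }

    int main(void) {
        size_t state = (size_t)0xdeadbeefcafef00dULL;
        printf("%#zx\n", top_bits(state, 8));            /* top byte: 0xde */
        return 0;
    }
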
rtree.h
    133: unsigned ptrbits = ZU(1) << (LG_SIZEOF_PTR+3);  in rtree_leafkey()
    137: uintptr_t mask = ~((ZU(1) << maskbits) - 1);  in rtree_leafkey()
    143: unsigned ptrbits = ZU(1) << (LG_SIZEOF_PTR+3);  in rtree_cache_direct_map()
    152: unsigned ptrbits = ZU(1) << (LG_SIZEOF_PTR+3);  in rtree_subkey()
    156: uintptr_t mask = (ZU(1) << maskbits) - 1;  in rtree_subkey()

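rtree_leafkey() and rtree_subkey() slice a pointer-sized key into per-level chunks: ptrbits is the key width in bits and (ZU(1) << maskbits) - 1 is the per-level mask. A sketch of pulling one such chunk out of a key, with the width fixed at 64 bits and illustrative names (not the rtree API):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ZU(z) ((size_t)z)
    #define LG_SIZEOF_PTR 3                              /* assumption: 64-bit */
    #define PTRBITS       (ZU(1) << (LG_SIZEOF_PTR + 3)) /* 64 key bits */

    /* Extract `bits` key bits ending `cumbits` bits below the top of the
     * key, the same shift-and-mask shape as rtree_subkey(). */
    static uintptr_t subkey(uintptr_t key, unsigned cumbits, unsigned bits) {
        unsigned shiftbits = (unsigned)(PTRBITS - cumbits);
        uintptr_t mask = (ZU(1) << bits) - 1;
        return (key >> shiftbits) & mask;
    }

    int main(void) {
        uintptr_t key = (uintptr_t)0x1234567890abcdefULL;
        /* Top 17 bits of the key, then the next 17: a two-level split. */
        printf("%#lx %#lx\n", (unsigned long)subkey(key, 17, 17),
            (unsigned long)subkey(key, 34, 17));
        return 0;
    }
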
jemalloc_internal_types.h
    39: (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK))

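This single match is the decode side of MALLOCX_LG_ALIGN(): the requested alignment is carried in the low flag bits as its base-2 log and recovered with ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK). A small round-trip sketch; the mask value 0x3f matches jemalloc's public headers but should be treated as an assumption here:

    #include <stddef.h>
    #include <stdio.h>

    #define ZU(z) ((size_t)z)
    #define MALLOCX_LG_ALIGN(la)  ((int)(la))   /* encode: store lg(alignment) in the flags */
    #define MALLOCX_LG_ALIGN_MASK ((int)0x3f)   /* low 6 bits of the flags word */

    /* Decode the alignment packed into the low flag bits. */
    static size_t mallocx_align_get(int flags) {
        return ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK);
    }

    int main(void) {
        int flags = MALLOCX_LG_ALIGN(12);               /* ask for 4096-byte alignment */
        printf("%zu\n", mallocx_align_get(flags));      /* 4096 */
        return 0;
    }
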
size_classes.h
    188:  #define LARGE_MINCLASS (ZU(1) << LG_LARGE_MINCLASS)
    337:  #define LARGE_MINCLASS (ZU(1) << LG_LARGE_MINCLASS)
    484:  #define LARGE_MINCLASS (ZU(1) << LG_LARGE_MINCLASS)
    796:  #define LARGE_MINCLASS (ZU(1) << LG_LARGE_MINCLASS)
    1105: #define LARGE_MINCLASS (ZU(1) << LG_LARGE_MINCLASS)
    1412: #define LARGE_MINCLASS (ZU(1) << LG_LARGE_MINCLASS)

/external/jemalloc_new/test/integration/

rallocx.c
    50:  #define MAXSZ ZU(12 * 1024 * 1024)  in TEST_BEGIN()
    153: #define MAX_ALIGN (ZU(1) << 25)  in TEST_BEGIN()
    155: align = ZU(1);  in TEST_BEGIN()
    179: #define MAX_VALIDATE (ZU(1) << 22)  in TEST_BEGIN()
    190: (void *)((uintptr_t)q & ((ZU(1) << lg_align)-1)),  in TEST_BEGIN()
    223: assert_ptr_null(rallocx(p, ZU(PTRDIFF_MAX)+1, 0),  in TEST_BEGIN()
    224: "Expected OOM for rallocx(p, size=%#zx, 0)", ZU(PTRDIFF_MAX)+1);  in TEST_BEGIN()
    229: assert_ptr_null(rallocx(p, 1, MALLOCX_ALIGN(ZU(PTRDIFF_MAX)+1)),  in TEST_BEGIN()
    231: ZU(PTRDIFF_MAX)+1);  in TEST_BEGIN()

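The rallocx tests verify every returned pointer against the requested alignment by masking it with (ZU(1) << lg_align) - 1; any nonzero low bits mean the pointer is misaligned. A standalone sketch of that check (plain malloc is used only for illustration, so only small alignments are guaranteed to hold):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define ZU(z) ((size_t)z)

    /* True if `p` is aligned to 2^lg_align bytes, the same mask test the
     * rallocx tests apply to each returned pointer. */
    static int is_aligned(const void *p, unsigned lg_align) {
        return ((uintptr_t)p & ((ZU(1) << lg_align) - 1)) == 0;
    }

    int main(void) {
        void *p = malloc(64);
        if (p != NULL) {
            printf("aligned to 8: %d, aligned to 16: %d\n",
                is_aligned(p, 3), is_aligned(p, 4));
            free(p);
        }
        return 0;
    }
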
mallocx.c
    62: assert_ptr_null(mallocx(ZU(PTRDIFF_MAX)+1, 0),  in TEST_BEGIN()
    63: "Expected OOM for mallocx(size=%#zx, 0)", ZU(PTRDIFF_MAX)+1);  in TEST_BEGIN()
    68: assert_ptr_null(mallocx(1, MALLOCX_ALIGN(ZU(PTRDIFF_MAX)+1)),  in TEST_BEGIN()
    70: ZU(PTRDIFF_MAX)+1);  in TEST_BEGIN()

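Both mallocx.c and rallocx.c probe the overflow boundary with ZU(PTRDIFF_MAX)+1: a request one byte past PTRDIFF_MAX must fail cleanly rather than wrap. A hedged sketch of the same guard wrapped around plain malloc (the boundary check only, not jemalloc's internal handling):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define ZU(z) ((size_t)z)

    /* Refuse requests larger than PTRDIFF_MAX, the boundary the tests
     * above exercise with ZU(PTRDIFF_MAX)+1. */
    static void *checked_alloc(size_t size) {
        if (size > ZU(PTRDIFF_MAX)) {
            return NULL;            /* pointer differences would overflow ptrdiff_t */
        }
        return malloc(size);
    }

    int main(void) {
        void *p = checked_alloc(ZU(PTRDIFF_MAX) + 1);
        printf("%s\n", p == NULL ? "rejected as expected" : "unexpected success");
        free(p);
        return 0;
    }
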
/external/jemalloc_new/test/unit/

prng.c
    83:  ra = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);  in test_prng_lg_range_zu()
    85:  rb = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);  in test_prng_lg_range_zu()
    90:  rb = prng_lg_range_zu(&sb, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);  in test_prng_lg_range_zu()
    95:  ra = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);  in test_prng_lg_range_zu()
    96:  rb = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);  in test_prng_lg_range_zu()
    101: ra = prng_lg_range_zu(&sa, ZU(1) << (3 + LG_SIZEOF_PTR), atomic);  in test_prng_lg_range_zu()
    102: for (lg_range = (ZU(1) << (3 + LG_SIZEOF_PTR)) - 1; lg_range > 0;  in test_prng_lg_range_zu()
    108: assert_zu_eq(rb, (ra >> ((ZU(1) << (3 + LG_SIZEOF_PTR)) -  in test_prng_lg_range_zu()

size_classes.c
    147: assert_u_eq(sz_size2index(ZU(PTRDIFF_MAX)+1), NSIZES,  in TEST_BEGIN()
    154: assert_zu_eq(sz_s2u(ZU(PTRDIFF_MAX)+1), 0,  in TEST_BEGIN()
    161: assert_u_eq(sz_psz2ind(ZU(PTRDIFF_MAX)+1), NPSIZES,  in TEST_BEGIN()
    169: assert_zu_eq(sz_psz2u(ZU(PTRDIFF_MAX)+1), max_psz,  in TEST_BEGIN()

ckh.c
    36:  "ckh_count() should return %zu, but it returned %zu", ZU(0),  in TEST_BEGIN()
    107: #define NITEMS ZU(1000)  in TEST_BEGIN()
    199: ZU(0), ckh_count(&ckh));  in TEST_BEGIN()

pack.c
    7: #define SZ (ZU(1) << (LG_PAGE - 2))
    9: #define SZ ZU(4096)

zero.c
    18: ZU(0), sz_prev);  in test_zero()

junk.c
    78: ZU(0), sz_prev);  in test_junk()

extent_quantize.c
    102: #define SZ_MAX ZU(4 * 1024 * 1024)  in TEST_BEGIN()

/external/jemalloc_new/src/

ckh.c
    67:  for (i = 0; i < (ZU(1) << LG_CKH_BUCKET_CELLS); i++) {  in ckh_bucket_search()
    89:  bucket = hashes[0] & ((ZU(1) << ckh->lg_curbuckets) - 1);  in ckh_isearch()
    96:  bucket = hashes[1] & ((ZU(1) << ckh->lg_curbuckets) - 1);  in ckh_isearch()
    113: for (i = 0; i < (ZU(1) << LG_CKH_BUCKET_CELLS); i++) {  in ckh_try_bucket_insert()
    115: ((i + offset) & ((ZU(1) << LG_CKH_BUCKET_CELLS) - 1))];  in ckh_try_bucket_insert()
    169: tbucket = hashes[1] & ((ZU(1) << ckh->lg_curbuckets) - 1);  in ckh_evict_reloc_insert()
    171: tbucket = hashes[0] & ((ZU(1) << ckh->lg_curbuckets)  in ckh_evict_reloc_insert()
    213: bucket = hashes[0] & ((ZU(1) << ckh->lg_curbuckets) - 1);  in ckh_try_insert()
    219: bucket = hashes[1] & ((ZU(1) << ckh->lg_curbuckets) - 1);  in ckh_try_insert()
    389: (ZU(1) << lg_mincells) < mincells;  in ckh_new()
    [all …]

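The cuckoo-hash matches reduce a full hash to a bucket index with hash & ((ZU(1) << lg_curbuckets) - 1), i.e. modulo a power-of-two table size without a division. A small sketch of that reduction (illustrative, not the ckh API):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ZU(z) ((size_t)z)

    /* Map a hash onto one of 2^lg_buckets buckets by masking off the high
     * bits, as ckh_isearch() and ckh_try_insert() do above. */
    static size_t bucket_of(uint64_t hash, unsigned lg_buckets) {
        return (size_t)hash & ((ZU(1) << lg_buckets) - 1);
    }

    int main(void) {
        uint64_t hash = 0x9e3779b97f4a7c15ULL;   /* stand-in for a real hash value */
        printf("bucket %zu of %zu\n", bucket_of(hash, 10), ZU(1) << 10);
        return 0;
    }
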
sz.c
    7:  (((ZU(1)<<lg_grp) + (ZU(ndelta)<<lg_delta))),
    21: ((ZU(1)<<lg_grp) + (ZU(ndelta)<<lg_delta)),

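In sz.c each table entry is generated from a (lg_grp, ndelta, lg_delta) triple as (ZU(1) << lg_grp) + (ZU(ndelta) << lg_delta): the base of the power-of-two group plus ndelta steps of the group's delta. A worked sketch of the formula; the triples below are chosen for illustration, while the real table comes from the SIZE_CLASSES macro:

    #include <stddef.h>
    #include <stdio.h>

    #define ZU(z) ((size_t)z)

    /* One size-class entry: group base plus ndelta deltas. */
    static size_t class_size(unsigned lg_grp, unsigned ndelta, unsigned lg_delta) {
        return (ZU(1) << lg_grp) + (ZU(ndelta) << lg_delta);
    }

    int main(void) {
        /* The group based at 4096 with a 1024-byte delta yields the
         * classes 5120, 6144, 7168, 8192. */
        for (unsigned ndelta = 1; ndelta <= 4; ndelta++) {
            printf("%zu\n", class_size(12, ndelta, 10));
        }
        return 0;
    }
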
bitmap.c
    17: assert(nbits <= (ZU(1) << LG_BITMAP_MAXBITS));  in bitmap_info_init()
    88: assert(nbits <= (ZU(1) << LG_BITMAP_MAXBITS));  in bitmap_info_init()

rtree.c
    65:  size_t nchildren = ZU(1) << rtree_levels[level].bits;  in rtree_delete_subtree()
    111: node = rtree_node_alloc(tsdn, rtree, ZU(1) <<  in rtree_node_init()
    137: leaf = rtree_leaf_alloc(tsdn, rtree, ZU(1) <<  in rtree_leaf_init()

tcache.c
    674: if (opt_lg_tcache_max < 0 || (ZU(1) << opt_lg_tcache_max) <  in tcache_boot()
    678: tcache_maxclass = (ZU(1) << opt_lg_tcache_max);  in tcache_boot()

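tcache_boot() converts the signed opt.lg_tcache_max option into a byte cap with ZU(1) << opt_lg_tcache_max, falling back to a floor when the option is negative or too small. A quick sketch of that conversion; the floor value here is illustrative rather than jemalloc's SMALL_MAXCLASS:

    #include <stddef.h>
    #include <stdio.h>

    #define ZU(z) ((size_t)z)

    /* Turn a signed lg(bytes) option into a byte cap, clamping to `floor`
     * the way tcache_boot() clamps tcache_maxclass. */
    static size_t lg_opt_to_bytes(long lg_max, size_t floor) {
        if (lg_max < 0 || (ZU(1) << lg_max) < floor) {
            return floor;
        }
        return ZU(1) << lg_max;
    }

    int main(void) {
        printf("%zu %zu\n",
            lg_opt_to_bytes(15, ZU(1) << 14),    /* 32768 */
            lg_opt_to_bytes(-1, ZU(1) << 14));   /* clamped to 16384 */
        return 0;
    }
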
/external/ImageMagick/PerlMagick/t/reference/filter/

Level.miff
    340, 347: incidental matches inside binary image data (binary bytes omitted)

Minify.miff
    15: incidental match inside binary image data (binary bytes omitted)

Scale.miff
    15: incidental match inside binary image data (binary bytes omitted)

/external/icu/icu4c/source/data/translit/

ja_Hrkt_ja_Latn_BGN.txt
    158: ズ → zu ; # KATAKANA LETTER ZU
    301: ず → zu ; # HIRAGANA LETTER ZU