Home
last modified time | relevance | path

Searched refs:VG_MIN_MALLOC_SZB (Results 1 – 5 of 5) sorted by relevance

/external/valgrind/coregrind/m_replacemalloc/
replacemalloc_core.c:53 UInt VG_(clo_alignment) = VG_MIN_MALLOC_SZB;
59 if (VG_(clo_alignment) < VG_MIN_MALLOC_SZB || in VG_()
65 VG_MIN_MALLOC_SZB); in VG_()
91 if (VG_MIN_MALLOC_SZB == align) in VG_()
vg_replace_malloc.c:820 if (alignment < VG_MIN_MALLOC_SZB) \
821 alignment = VG_MIN_MALLOC_SZB; \
846 if (alignment < VG_MIN_MALLOC_SZB) \
847 alignment = VG_MIN_MALLOC_SZB; \
/external/valgrind/coregrind/
pub_core_mallocfree.h:70 # define VG_MIN_MALLOC_SZB 8 macro
86 # define VG_MIN_MALLOC_SZB 16 macro
m_mallocfree.c:183 UByte padding[ VG_MIN_MALLOC_SZB -
185 VG_MIN_MALLOC_SZB) ];
585 while (0 != overhead_szB_lo(a) % VG_MIN_MALLOC_SZB) a->rz_szB++; in arena_init()
721 VG_(clo_profile_heap) ? VG_MIN_MALLOC_SZB : 0; in ensure_mm_init()
873 vg_assert(0 == (Addr)sb % VG_MIN_MALLOC_SZB); in newSuperblock()
1017 SizeT n = pszB / VG_MIN_MALLOC_SZB; in pszB_to_listNo()
1018 vg_assert(0 == pszB % VG_MIN_MALLOC_SZB); in pszB_to_listNo()
1090 pszB += VG_MIN_MALLOC_SZB; in listNo_to_pszB_min()
1648 SizeT n = VG_MIN_MALLOC_SZB-1; in align_req_pszB()
1837 vg_assert( (((Addr)v) & (VG_MIN_MALLOC_SZB-1)) == 0 ); in VG_()
[all …]
m_main.c:309 VG_(sprintf)(default_alignment, "%d", VG_MIN_MALLOC_SZB); in usage_NORETURN()