
Searched defs:usize (Results 1 – 25 of 43) sorted by relevance
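Note: most of the hits below come from jemalloc's internal convention of naming a size_t parameter or local variable usize, short for usable size (the requested size after rounding up to the size class actually allocated). The /external/flatbuffers/rust/ hits match the same symbol only because usize is Rust's pointer-sized unsigned integer type.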


/external/jemalloc/src/
valgrind.c 9 valgrind_make_mem_noaccess(void *ptr, size_t usize) in valgrind_make_mem_noaccess()
16 valgrind_make_mem_undefined(void *ptr, size_t usize) in valgrind_make_mem_undefined()
23 valgrind_make_mem_defined(void *ptr, size_t usize) in valgrind_make_mem_defined()
30 valgrind_freelike_block(void *ptr, size_t usize) in valgrind_freelike_block()
huge.c 43 huge_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero) in huge_malloc()
52 huge_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, in huge_palloc()
133 huge_dalloc_junk(void *ptr, size_t usize) in huge_dalloc_junk()
155 size_t usize, usize_next; in huge_ralloc_no_move_similar() local
216 size_t usize) in huge_ralloc_no_move_shrink()
270 size_t usize, bool zero) { in huge_ralloc_no_move_expand()
369 huge_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize, in huge_ralloc_move_helper()
380 size_t usize, size_t alignment, bool zero, tcache_t *tcache) in huge_ralloc()
jemalloc.c 1537 ialloc_prof_sample(tsd_t *tsd, size_t usize, szind_t ind, bool zero,
1557 ialloc_prof(tsd_t *tsd, size_t usize, szind_t ind, bool zero, bool slow_path)
1583 ialloc_body(size_t size, bool zero, tsdn_t **tsdn, size_t *usize,
1615 ialloc_post_check(void *ret, tsdn_t *tsdn, size_t usize, const char *func,
1663 imemalign_prof_sample(tsd_t *tsd, size_t alignment, size_t usize,
1683 imemalign_prof(tsd_t *tsd, size_t alignment, size_t usize)
1708 size_t usize; local
1840 irealloc_prof_sample(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize,
1859 irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize)
1885 size_t usize; local
[all …]
arena.c 811 arena_huge_malloc_stats_update(arena_t *arena, size_t usize) in arena_huge_malloc_stats_update()
824 arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize) in arena_huge_malloc_stats_update_undo()
837 arena_huge_dalloc_stats_update(arena_t *arena, size_t usize) in arena_huge_dalloc_stats_update()
850 arena_huge_reset_stats_cancel(arena_t *arena, size_t usize) in arena_huge_reset_stats_cancel()
861 arena_huge_dalloc_stats_update_undo(arena_t *arena, size_t usize) in arena_huge_dalloc_stats_update_undo()
874 arena_huge_ralloc_stats_update(arena_t *arena, size_t oldsize, size_t usize) in arena_huge_ralloc_stats_update()
883 size_t usize) in arena_huge_ralloc_stats_update_undo()
918 chunk_hooks_t *chunk_hooks, size_t usize, size_t alignment, size_t *sn, in arena_chunk_alloc_huge_hard()
941 arena_chunk_alloc_huge(tsdn_t *tsdn, arena_t *arena, size_t usize, in arena_chunk_alloc_huge()
970 arena_chunk_dalloc_huge(tsdn_t *tsdn, arena_t *arena, void *chunk, size_t usize, in arena_chunk_dalloc_huge()
[all …]
ckh.c 266 size_t usize; in ckh_grow() local
306 size_t usize; in ckh_shrink() local
355 size_t mincells, usize; in ckh_new() local
/external/jemalloc/include/jemalloc/internal/
valgrind.h 16 #define JEMALLOC_VALGRIND_MAKE_MEM_NOACCESS(ptr, usize) do { \ argument
20 #define JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize) do { \ argument
24 #define JEMALLOC_VALGRIND_MAKE_MEM_DEFINED(ptr, usize) do { \ argument
33 #define JEMALLOC_VALGRIND_MALLOC(cond, tsdn, ptr, usize, zero) do { \ argument
51 #define JEMALLOC_VALGRIND_REALLOC(moved, tsdn, ptr, usize, ptr_null, \ argument
97 #define JEMALLOC_VALGRIND_MAKE_MEM_NOACCESS(ptr, usize) do {} while (0) argument
98 #define JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize) do {} while (0) argument
99 #define JEMALLOC_VALGRIND_MAKE_MEM_DEFINED(ptr, usize) do {} while (0) argument
100 #define JEMALLOC_VALGRIND_MALLOC(cond, tsdn, ptr, usize, zero) do {} while (0) argument
101 #define JEMALLOC_VALGRIND_REALLOC(maybe_moved, tsdn, ptr, usize, \ argument
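The valgrind.h hits above show each hook defined twice: an active form when Valgrind support is compiled in, and an empty do { } while (0) form otherwise, so call sites always expand to a single well-formed statement. A minimal standalone sketch of that pattern (the DEMO_* names are illustrative, not jemalloc's actual macros):

/* Hypothetical demo of the conditional hook-macro pattern seen in valgrind.h. */
#include <stdio.h>
#include <stddef.h>

#ifdef DEMO_HAVE_VALGRIND
#define	DEMO_MAKE_MEM_DEFINED(ptr, usize) do {				\
	printf("mark %zu bytes at %p as defined\n", (size_t)(usize),	\
	    (void *)(ptr));						\
} while (0)
#else
/* Disabled build: the hook collapses to an empty statement. */
#define	DEMO_MAKE_MEM_DEFINED(ptr, usize) do {} while (0)
#endif

int
main(void)
{
	char buf[32];

	(void)buf;	/* Silence unused warning when the hook is compiled out. */
	DEMO_MAKE_MEM_DEFINED(buf, sizeof(buf));
	return (0);
}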
prof.h 411 prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx) in prof_tctx_set()
421 prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize, const void *old_ptr, in prof_tctx_reset()
432 prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update, in prof_sample_accum_update()
462 prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) in prof_alloc_prep()
483 prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx) in prof_malloc()
497 prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx, in prof_realloc()
534 prof_free(tsd_t *tsd, const void *ptr, size_t usize) in prof_free()
jemalloc_internal.h.in 659 size_t usize = (psz + delta_mask) & ~delta_mask; local
739 size_t usize = grp_size + mod_size; local
780 size_t usize = (size + delta_mask) & ~delta_mask; local
815 size_t usize; local
1078 ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
1098 ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
1106 ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero)
1130 u2rz(size_t usize)
1146 size_t usize = isalloc(tsdn, ptr, false); local
1207 size_t usize, copysize; local
jemalloc_internal.h 659 size_t usize = (psz + delta_mask) & ~delta_mask; in psz2u() local
739 size_t usize = grp_size + mod_size; in index2size_compute() local
780 size_t usize = (size + delta_mask) & ~delta_mask; in s2u_compute() local
815 size_t usize; in sa2u() local
1078 ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, in ipallocztm()
1098 ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, in ipalloct()
1106 ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) in ipalloc()
1130 u2rz(size_t usize) in u2rz()
1146 size_t usize = isalloc(tsdn, ptr, false); in p2rz() local
1207 size_t usize, copysize; in iralloct_realign() local
quarantine.h 16 size_t usize; member
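Several of the hits above (psz2u, s2u_compute, sa2u, and the sz_* variants further down) compute usize by rounding a request up to the next size-class boundary with a bit mask. A self-contained sketch of that rounding step, assuming a power-of-two class spacing (the helper name is made up for illustration, not jemalloc's):

#include <stdio.h>
#include <stddef.h>

/* Round "size" up to the next multiple of "delta"; delta must be a power of two. */
static size_t
round_up_usize(size_t size, size_t delta)
{
	size_t delta_mask = delta - 1;

	return ((size + delta_mask) & ~delta_mask);
}

int
main(void)
{
	/* With 64-byte class spacing: 100 rounds up to 128, 128 stays 128. */
	printf("%zu %zu\n", round_up_usize(100, 64), round_up_usize(128, 64));
	return (0);
}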
/external/jemalloc_new/include/jemalloc/internal/
prof_inlines_b.h 48 prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize, in prof_tctx_set()
65 prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update, in prof_sample_accum_update()
102 prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) { in prof_alloc_prep()
122 prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx, in prof_malloc()
137 prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx, in prof_realloc()
195 prof_free(tsd_t *tsd, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx) { in prof_free()
sz.h 119 size_t usize = (psz + delta_mask) & ~delta_mask; in sz_psz2u() local
194 size_t usize = grp_size + mod_size; in sz_index2size_compute() local
231 size_t usize = (size + delta_mask) & ~delta_mask; in sz_s2u_compute() local
263 size_t usize; in sz_sa2u() local
jemalloc_internal_inlines_c.h 67 ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, in ipallocztm()
87 ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero, in ipalloct()
93 ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) { in ipalloc()
141 size_t usize, copysize; in iralloct_realign() local
prof_inlines_a.h 45 prof_accum_cancel(tsdn_t *tsdn, prof_accum_t *prof_accum, size_t usize) { in prof_accum_cancel()
/external/flatbuffers/rust/flatbuffers/src/
builder.rs 120 pub fn collapse(self) -> (Vec<u8>, usize) { in collapse() argument
164 pub fn num_written_vtables(&self) -> usize { in num_written_vtables()
204 pub fn start_vector<T: Push>(&mut self, num_items: usize) { in start_vector()
354 fn used_space(&self) -> usize { in used_space()
560 fn align(&mut self, len: usize, alignment: PushAlignment) { in align()
567 fn track_min_align(&mut self, alignment: usize) { in track_min_align()
580 fn make_space(&mut self, want: usize) -> usize { in make_space()
587 fn ensure_capacity(&mut self, want: usize) -> usize { in ensure_capacity()
600 fn unused_ready_space(&self) -> usize { in unused_ready_space()
628 fn get_vtable_byte_len(field_locs: &[FieldLoc]) -> usize { in get_vtable_byte_len()
[all …]
vtable.rs 42 pub fn num_fields(&self) -> usize { in num_fields()
45 pub fn num_bytes(&self) -> usize { in num_bytes()
48 pub fn object_inline_num_bytes(&self) -> usize { in object_inline_num_bytes()
push.rs 47 pub fn value(&self) -> usize { in value()
/external/jemalloc/test/unit/
junk.c 41 arena_dalloc_junk_large_intercept(void *ptr, size_t usize) in arena_dalloc_junk_large_intercept()
56 huge_dalloc_junk_intercept(void *ptr, size_t usize) in huge_dalloc_junk_intercept()
171 arena_ralloc_junk_large_intercept(void *ptr, size_t old_usize, size_t usize) in arena_ralloc_junk_large_intercept()
203 arena_redzone_corruption_replacement(void *ptr, size_t usize, bool after, in arena_redzone_corruption_replacement()
/external/jemalloc_new/src/
large.c 14 large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero) { in large_malloc()
21 large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, in large_palloc()
104 large_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, size_t usize) { in large_ralloc_no_move_shrink()
139 large_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, size_t usize, in large_ralloc_no_move_expand()
272 large_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize, in large_ralloc_move_helper()
281 large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize, in large_ralloc()
jemalloc.c 1689 size_t size, size_t usize, szind_t ind) {
1731 size_t usize, szind_t ind) {
1818 size_t usize = 0; local
2162 irealloc_prof_sample(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize,
2183 irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize,
2226 size_t usize; local
2247 isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) {
2612 size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena,
2636 size_t alignment, size_t *usize, bool zero, tcache_t *tcache,
2680 size_t usize; local
[all …]
arena.c 289 arena_large_malloc_stats_update(tsdn_t *tsdn, arena_t *arena, size_t usize) { in arena_large_malloc_stats_update()
305 arena_large_dalloc_stats_update(tsdn_t *tsdn, arena_t *arena, size_t usize) { in arena_large_dalloc_stats_update()
322 size_t usize) { in arena_large_ralloc_stats_update()
328 arena_extent_alloc_large(tsdn_t *tsdn, arena_t *arena, size_t usize, in arena_extent_alloc_large()
393 size_t usize = extent_usize_get(extent); in arena_extent_ralloc_large_shrink() local
407 size_t usize = extent_usize_get(extent); in arena_extent_ralloc_large_expand() local
979 size_t usize; in arena_reset() local
1324 size_t usize; in arena_malloc_small() local
1393 arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment, in arena_palloc()
1413 arena_prof_promote(tsdn_t *tsdn, const void *ptr, size_t usize) { in arena_prof_promote()
[all …]
ckh.c 274 size_t usize; in ckh_grow() local
313 size_t usize; in ckh_shrink() local
362 size_t mincells, usize; in ckh_new() local
/external/jemalloc_new/test/unit/
junk.c 33 large_dalloc_junk_intercept(void *ptr, size_t usize) { in large_dalloc_junk_intercept()
48 large_dalloc_maybe_junk_intercept(void *ptr, size_t usize) { in large_dalloc_maybe_junk_intercept()
/external/jemalloc/test/integration/
allocated.c 18 size_t sz, usize; in thd_start() local
/external/jemalloc_new/test/integration/
allocated.c 17 size_t sz, usize; in thd_start() local
