Lines matching refs: m — a cross-reference listing over Doug Lea's malloc.c (dlmalloc 2.8.x) showing every line that references an mstate m, annotated at the right with the enclosing function and the kind of occurrence (argument/local).

2698 static msegmentptr segment_holding(mstate m, char* addr) {  in segment_holding()  argument
2699 msegmentptr sp = &m->seg; in segment_holding()
2709 static int has_segment_link(mstate m, msegmentptr ss) { in has_segment_link() argument
2710 msegmentptr sp = &m->seg; in has_segment_link()
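
Both helpers walk the singly linked list of memory segments rooted at m->seg; the listing elides the loop bodies. A minimal standalone sketch of that walk, with illustrative types rather than dlmalloc's own msegment (segment_holding asks "which segment contains this address?"; has_segment_link asks the same question about a segment record itself):

  #include <stddef.h>

  struct seg { char* base; size_t size; struct seg* next; };

  /* return the segment whose [base, base + size) range covers addr, or 0 */
  static struct seg* holding(struct seg* sp, char* addr) {
    for (; sp != 0; sp = sp->next)
      if (addr >= sp->base && addr < sp->base + sp->size)
        return sp;
    return 0;
  }
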
2771 static void reset_on_error(mstate m);
2773 #define CORRUPTION_ERROR_ACTION(m) reset_on_error(m) argument
2774 #define USAGE_ERROR_ACTION(m, p) argument
2779 #define CORRUPTION_ERROR_ACTION(m) ABORT argument
2783 #define USAGE_ERROR_ACTION(m,p) ABORT argument
2808 static void do_check_any_chunk(mstate m, mchunkptr p);
2809 static void do_check_top_chunk(mstate m, mchunkptr p);
2810 static void do_check_mmapped_chunk(mstate m, mchunkptr p);
2811 static void do_check_inuse_chunk(mstate m, mchunkptr p);
2812 static void do_check_free_chunk(mstate m, mchunkptr p);
2813 static void do_check_malloced_chunk(mstate m, void* mem, size_t s);
2814 static void do_check_tree(mstate m, tchunkptr t);
2815 static void do_check_treebin(mstate m, bindex_t i);
2816 static void do_check_smallbin(mstate m, bindex_t i);
2817 static void do_check_malloc_state(mstate m);
2818 static int bin_find(mstate m, mchunkptr x);
2819 static size_t traverse_and_check(mstate m);
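
The do_check_* routines above are the DEBUG-build bodies behind the check_* macros used throughout the file; in release builds those macros expand to nothing. A sketch of the dispatch pattern, mirroring dlmalloc's convention (the exact definitions sit in elided lines):

  #if !DEBUG
  #define check_inuse_chunk(M,P)                       /* compiled out */
  #else
  #define check_inuse_chunk(M,P) do_check_inuse_chunk(M,P)
  #endif
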
3233 static void do_check_any_chunk(mstate m, mchunkptr p) { in do_check_any_chunk() argument
3235 assert(ok_address(m, p)); in do_check_any_chunk()
3239 static void do_check_top_chunk(mstate m, mchunkptr p) { in do_check_top_chunk() argument
3240 msegmentptr sp = segment_holding(m, (char*)p); in do_check_top_chunk()
3244 assert(ok_address(m, p)); in do_check_top_chunk()
3245 assert(sz == m->topsize); in do_check_top_chunk()
3253 static void do_check_mmapped_chunk(mstate m, mchunkptr p) { in do_check_mmapped_chunk() argument
3257 assert(use_mmap(m)); in do_check_mmapped_chunk()
3259 assert(ok_address(m, p)); in do_check_mmapped_chunk()
3267 static void do_check_inuse_chunk(mstate m, mchunkptr p) { in do_check_inuse_chunk() argument
3268 do_check_any_chunk(m, p); in do_check_inuse_chunk()
3274 do_check_mmapped_chunk(m, p); in do_check_inuse_chunk()
3278 static void do_check_free_chunk(mstate m, mchunkptr p) { in do_check_free_chunk() argument
3281 do_check_any_chunk(m, p); in do_check_free_chunk()
3285 if (p != m->dv && p != m->top) { in do_check_free_chunk()
3291 assert (next == m->top || is_inuse(next)); in do_check_free_chunk()
3301 static void do_check_malloced_chunk(mstate m, void* mem, size_t s) { in do_check_malloced_chunk() argument
3305 do_check_inuse_chunk(m, p); in do_check_malloced_chunk()
3315 static void do_check_tree(mstate m, tchunkptr t) { in do_check_tree() argument
3328 do_check_any_chunk(m, ((mchunkptr)u)); in do_check_tree()
3349 do_check_tree(m, u->child[0]); in do_check_tree()
3354 do_check_tree(m, u->child[1]); in do_check_tree()
3366 static void do_check_treebin(mstate m, bindex_t i) { in do_check_treebin() argument
3367 tbinptr* tb = treebin_at(m, i); in do_check_treebin()
3369 int empty = (m->treemap & (1U << i)) == 0; in do_check_treebin()
3373 do_check_tree(m, t); in do_check_treebin()
3377 static void do_check_smallbin(mstate m, bindex_t i) { in do_check_smallbin() argument
3378 sbinptr b = smallbin_at(m, i); in do_check_smallbin()
3380 unsigned int empty = (m->smallmap & (1U << i)) == 0; in do_check_smallbin()
3388 do_check_free_chunk(m, p); in do_check_smallbin()
3395 do_check_inuse_chunk(m, q); in do_check_smallbin()
3401 static int bin_find(mstate m, mchunkptr x) { in bin_find() argument
3405 sbinptr b = smallbin_at(m, sidx); in bin_find()
3406 if (smallmap_is_marked(m, sidx)) { in bin_find()
3417 if (treemap_is_marked(m, tidx)) { in bin_find()
3418 tchunkptr t = *treebin_at(m, tidx); in bin_find()
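
bin_find, like the bin checks above it, consults the smallmap/treemap bitmaps in which bit i records that bin i is nonempty, so empty bins are skipped without ever touching them. A standalone sketch of that bookkeeping (names are illustrative, not dlmalloc's exact macros):

  #include <stdint.h>

  typedef uint32_t binmap_t;                         /* one bit per bin */

  #define idx2bit(i)         ((binmap_t)1 << (i))
  #define mark_bin(map, i)   ((map) |=  idx2bit(i))  /* bin i became nonempty */
  #define clear_bin(map, i)  ((map) &= ~idx2bit(i))  /* bin i became empty */
  #define bin_marked(map, i) (((map) & idx2bit(i)) != 0)
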
3437 static size_t traverse_and_check(mstate m) { in traverse_and_check() argument
3439 if (is_initialized(m)) { in traverse_and_check()
3440 msegmentptr s = &m->seg; in traverse_and_check()
3441 sum += m->topsize + TOP_FOOT_SIZE; in traverse_and_check()
3447 q != m->top && q->head != FENCEPOST_HEAD) { in traverse_and_check()
3450 assert(!bin_find(m, q)); in traverse_and_check()
3451 do_check_inuse_chunk(m, q); in traverse_and_check()
3454 assert(q == m->dv || bin_find(m, q)); in traverse_and_check()
3456 do_check_free_chunk(m, q); in traverse_and_check()
3469 static void do_check_malloc_state(mstate m) { in do_check_malloc_state() argument
3474 do_check_smallbin(m, i); in do_check_malloc_state()
3476 do_check_treebin(m, i); in do_check_malloc_state()
3478 if (m->dvsize != 0) { /* check dv chunk */ in do_check_malloc_state()
3479 do_check_any_chunk(m, m->dv); in do_check_malloc_state()
3480 assert(m->dvsize == chunksize(m->dv)); in do_check_malloc_state()
3481 assert(m->dvsize >= MIN_CHUNK_SIZE); in do_check_malloc_state()
3482 assert(bin_find(m, m->dv) == 0); in do_check_malloc_state()
3485 if (m->top != 0) { /* check top chunk */ in do_check_malloc_state()
3486 do_check_top_chunk(m, m->top); in do_check_malloc_state()
3488 assert(m->topsize > 0); in do_check_malloc_state()
3489 assert(bin_find(m, m->top) == 0); in do_check_malloc_state()
3492 total = traverse_and_check(m); in do_check_malloc_state()
3493 assert(total <= m->footprint); in do_check_malloc_state()
3494 assert(m->footprint <= m->max_footprint); in do_check_malloc_state()
3501 static struct mallinfo internal_mallinfo(mstate m) { in internal_mallinfo() argument
3504 if (!PREACTION(m)) { in internal_mallinfo()
3505 check_malloc_state(m); in internal_mallinfo()
3506 if (is_initialized(m)) { in internal_mallinfo()
3508 size_t mfree = m->topsize + TOP_FOOT_SIZE; in internal_mallinfo()
3510 msegmentptr s = &m->seg; in internal_mallinfo()
3514 q != m->top && q->head != FENCEPOST_HEAD) { in internal_mallinfo()
3528 nm.hblkhd = m->footprint - sum; in internal_mallinfo()
3529 nm.usmblks = m->max_footprint; in internal_mallinfo()
3530 nm.uordblks = m->footprint - mfree; in internal_mallinfo()
3532 nm.keepcost = m->topsize; in internal_mallinfo()
3535 POSTACTION(m); in internal_mallinfo()
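
internal_mallinfo backs the public entry points (dlmallinfo for the global state gm, mspace_mallinfo for user mspaces). A hedged usage sketch, assuming a build with NO_MALLINFO unset and dlmalloc's companion header available (its name varies by distribution):

  #include <stdio.h>
  #include "malloc.h"   /* dlmalloc's optional header: struct mallinfo, dlmallinfo() */

  int main(void) {
    struct mallinfo mi = dlmallinfo();
    printf("in use:    %zu\n", (size_t)mi.uordblks);  /* footprint - free */
    printf("free:      %zu\n", (size_t)mi.fordblks);
    printf("trimmable: %zu\n", (size_t)mi.keepcost);  /* == topsize */
    return 0;
  }
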
3542 static void internal_malloc_stats(mstate m) { in internal_malloc_stats() argument
3544 if (!PREACTION(m)) { in internal_malloc_stats()
3548 check_malloc_state(m); in internal_malloc_stats()
3549 if (is_initialized(m)) { in internal_malloc_stats()
3550 msegmentptr s = &m->seg; in internal_malloc_stats()
3551 maxfp = m->max_footprint; in internal_malloc_stats()
3552 fp = m->footprint; in internal_malloc_stats()
3553 used = fp - (m->topsize + TOP_FOOT_SIZE); in internal_malloc_stats()
3558 q != m->top && q->head != FENCEPOST_HEAD) { in internal_malloc_stats()
3566 POSTACTION(m); /* drop lock */ in internal_malloc_stats()
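
The public wrapper here is dlmalloc_stats() (mspace_malloc_stats for mspaces); it prints the three figures computed above to stderr. Usage is a single call:

  extern void dlmalloc_stats(void);   /* stock dlmalloc prototype */

  void report(void) {
    /* prints max system bytes, system bytes, and in-use bytes */
    dlmalloc_stats();
  }
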
3815 #define internal_malloc(m, b) mspace_malloc(m, b) argument
3816 #define internal_free(m, mem) mspace_free(m,mem); argument
3819 #define internal_malloc(m, b)\ argument
3820 ((m == gm)? dlmalloc(b) : mspace_malloc(m, b))
3821 #define internal_free(m, mem)\ argument
3822 if (m == gm) dlfree(mem); else mspace_free(m,mem);
3824 #define internal_malloc(m, b) dlmalloc(b) argument
3825 #define internal_free(m, mem) dlfree(mem) argument
3840 static void* mmap_alloc(mstate m, size_t nb) { in mmap_alloc() argument
3842 if (m->footprint_limit != 0) { in mmap_alloc()
3843 size_t fp = m->footprint + mmsize; in mmap_alloc()
3844 if (fp <= m->footprint || fp > m->footprint_limit) in mmap_alloc()
3855 mark_inuse_foot(m, p, psize); in mmap_alloc()
3859 if (m->least_addr == 0 || mm < m->least_addr) in mmap_alloc()
3860 m->least_addr = mm; in mmap_alloc()
3861 if ((m->footprint += mmsize) > m->max_footprint) in mmap_alloc()
3862 m->max_footprint = m->footprint; in mmap_alloc()
3864 check_mmapped_chunk(m, p); in mmap_alloc()
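
Note the limit test at lines 3843-3844: fp <= m->footprint catches size_t wraparound in the addition, so an overflowing request cannot slip under the limit. The same idiom as a standalone sketch:

  #include <stddef.h>

  /* nonzero if adding `add` (assumed nonzero, as in mmap_alloc) to `footprint`
     would wrap around or exceed `limit`; limit == 0 means "no limit" */
  static int over_limit(size_t footprint, size_t add, size_t limit) {
    size_t fp = footprint + add;
    return fp <= footprint || (limit != 0 && fp > limit);
  }
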
3872 static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb, int flags) { in mmap_resize() argument
3891 mark_inuse_foot(m, newp, psize); in mmap_resize()
3895 if (cp < m->least_addr) in mmap_resize()
3896 m->least_addr = cp; in mmap_resize()
3897 if ((m->footprint += newmmsize - oldmmsize) > m->max_footprint) in mmap_resize()
3898 m->max_footprint = m->footprint; in mmap_resize()
3899 check_mmapped_chunk(m, newp); in mmap_resize()
3910 static void init_top(mstate m, mchunkptr p, size_t psize) { in init_top() argument
3916 m->top = p; in init_top()
3917 m->topsize = psize; in init_top()
3921 m->trim_check = mparams.trim_threshold; /* reset on each update */ in init_top()
3925 static void init_bins(mstate m) { in init_bins() argument
3929 sbinptr bin = smallbin_at(m,i); in init_bins()
3937 static void reset_on_error(mstate m) { in reset_on_error() argument
3941 m->smallmap = m->treemap = 0; in reset_on_error()
3942 m->dvsize = m->topsize = 0; in reset_on_error()
3943 m->seg.base = 0; in reset_on_error()
3944 m->seg.size = 0; in reset_on_error()
3945 m->seg.next = 0; in reset_on_error()
3946 m->top = m->dv = 0; in reset_on_error()
3948 *treebin_at(m, i) = 0; in reset_on_error()
3949 init_bins(m); in reset_on_error()
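
The elided body of init_bins does nothing beyond establishing each smallbin as an empty circular doubly linked list whose head points at itself, which is how "empty" is represented throughout. A standalone sketch with an illustrative node type:

  struct bin { struct bin* fd; struct bin* bk; };   /* forward/back links */
  #define NBINS 32                                  /* dlmalloc's NSMALLBINS is 32 */

  static void init_bins_sketch(struct bin bins[NBINS]) {
    int i;
    for (i = 0; i < NBINS; ++i)
      bins[i].fd = bins[i].bk = &bins[i];           /* empty == self-linked */
  }
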
3954 static void* prepend_alloc(mstate m, char* newbase, char* oldbase, in prepend_alloc() argument
3961 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in prepend_alloc()
3968 if (oldfirst == m->top) { in prepend_alloc()
3969 size_t tsize = m->topsize += qsize; in prepend_alloc()
3970 m->top = q; in prepend_alloc()
3972 check_top_chunk(m, q); in prepend_alloc()
3974 else if (oldfirst == m->dv) { in prepend_alloc()
3975 size_t dsize = m->dvsize += qsize; in prepend_alloc()
3976 m->dv = q; in prepend_alloc()
3982 unlink_chunk(m, oldfirst, nsize); in prepend_alloc()
3987 insert_chunk(m, q, qsize); in prepend_alloc()
3988 check_free_chunk(m, q); in prepend_alloc()
3991 check_malloced_chunk(m, chunk2mem(p), nb); in prepend_alloc()
3996 static void add_segment(mstate m, char* tbase, size_t tsize, flag_t mmapped) { in add_segment() argument
3998 char* old_top = (char*)m->top; in add_segment()
3999 msegmentptr oldsp = segment_holding(m, old_top); in add_segment()
4013 init_top(m, (mchunkptr)tbase, tsize - TOP_FOOT_SIZE); in add_segment()
4017 set_size_and_pinuse_of_inuse_chunk(m, sp, ssize); in add_segment()
4018 *ss = m->seg; /* Push current record */ in add_segment()
4019 m->seg.base = tbase; in add_segment()
4020 m->seg.size = tsize; in add_segment()
4021 m->seg.sflags = mmapped; in add_segment()
4022 m->seg.next = ss; in add_segment()
4042 insert_chunk(m, q, psize); in add_segment()
4045 check_top_chunk(m, m->top); in add_segment()
4051 static void* sys_alloc(mstate m, size_t nb) { in sys_alloc() argument
4060 if (use_mmap(m) && nb >= mparams.mmap_threshold && m->topsize != 0) { in sys_alloc()
4061 void* mem = mmap_alloc(m, nb); in sys_alloc()
4073 if (m->footprint_limit != 0) { in sys_alloc()
4074 size_t fp = m->footprint + asize; in sys_alloc()
4075 if (fp <= m->footprint || fp > m->footprint_limit) { in sys_alloc()
4105 if (MORECORE_CONTIGUOUS && !use_noncontiguous(m)) { in sys_alloc()
4108 msegmentptr ss = (m->top == 0)? 0 : segment_holding(m, (char*)m->top); in sys_alloc()
4118 fp = m->footprint + ssize; /* recheck limits */ in sys_alloc()
4120 (m->footprint_limit == 0 || in sys_alloc()
4121 (fp > m->footprint && fp <= m->footprint_limit)) && in sys_alloc()
4130 ssize = granularity_align(nb - m->topsize + SYS_ALLOC_PADDING); in sys_alloc()
4160 disable_contiguous(m); /* Don't try contiguous path in the future */ in sys_alloc()
4195 if ((m->footprint += tsize) > m->max_footprint) in sys_alloc()
4196 m->max_footprint = m->footprint; in sys_alloc()
4198 if (!is_initialized(m)) { /* first-time initialization */ in sys_alloc()
4199 if (m->least_addr == 0 || tbase < m->least_addr) in sys_alloc()
4200 m->least_addr = tbase; in sys_alloc()
4201 m->seg.base = tbase; in sys_alloc()
4202 m->seg.size = tsize; in sys_alloc()
4203 m->seg.sflags = mmap_flag; in sys_alloc()
4204 m->magic = mparams.magic; in sys_alloc()
4205 m->release_checks = MAX_RELEASE_CHECK_RATE; in sys_alloc()
4206 init_bins(m); in sys_alloc()
4208 if (is_global(m)) in sys_alloc()
4209 init_top(m, (mchunkptr)tbase, tsize - TOP_FOOT_SIZE); in sys_alloc()
4214 mchunkptr mn = next_chunk(mem2chunk(m)); in sys_alloc()
4215 init_top(m, mn, (size_t)((tbase + tsize) - (char*)mn) -TOP_FOOT_SIZE); in sys_alloc()
4221 msegmentptr sp = &m->seg; in sys_alloc()
4228 segment_holds(sp, m->top)) { /* append */ in sys_alloc()
4230 init_top(m, m->top, m->topsize + tsize); in sys_alloc()
4233 if (tbase < m->least_addr) in sys_alloc()
4234 m->least_addr = tbase; in sys_alloc()
4235 sp = &m->seg; in sys_alloc()
4244 return prepend_alloc(m, tbase, oldbase, nb); in sys_alloc()
4247 add_segment(m, tbase, tsize, mmap_flag); in sys_alloc()
4251 if (nb < m->topsize) { /* Allocate from new or extended top space */ in sys_alloc()
4252 size_t rsize = m->topsize -= nb; in sys_alloc()
4253 mchunkptr p = m->top; in sys_alloc()
4254 mchunkptr r = m->top = chunk_plus_offset(p, nb); in sys_alloc()
4256 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in sys_alloc()
4257 check_top_chunk(m, m->top); in sys_alloc()
4258 check_malloced_chunk(m, chunk2mem(p), nb); in sys_alloc()
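
The footprint_limit tests in sys_alloc (lines 4073-4075) and mmap_alloc are driven by the public dlmalloc_set_footprint_limit, added in 2.8.6. A hedged usage sketch:

  #include <stddef.h>

  extern size_t dlmalloc_set_footprint_limit(size_t bytes);  /* stock prototype */
  extern void*  dlmalloc(size_t);

  void cap_heap(void) {
    /* cap total memory obtained from the system at ~64 MiB; the value is
       granularity-aligned internally and the applied limit is returned */
    dlmalloc_set_footprint_limit((size_t)64 << 20);
    /* a request that would push the footprint past the cap now returns 0 */
    void* p = dlmalloc((size_t)128 << 20);
    (void)p;
  }
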
4270 static size_t release_unused_segments(mstate m) { in release_unused_segments() argument
4273 msegmentptr pred = &m->seg; in release_unused_segments()
4287 if (p == m->dv) { in release_unused_segments()
4288 m->dv = 0; in release_unused_segments()
4289 m->dvsize = 0; in release_unused_segments()
4292 unlink_large_chunk(m, tp); in release_unused_segments()
4296 m->footprint -= size; in release_unused_segments()
4302 insert_large_chunk(m, tp, psize); in release_unused_segments()
4312 m->release_checks = (((size_t) nsegs > (size_t) MAX_RELEASE_CHECK_RATE)? in release_unused_segments()
4317 static int sys_trim(mstate m, size_t pad) { in sys_trim() argument
4320 if (pad < MAX_REQUEST && is_initialized(m)) { in sys_trim()
4323 if (m->topsize > pad) { in sys_trim()
4326 size_t extra = ((m->topsize - pad + (unit - SIZE_T_ONE)) / unit - in sys_trim()
4328 msegmentptr sp = segment_holding(m, (char*)m->top); in sys_trim()
4334 !has_segment_link(m, sp)) { /* can't shrink if pinned */ in sys_trim()
4364 m->footprint -= released; in sys_trim()
4365 init_top(m, m->top, m->topsize - released); in sys_trim()
4366 check_top_chunk(m, m->top); in sys_trim()
4372 released += release_unused_segments(m); in sys_trim()
4375 if (released == 0 && m->topsize > m->trim_check) in sys_trim()
4376 m->trim_check = MAX_SIZE_T; in sys_trim()
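
sys_trim is reached from the public dlmalloc_trim (and automatically once topsize exceeds trim_check). Usage sketch:

  extern int dlmalloc_trim(size_t pad);   /* stock prototype */

  void shrink(void) {
    /* give back as much unused top/segment memory as possible, keeping at
       least `pad` bytes of slack; returns 1 if anything was released */
    (void)dlmalloc_trim(0);
  }
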
4385 static void dispose_chunk(mstate m, mchunkptr p, size_t psize) { in dispose_chunk() argument
4393 m->footprint -= psize; in dispose_chunk()
4399 if (RTCHECK(ok_address(m, prev))) { /* consolidate backward */ in dispose_chunk()
4400 if (p != m->dv) { in dispose_chunk()
4401 unlink_chunk(m, p, prevsize); in dispose_chunk()
4404 m->dvsize = psize; in dispose_chunk()
4410 CORRUPTION_ERROR_ACTION(m); in dispose_chunk()
4414 if (RTCHECK(ok_address(m, next))) { in dispose_chunk()
4416 if (next == m->top) { in dispose_chunk()
4417 size_t tsize = m->topsize += psize; in dispose_chunk()
4418 m->top = p; in dispose_chunk()
4420 if (p == m->dv) { in dispose_chunk()
4421 m->dv = 0; in dispose_chunk()
4422 m->dvsize = 0; in dispose_chunk()
4426 else if (next == m->dv) { in dispose_chunk()
4427 size_t dsize = m->dvsize += psize; in dispose_chunk()
4428 m->dv = p; in dispose_chunk()
4435 unlink_chunk(m, next, nsize); in dispose_chunk()
4437 if (p == m->dv) { in dispose_chunk()
4438 m->dvsize = psize; in dispose_chunk()
4446 insert_chunk(m, p, psize); in dispose_chunk()
4449 CORRUPTION_ERROR_ACTION(m); in dispose_chunk()
4456 static void* tmalloc_large(mstate m, size_t nb) { in tmalloc_large() argument
4462 if ((t = *treebin_at(m, idx)) != 0) { in tmalloc_large()
4486 binmap_t leftbits = left_bits(idx2bit(idx)) & m->treemap; in tmalloc_large()
4491 t = *treebin_at(m, i); in tmalloc_large()
4505 if (v != 0 && rsize < (size_t)(m->dvsize - nb)) { in tmalloc_large()
4506 if (RTCHECK(ok_address(m, v))) { /* split */ in tmalloc_large()
4510 unlink_large_chunk(m, v); in tmalloc_large()
4512 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_large()
4514 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_large()
4516 insert_chunk(m, r, rsize); in tmalloc_large()
4521 CORRUPTION_ERROR_ACTION(m); in tmalloc_large()
4527 static void* tmalloc_small(mstate m, size_t nb) { in tmalloc_small() argument
4531 binmap_t leastbit = least_bit(m->treemap); in tmalloc_small()
4533 v = t = *treebin_at(m, i); in tmalloc_small()
4544 if (RTCHECK(ok_address(m, v))) { in tmalloc_small()
4548 unlink_large_chunk(m, v); in tmalloc_small()
4550 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_small()
4552 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_small()
4554 replace_dv(m, r, rsize); in tmalloc_small()
4560 CORRUPTION_ERROR_ACTION(m); in tmalloc_small()
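
tmalloc_small selects the smallest nonempty treebin via least_bit on m->treemap (line 4531), the classic x & -x trick for isolating the lowest set bit. Standalone:

  #include <stdint.h>

  /* isolate the lowest set bit: least_bit(0x68) == 0x08 */
  static uint32_t least_bit(uint32_t x) {
    return x & (0u - x);   /* same as x & -x on an unsigned type */
  }
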
4833 static mchunkptr try_realloc_chunk(mstate m, mchunkptr p, size_t nb, in try_realloc_chunk() argument
4838 if (RTCHECK(ok_address(m, p) && ok_inuse(p) && in try_realloc_chunk()
4841 newp = mmap_resize(m, p, nb, can_move); in try_realloc_chunk()
4847 set_inuse(m, p, nb); in try_realloc_chunk()
4848 set_inuse(m, r, rsize); in try_realloc_chunk()
4849 dispose_chunk(m, r, rsize); in try_realloc_chunk()
4853 else if (next == m->top) { /* extend into top */ in try_realloc_chunk()
4854 if (oldsize + m->topsize > nb) { in try_realloc_chunk()
4855 size_t newsize = oldsize + m->topsize; in try_realloc_chunk()
4858 set_inuse(m, p, nb); in try_realloc_chunk()
4860 m->top = newtop; in try_realloc_chunk()
4861 m->topsize = newtopsize; in try_realloc_chunk()
4865 else if (next == m->dv) { /* extend into dv */ in try_realloc_chunk()
4866 size_t dvs = m->dvsize; in try_realloc_chunk()
4872 set_inuse(m, p, nb); in try_realloc_chunk()
4875 m->dvsize = dsize; in try_realloc_chunk()
4876 m->dv = r; in try_realloc_chunk()
4880 set_inuse(m, p, newsize); in try_realloc_chunk()
4881 m->dvsize = 0; in try_realloc_chunk()
4882 m->dv = 0; in try_realloc_chunk()
4891 unlink_chunk(m, next, nextsize); in try_realloc_chunk()
4894 set_inuse(m, p, newsize); in try_realloc_chunk()
4898 set_inuse(m, p, nb); in try_realloc_chunk()
4899 set_inuse(m, r, rsize); in try_realloc_chunk()
4900 dispose_chunk(m, r, rsize); in try_realloc_chunk()
4907 USAGE_ERROR_ACTION(m, chunk2mem(p)); in try_realloc_chunk()
4912 static void* internal_memalign(mstate m, size_t alignment, size_t bytes) { in internal_memalign() argument
4922 if (m != 0) { /* Test isn't needed but avoids compiler warning */ in internal_memalign()
4929 mem = internal_malloc(m, req); in internal_memalign()
4932 if (PREACTION(m)) in internal_memalign()
4957 set_inuse(m, newp, newsize); in internal_memalign()
4958 set_inuse(m, p, leadsize); in internal_memalign()
4959 dispose_chunk(m, p, leadsize); in internal_memalign()
4970 set_inuse(m, p, nb); in internal_memalign()
4971 set_inuse(m, remainder, remainder_size); in internal_memalign()
4972 dispose_chunk(m, remainder, remainder_size); in internal_memalign()
4979 check_inuse_chunk(m, p); in internal_memalign()
4980 POSTACTION(m); in internal_memalign()
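
internal_memalign backs dlmemalign and mspace_memalign: it over-allocates, carves an aligned chunk out of the middle, and hands the lead and trailing remainders to dispose_chunk (lines 4959 and 4972). Usage sketch:

  #include <stddef.h>

  extern void* dlmemalign(size_t alignment, size_t bytes);  /* stock prototype */
  extern void  dlfree(void*);

  void aligned_demo(void) {
    /* alignment is taken as (or promoted to) a power of two */
    void* p = dlmemalign(64, 1000);
    if (p != 0)
      dlfree(p);            /* freed like any ordinary chunk */
  }
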
4993 static void** ialloc(mstate m, in ialloc() argument
5022 return (void**)internal_malloc(m, 0); in ialloc()
5046 was_enabled = use_mmap(m); in ialloc()
5047 disable_mmap(m); in ialloc()
5048 mem = internal_malloc(m, size - CHUNK_OVERHEAD); in ialloc()
5050 enable_mmap(m); in ialloc()
5054 if (PREACTION(m)) return 0; in ialloc()
5070 set_size_and_pinuse_of_inuse_chunk(m, array_chunk, array_chunk_size); in ialloc()
5083 set_size_and_pinuse_of_inuse_chunk(m, p, size); in ialloc()
5087 set_size_and_pinuse_of_inuse_chunk(m, p, remainder_size); in ialloc()
5101 check_inuse_chunk(m, mem2chunk(marray)); in ialloc()
5104 check_inuse_chunk(m, mem2chunk(marray[i])); in ialloc()
5108 POSTACTION(m); in ialloc()
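
ialloc is the engine behind dlindependent_calloc and dlindependent_comalloc: it allocates all elements from one contiguous sweep, temporarily disabling mmap (lines 5046-5050) so the pieces sit adjacent in memory. A hedged usage sketch of the comalloc flavor, supplying our own pointer array:

  #include <stddef.h>

  extern void** dlindependent_comalloc(size_t n, size_t sizes[], void* chunks[]);
  extern void   dlfree(void*);

  void comalloc_demo(void) {
    size_t sizes[3] = { 16, 64, 256 };
    void*  chunks[3];
    /* on success returns the supplied array with the three pointers filled in */
    if (dlindependent_comalloc(3, sizes, chunks) != 0) {
      dlfree(chunks[0]);    /* each element is independently freeable */
      dlfree(chunks[1]);
      dlfree(chunks[2]);
    }
  }
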
5119 static size_t internal_bulk_free(mstate m, void* array[], size_t nelem) { in internal_bulk_free() argument
5121 if (!PREACTION(m)) { in internal_bulk_free()
5130 if (get_mstate_for(p) != m) { in internal_bulk_free()
5135 check_inuse_chunk(m, p); in internal_bulk_free()
5137 if (RTCHECK(ok_address(m, p) && ok_inuse(p))) { in internal_bulk_free()
5142 set_inuse(m, p, newsize); in internal_bulk_free()
5146 dispose_chunk(m, p, psize); in internal_bulk_free()
5149 CORRUPTION_ERROR_ACTION(m); in internal_bulk_free()
5154 if (should_trim(m, m->topsize)) in internal_bulk_free()
5155 sys_trim(m, 0); in internal_bulk_free()
5156 POSTACTION(m); in internal_bulk_free()
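
internal_bulk_free backs dlbulk_free (2.8.6+): it frees a whole array of pointers under one lock acquisition, disposing and coalescing as it goes (line 5146), then considers a single trim at the end. Usage sketch:

  #include <stddef.h>

  extern void*  dlmalloc(size_t);
  extern size_t dlbulk_free(void* array[], size_t nelem);

  void bulk_demo(void) {
    void* ptrs[100];
    size_t i, unfreed;
    for (i = 0; i < 100; ++i)
      ptrs[i] = dlmalloc(32);
    /* one lock round-trip instead of 100; returns how many could NOT be freed */
    unfreed = dlbulk_free(ptrs, 100);
    (void)unfreed;
  }
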
5163 static void internal_inspect_all(mstate m, in internal_inspect_all() argument
5169 if (is_initialized(m)) { in internal_inspect_all()
5170 mchunkptr top = m->top; in internal_inspect_all()
5172 for (s = &m->seg; s != 0; s = s->next) { in internal_inspect_all()
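
internal_inspect_all implements dlmalloc_inspect_all, compiled in only when dlmalloc is built with MALLOC_INSPECT_ALL: it walks every chunk of every segment under the lock and reports each one to a user callback. A hedged sketch:

  #include <stdio.h>
  #include <stddef.h>

  extern void dlmalloc_inspect_all(void (*handler)(void* start, void* end,
                                                   size_t used_bytes, void* arg),
                                   void* arg);

  static void dump(void* start, void* end, size_t used, void* arg) {
    (void)arg;
    /* used is 0 for free chunks, nonzero for in-use chunks */
    fprintf(stderr, "[%p, %p) used=%zu\n", start, end, used);
  }

  void inspect_demo(void) {
    /* the handler runs with the allocator lock held, so it must not
       allocate or free memory itself */
    dlmalloc_inspect_all(dump, 0);
  }
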
5224 mstate m = gm; in dlrealloc() local
5226 mstate m = get_mstate_for(oldp); in dlrealloc() local
5227 if (!ok_magic(m)) { in dlrealloc()
5228 USAGE_ERROR_ACTION(m, oldmem); in dlrealloc()
5232 if (!PREACTION(m)) { in dlrealloc()
5233 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1); in dlrealloc()
5234 POSTACTION(m); in dlrealloc()
5236 check_inuse_chunk(m, newp); in dlrealloc()
5240 mem = internal_malloc(m, bytes); in dlrealloc()
5244 internal_free(m, oldmem); in dlrealloc()
5262 mstate m = gm; in dlrealloc_in_place() local
5264 mstate m = get_mstate_for(oldp); in dlrealloc_in_place() local
5265 if (!ok_magic(m)) { in dlrealloc_in_place()
5266 USAGE_ERROR_ACTION(m, oldmem); in dlrealloc_in_place()
5270 if (!PREACTION(m)) { in dlrealloc_in_place()
5271 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0); in dlrealloc_in_place()
5272 POSTACTION(m); in dlrealloc_in_place()
5274 check_inuse_chunk(m, newp); in dlrealloc_in_place()
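
dlrealloc_in_place is the no-move variant of dlrealloc: try_realloc_chunk is called with can_move == 0 (line 5271), so the chunk is either resized where it sits or the call fails. Usage sketch:

  #include <stddef.h>

  extern void* dlmalloc(size_t);
  extern void* dlrealloc_in_place(void* p, size_t n);
  extern void  dlfree(void*);

  void grow_in_place_demo(void) {
    void* p = dlmalloc(100);
    if (p != 0) {
      /* returns p on success, 0 if resizing would require moving the data */
      if (dlrealloc_in_place(p, 200) != 0) {
        /* p is unchanged and now holds at least 200 usable bytes */
      }
      dlfree(p);
    }
  }
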
5427 mstate m = (mstate)(chunk2mem(msp)); in init_user_mstate() local
5428 memset(m, 0, msize); in init_user_mstate()
5429 (void)INITIAL_LOCK(&m->mutex); in init_user_mstate()
5431 m->seg.base = m->least_addr = tbase; in init_user_mstate()
5432 m->seg.size = m->footprint = m->max_footprint = tsize; in init_user_mstate()
5433 m->magic = mparams.magic; in init_user_mstate()
5434 m->release_checks = MAX_RELEASE_CHECK_RATE; in init_user_mstate()
5435 m->mflags = mparams.default_mflags; in init_user_mstate()
5436 m->extp = 0; in init_user_mstate()
5437 m->exts = 0; in init_user_mstate()
5438 disable_contiguous(m); in init_user_mstate()
5439 init_bins(m); in init_user_mstate()
5440 mn = next_chunk(mem2chunk(m)); in init_user_mstate()
5441 init_top(m, mn, (size_t)((tbase + tsize) - (char*)mn) - TOP_FOOT_SIZE); in init_user_mstate()
5442 check_top_chunk(m, m->top); in init_user_mstate()
5443 return m; in init_user_mstate()
5447 mstate m = 0; in create_mspace() local
5457 m = init_user_mstate(tbase, tsize); in create_mspace()
5458 m->seg.sflags = USE_MMAP_BIT; in create_mspace()
5459 set_lock(m, locked); in create_mspace()
5462 return (mspace)m; in create_mspace()
5466 mstate m = 0; in create_mspace_with_base() local
5472 m = init_user_mstate((char*)base, capacity); in create_mspace_with_base()
5473 m->seg.sflags = EXTERN_BIT; in create_mspace_with_base()
5474 set_lock(m, locked); in create_mspace_with_base()
5476 return (mspace)m; in create_mspace_with_base()
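
Both constructors wrap init_user_mstate, differing only in where the first segment comes from (a fresh mmap vs. a caller-supplied base). Usage sketch of the mspace API in an MSPACES build:

  #include <stddef.h>

  typedef void* mspace;   /* opaque handle, as in stock dlmalloc */
  extern mspace create_mspace(size_t capacity, int locked);
  extern void*  mspace_malloc(mspace msp, size_t bytes);
  extern void   mspace_free(mspace msp, void* mem);
  extern size_t destroy_mspace(mspace msp);

  void mspace_demo(void) {
    mspace ms = create_mspace(0, 1);   /* 0 = default capacity, 1 = use a lock */
    void* p = mspace_malloc(ms, 128);
    mspace_free(ms, p);
    destroy_mspace(ms);                /* releases every segment at once */
  }
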
5776 mstate m = (mstate)msp; in mspace_realloc() local
5778 mstate m = get_mstate_for(oldp); in mspace_realloc() local
5779 if (!ok_magic(m)) { in mspace_realloc()
5780 USAGE_ERROR_ACTION(m, oldmem); in mspace_realloc()
5784 if (!PREACTION(m)) { in mspace_realloc()
5785 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1); in mspace_realloc()
5786 POSTACTION(m); in mspace_realloc()
5788 check_inuse_chunk(m, newp); in mspace_realloc()
5792 mem = mspace_malloc(m, bytes); in mspace_realloc()
5796 mspace_free(m, oldmem); in mspace_realloc()
5814 mstate m = (mstate)msp; in mspace_realloc_in_place() local
5816 mstate m = get_mstate_for(oldp); in mspace_realloc_in_place() local
5818 if (!ok_magic(m)) { in mspace_realloc_in_place()
5819 USAGE_ERROR_ACTION(m, oldmem); in mspace_realloc_in_place()
5823 if (!PREACTION(m)) { in mspace_realloc_in_place()
5824 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0); in mspace_realloc_in_place()
5825 POSTACTION(m); in mspace_realloc_in_place()
5827 check_inuse_chunk(m, newp); in mspace_realloc_in_place()