
Lines Matching refs:nb

3153 static void* mmap_alloc(mstate m, size_t nb) {  in mmap_alloc()  argument
3154 size_t mmsize = granularity_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK); in mmap_alloc()
3155 if (mmsize > nb) { /* Check for wrap around 0 */ in mmap_alloc()
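mmap_alloc rounds the padded request nb up to the system allocation granularity plus chunk bookkeeping, and the mmsize > nb test guards against that addition wrapping past zero. A minimal sketch of the overflow-checked rounding, assuming a 64 KiB granularity; round_up_to_granularity and MMAP_OVERHEAD are illustrative names, not dlmalloc's own:

#include <stddef.h>
#include <stdio.h>

#define GRANULARITY   ((size_t)65536)            /* assumed mmap granularity */
#define MMAP_OVERHEAD (6 * sizeof(size_t) + 7)   /* stand-in for SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK */

/* Round nb plus overhead up to a granularity boundary; 0 signals overflow. */
static size_t round_up_to_granularity(size_t nb) {
  size_t mmsize = (nb + MMAP_OVERHEAD + (GRANULARITY - 1)) & ~(GRANULARITY - 1);
  return (mmsize > nb) ? mmsize : 0;             /* wrapped around zero: refuse */
}

int main(void) {
  printf("%zu\n", round_up_to_granularity(100000));       /* 131072 */
  printf("%zu\n", round_up_to_granularity((size_t)-8));   /* 0: too close to SIZE_MAX */
  return 0;
}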
3180 static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb) { in mmap_resize() argument
3182 if (is_small(nb)) /* Can't shrink mmap regions below small size */ in mmap_resize()
3185 if (oldsize >= nb + SIZE_T_SIZE && in mmap_resize()
3186 (oldsize - nb) <= (mparams.granularity << 1)) in mmap_resize()
3191 size_t newmmsize = granularity_align(nb + SIX_SIZE_T_SIZES + in mmap_resize()
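mmap_resize only reuses an mmapped chunk in place when the padded size nb is not in the small range and the old mapping covers nb without leaving more than two granularity units of slack; anything else is remapped or handed back to the normal paths. A rough predicate capturing that decision, with GRANULARITY and SMALL_LIMIT assumed rather than read from mparams:

#include <stddef.h>
#include <stdio.h>

#define GRANULARITY ((size_t)65536)   /* assumed */
#define SMALL_LIMIT ((size_t)256)     /* stand-in for dlmalloc's small-chunk bound */

/* Can an existing mapping of oldsize bytes satisfy the padded request nb in place? */
static int mmap_fits_in_place(size_t oldsize, size_t nb) {
  if (nb < SMALL_LIMIT)                          /* never keep an mmap region for small requests */
    return 0;
  return oldsize >= nb + sizeof(size_t) &&       /* room for the trailing fencepost */
         (oldsize - nb) <= (GRANULARITY << 1);   /* waste capped at 2 * granularity */
}

int main(void) {
  printf("%d %d\n", mmap_fits_in_place(200000, 150000), mmap_fits_in_place(200000, 1000));
  return 0;
}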
3262 size_t nb) { in prepend_alloc() argument
3266 mchunkptr q = chunk_plus_offset(p, nb); in prepend_alloc()
3267 size_t qsize = psize - nb; in prepend_alloc()
3268 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in prepend_alloc()
3298 check_malloced_chunk(m, chunk2mem(p), nb); in prepend_alloc()
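prepend_alloc carves nb bytes off the front of a newly obtained segment that sits directly below the old one: p keeps nb bytes, and the remainder q of psize - nb is merged with top, with the designated victim, or freed into a bin. A stripped-down split over a toy header that carries only a size field (not dlmalloc's real chunk layout):

#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

/* Toy chunk: a size header at the start of its memory. */
typedef struct chunk { size_t size; } chunk;

#define CHUNK_AT(base, off) ((chunk*)((char*)(base) + (off)))

/* Split a region of psize bytes at p: the first nb bytes stay allocated,
   the tail becomes a free remainder chunk. Returns the remainder. */
static chunk* split_chunk(chunk* p, size_t psize, size_t nb) {
  chunk* q = CHUNK_AT(p, nb);     /* q = chunk_plus_offset(p, nb) */
  p->size = nb;
  q->size = psize - nb;           /* qsize = psize - nb */
  return q;
}

int main(void) {
  size_t psize = 4096, nb = 160;
  chunk* p = malloc(psize);
  chunk* q = split_chunk(p, psize, nb);
  printf("allocated %zu, remainder %zu\n", p->size, q->size);
  free(p);
  return 0;
}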
3359 static void* sys_alloc(mstate m, size_t nb) { in sys_alloc() argument
3367 if (use_mmap(m) && nb >= mparams.mmap_threshold) { in sys_alloc()
3368 void* mem = mmap_alloc(m, nb); in sys_alloc()
3399 asize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE); in sys_alloc()
3413 asize = granularity_align(nb - m->topsize + TOP_FOOT_SIZE + SIZE_T_ONE); in sys_alloc()
3425 asize < nb + TOP_FOOT_SIZE + SIZE_T_ONE) { in sys_alloc()
3426 size_t esize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE - asize); in sys_alloc()
3450 size_t req = nb + TOP_FOOT_SIZE + SIZE_T_ONE; in sys_alloc()
3452 if (rsize > nb) { /* Fail if wraps around zero */ in sys_alloc()
3463 size_t asize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE); in sys_alloc()
3473 if (ssize > nb + TOP_FOOT_SIZE) { in sys_alloc()
3527 return prepend_alloc(m, tbase, oldbase, nb); in sys_alloc()
3534 if (nb < m->topsize) { /* Allocate from new or extended top space */ in sys_alloc()
3535 size_t rsize = m->topsize -= nb; in sys_alloc()
3537 mchunkptr r = m->top = chunk_plus_offset(p, nb); in sys_alloc()
3539 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in sys_alloc()
3541 check_malloced_chunk(m, chunk2mem(p), nb); in sys_alloc()
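sys_alloc first routes large requests (nb at or above the mmap threshold) to mmap_alloc, then asks the system for at least nb + TOP_FOOT_SIZE + SIZE_T_ONE rounded to granularity, and finally satisfies the request by splitting the new or extended top chunk: rsize = topsize - nb and top advances by nb. A sketch of that final split over an illustrative top-of-heap record (heap_top is not a dlmalloc structure):

#include <stddef.h>
#include <stdio.h>

/* Illustrative wilderness state: a pointer to the top chunk and its size. */
struct heap_top { char* top; size_t topsize; };

/* Take nb bytes from the top chunk, mirroring sys_alloc's final split; the
   request must be strictly smaller than topsize so a top chunk remains. */
static void* alloc_from_top(struct heap_top* h, size_t nb) {
  if (nb >= h->topsize)
    return NULL;          /* the real code would extend the heap first */
  void* p = h->top;
  h->topsize -= nb;       /* rsize = topsize -= nb */
  h->top += nb;           /* r = top = chunk_plus_offset(p, nb) */
  return p;
}

int main(void) {
  static char arena[1 << 16];
  struct heap_top h = { arena, sizeof arena };
  void* p = alloc_from_top(&h, 512);
  printf("got %p, %zu bytes of top left\n", p, h.topsize);
  return 0;
}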
3659 static void* tmalloc_large(mstate m, size_t nb) { in tmalloc_large() argument
3661 size_t rsize = -nb; /* Unsigned negation */ in tmalloc_large()
3664 compute_tree_index(nb, idx); in tmalloc_large()
3668 size_t sizebits = nb << leftshift_for_tree_index(idx); in tmalloc_large()
3672 size_t trem = chunksize(t) - nb; in tmalloc_large()
3701 size_t trem = chunksize(t) - nb; in tmalloc_large()
3710 if (v != 0 && rsize < (size_t)(m->dvsize - nb)) { in tmalloc_large()
3712 mchunkptr r = chunk_plus_offset(v, nb); in tmalloc_large()
3713 assert(chunksize(v) == rsize + nb); in tmalloc_large()
3717 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_large()
3719 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_large()
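tmalloc_large searches the size-keyed tree bin for nb, steering left or right by successive bits of nb (shifted according to the bin index) and remembering the candidate with the smallest remainder trem = chunksize(t) - nb; the winner is split at nb only if that remainder also beats the designated victim. The bitwise trie itself is involved, so the sketch below keeps only the best-fit bookkeeping, scanning a plain array of free sizes instead of walking a tree:

#include <stddef.h>
#include <stdio.h>

/* Pick the free size that leaves the smallest non-negative remainder over nb.
   Mirrors the rsize/trem tracking of tmalloc_large without the tree walk. */
static size_t best_fit_index(const size_t* sizes, size_t n, size_t nb, size_t* rsize_out) {
  size_t best = (size_t)-1;
  size_t rsize = (size_t)-nb;            /* "unsigned negation": a huge sentinel */
  for (size_t i = 0; i < n; i++) {
    if (sizes[i] < nb) continue;
    size_t trem = sizes[i] - nb;         /* trem = chunksize(t) - nb */
    if (trem < rsize) { rsize = trem; best = i; }
  }
  *rsize_out = rsize;
  return best;                           /* (size_t)-1 when nothing fits */
}

int main(void) {
  size_t bins[] = { 640, 288, 1024, 300 };
  size_t rsize, i = best_fit_index(bins, 4, 280, &rsize);
  printf("chose bin %zu (size %zu), remainder %zu\n", i, bins[i], rsize);
  return 0;
}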
3732 static void* tmalloc_small(mstate m, size_t nb) { in tmalloc_small() argument
3740 rsize = chunksize(t) - nb; in tmalloc_small()
3743 size_t trem = chunksize(t) - nb; in tmalloc_small()
3751 mchunkptr r = chunk_plus_offset(v, nb); in tmalloc_small()
3752 assert(chunksize(v) == rsize + nb); in tmalloc_small()
3756 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_small()
3758 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_small()
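tmalloc_small takes the smallest nonempty tree bin that is at least nb and then either exhausts the chunk (when the leftover would be below the minimum chunk size) or splits a remainder r off at offset nb, which is the set_inuse_and_pinuse versus set_size_and_pinuse_of_inuse_chunk distinction in the lines above. A sketch of that split-or-exhaust decision, with the minimum chunk size assumed to be 32 bytes:

#include <stddef.h>
#include <stdio.h>

#define MIN_CHUNK ((size_t)32)    /* assumed minimum usable chunk size */

/* Decide how much of a free chunk of chunk_size bytes to consume for nb:
   returns the size actually taken (all of it, or exactly nb). */
static size_t take_from_chunk(size_t chunk_size, size_t nb, size_t* remainder) {
  size_t rsize = chunk_size - nb;
  if (rsize < MIN_CHUNK) {        /* leftover too small to keep: exhaust the chunk */
    *remainder = 0;
    return chunk_size;            /* corresponds to set_inuse_and_pinuse(m, v, rsize + nb) */
  }
  *remainder = rsize;             /* a usable remainder chunk is split off */
  return nb;                      /* corresponds to set_size_and_pinuse_of_inuse_chunk(m, v, nb) */
}

int main(void) {
  size_t rem;
  printf("take %zu, rem %zu\n", take_from_chunk(96, 80, &rem), rem);   /* 96, 0 */
  printf("take %zu, rem %zu\n", take_from_chunk(256, 80, &rem), rem);  /* 80, 176 */
  return 0;
}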
3788 size_t nb = request2size(bytes); in internal_realloc() local
3790 newp = mmap_resize(m, oldp, nb); in internal_realloc()
3791 else if (oldsize >= nb) { /* already big enough */ in internal_realloc()
3792 size_t rsize = oldsize - nb; in internal_realloc()
3795 mchunkptr remainder = chunk_plus_offset(newp, nb); in internal_realloc()
3796 set_inuse(m, newp, nb); in internal_realloc()
3801 else if (next == m->top && oldsize + m->topsize > nb) { in internal_realloc()
3804 size_t newtopsize = newsize - nb; in internal_realloc()
3805 mchunkptr newtop = chunk_plus_offset(oldp, nb); in internal_realloc()
3806 set_inuse(m, oldp, nb); in internal_realloc()
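internal_realloc tries several in-place strategies before copying: an mmapped chunk goes through mmap_resize, a chunk that already holds nb bytes is shrunk by splitting off the tail beyond nb, and a chunk that borders top with oldsize + topsize > nb absorbs part of top and is re-split at nb; only then does it fall back to allocate, copy, and free. A condensed classification of those paths (control flow only, no real chunk manipulation):

#include <stddef.h>

enum realloc_path { RESIZE_MMAP, SHRINK_IN_PLACE, EXTEND_INTO_TOP, MOVE_AND_COPY };

/* Classify a realloc the way internal_realloc does, given the padded new size
   nb, the current chunk size, whether the chunk is mmapped, and whether the
   following chunk is the top chunk of topsize bytes. */
static enum realloc_path classify_realloc(size_t nb, size_t oldsize,
                                          int is_mmapped, int next_is_top,
                                          size_t topsize) {
  if (is_mmapped)                              return RESIZE_MMAP;      /* mmap_resize */
  if (oldsize >= nb)                           return SHRINK_IN_PLACE;  /* split the tail */
  if (next_is_top && oldsize + topsize > nb)   return EXTEND_INTO_TOP;  /* absorb top */
  return MOVE_AND_COPY;                        /* malloc + memcpy + free fallback */
}

int main(void) {
  return classify_realloc(128, 64, 0, 1, 4096) == EXTEND_INTO_TOP ? 0 : 1;
}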
3860 size_t nb = request2size(bytes); in internal_memalign() local
3861 size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD; in internal_memalign()
3903 if (size > nb + MIN_CHUNK_SIZE) { in internal_memalign()
3904 size_t remainder_size = size - nb; in internal_memalign()
3905 mchunkptr remainder = chunk_plus_offset(p, nb); in internal_memalign()
3906 set_inuse(m, p, nb); in internal_memalign()
3912 assert (chunksize(p) >= nb); in internal_memalign()
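internal_memalign over-allocates by nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD so that an aligned chunk of at least nb bytes can always be carved out of the middle, with the leading and trailing slack given back to the allocator (the size > nb + MIN_CHUNK_SIZE split above). The address arithmetic underneath is the usual round-up to a power of two, sketched here on raw addresses (align_up is not a dlmalloc name):

#include <stdint.h>
#include <stdio.h>

/* Round addr up to the next multiple of alignment, which must be a power of two. */
static uintptr_t align_up(uintptr_t addr, uintptr_t alignment) {
  return (addr + alignment - 1) & ~(alignment - 1);
}

int main(void) {
  printf("%#lx\n", (unsigned long)align_up(0x1003, 0x100));   /* 0x1100 */
  printf("%#lx\n", (unsigned long)align_up(0x1000, 0x100));   /* 0x1000: already aligned */
  return 0;
}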
4087 size_t nb; in dlmalloc() local
4091 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes); in dlmalloc()
4092 idx = small_index(nb); in dlmalloc()
4104 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4108 else if (nb > gm->dvsize) { in dlmalloc()
4120 rsize = small_index2size(i) - nb; in dlmalloc()
4125 set_size_and_pinuse_of_inuse_chunk(gm, p, nb); in dlmalloc()
4126 r = chunk_plus_offset(p, nb); in dlmalloc()
4131 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4135 else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) { in dlmalloc()
4136 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4142 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */ in dlmalloc()
4144 nb = pad_request(bytes); in dlmalloc()
4145 if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) { in dlmalloc()
4146 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4151 if (nb <= gm->dvsize) { in dlmalloc()
4152 size_t rsize = gm->dvsize - nb; in dlmalloc()
4155 mchunkptr r = gm->dv = chunk_plus_offset(p, nb); in dlmalloc()
4158 set_size_and_pinuse_of_inuse_chunk(gm, p, nb); in dlmalloc()
4167 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4171 else if (nb < gm->topsize) { /* Split top */ in dlmalloc()
4172 size_t rsize = gm->topsize -= nb; in dlmalloc()
4174 mchunkptr r = gm->top = chunk_plus_offset(p, nb); in dlmalloc()
4176 set_size_and_pinuse_of_inuse_chunk(gm, p, nb); in dlmalloc()
4179 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4183 mem = sys_alloc(gm, nb); in dlmalloc()
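dlmalloc normalizes the user request before anything else: sizes below MIN_REQUEST become MIN_CHUNK_SIZE, ordinary sizes are padded by the chunk overhead and aligned, and impossibly large requests are forced to MAX_SIZE_T so that sys_alloc fails cleanly. The resulting nb then selects a small bin by index, the designated victim, a tree bin, or a split of top. A sketch of the normalization with assumed constants (the real values depend on the platform, FOOTERS, and alignment settings):

#include <stddef.h>
#include <stdio.h>

#define CHUNK_OVERHEAD ((size_t)8)     /* assumed per-chunk bookkeeping */
#define CHUNK_ALIGN    ((size_t)8)
#define MIN_CHUNK_SIZE ((size_t)32)
#define MIN_REQUEST    (MIN_CHUNK_SIZE - CHUNK_OVERHEAD)
#define MAX_REQUEST    ((size_t)-64)   /* beyond this, padding could overflow */

/* Turn a user byte count into the padded internal size nb. */
static size_t request_to_nb(size_t bytes) {
  if (bytes >= MAX_REQUEST)
    return (size_t)-1;                 /* nb = MAX_SIZE_T: force sys_alloc to fail */
  if (bytes < MIN_REQUEST)
    return MIN_CHUNK_SIZE;
  return (bytes + CHUNK_OVERHEAD + (CHUNK_ALIGN - 1)) & ~(CHUNK_ALIGN - 1);
}

int main(void) {
  printf("%zu %zu %zu\n", request_to_nb(1), request_to_nb(100), request_to_nb(1000));
  /* with these assumed constants: 32 112 1008 */
  return 0;
}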
4494 size_t nb; in mspace_malloc() local
4498 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes); in mspace_malloc()
4499 idx = small_index(nb); in mspace_malloc()
4511 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
4515 else if (nb > ms->dvsize) { in mspace_malloc()
4527 rsize = small_index2size(i) - nb; in mspace_malloc()
4532 set_size_and_pinuse_of_inuse_chunk(ms, p, nb); in mspace_malloc()
4533 r = chunk_plus_offset(p, nb); in mspace_malloc()
4538 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
4542 else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) { in mspace_malloc()
4543 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
4549 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */ in mspace_malloc()
4551 nb = pad_request(bytes); in mspace_malloc()
4552 if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) { in mspace_malloc()
4553 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
4558 if (nb <= ms->dvsize) { in mspace_malloc()
4559 size_t rsize = ms->dvsize - nb; in mspace_malloc()
4562 mchunkptr r = ms->dv = chunk_plus_offset(p, nb); in mspace_malloc()
4565 set_size_and_pinuse_of_inuse_chunk(ms, p, nb); in mspace_malloc()
4574 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
4578 else if (nb < ms->topsize) { /* Split top */ in mspace_malloc()
4579 size_t rsize = ms->topsize -= nb; in mspace_malloc()
4581 mchunkptr r = ms->top = chunk_plus_offset(p, nb); in mspace_malloc()
4583 set_size_and_pinuse_of_inuse_chunk(ms, p, nb); in mspace_malloc()
4586 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
4590 mem = sys_alloc(ms, nb); in mspace_malloc()
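mspace_malloc repeats the dlmalloc logic line for line, but against a caller-supplied mspace instead of the global state gm, which is what makes independent heaps possible. When malloc.c is built with mspaces enabled, it can be driven roughly like this (a usage sketch: create_mspace, mspace_malloc, mspace_free, and destroy_mspace are dlmalloc's documented mspace entry points, while the build flags and header name are assumptions that vary between distributions):

/* Compile together with malloc.c, e.g. cc -DMSPACES=1 app.c malloc.c */
#include <stdio.h>
#include "malloc.h"     /* or whichever header your dlmalloc copy exposes the mspace API in */

int main(void) {
  mspace ms = create_mspace(0, 0);      /* default initial capacity, no locking */
  void* p = mspace_malloc(ms, 1000);    /* goes through the nb logic listed above */
  printf("allocated %p from a private mspace\n", p);
  mspace_free(ms, p);
  destroy_mspace(ms);                   /* releases the whole heap at once */
  return 0;
}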