Lines Matching +full:- +full:- +full:bins
24 struct bin bins[64]; member
49 lock(mal.bins[i].lock); in lock_bin()
50 if (!mal.bins[i].head) in lock_bin()
51 mal.bins[i].head = mal.bins[i].tail = BIN_TO_CHUNK(i); in lock_bin()
56 unlock(mal.bins[i].lock); in unlock_bin()
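
These hits are from the classic musl allocator: 64 size-class bins, each with its own lock, and lock_bin lazily pointing an empty bin's circular list at its own sentinel on first use. A minimal compilable sketch of that pattern; spin_lock/spin_unlock are simplified stand-ins for musl's futex-based lock()/unlock(), and the chunk is reduced to its two list pointers:

    #include <stddef.h>

    struct chunk { struct chunk *next, *prev; };
    struct bin { volatile int lock[2]; struct chunk *head, *tail; };

    static struct bin bins[64];

    static void spin_lock(volatile int *l) { while (__sync_lock_test_and_set(&l[0], 1)); }
    static void spin_unlock(volatile int *l) { __sync_lock_release(&l[0]); }

    /* The bin's own head/tail words double as a sentinel chunk:
     * next aliases head and prev aliases tail, so an empty bin is
     * a ring containing only the sentinel. */
    #define BIN_TO_CHUNK(i) ((struct chunk *)&bins[i].head)

    static void lock_bin(int i) {
        spin_lock(bins[i].lock);
        if (!bins[i].head)   /* first lock of this bin: set up the ring */
            bins[i].head = bins[i].tail = BIN_TO_CHUNK(i);
    }

    static void unlock_bin(int i) {
        spin_unlock(bins[i].lock);
    }

The lazy initialization keeps the static bins array all-zero at startup, so the allocator needs no explicit init before the first lock_bin call.
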
78 return 32 + debruijn32[(y&-y)*0x076be629 >> 27]; in first_set()
80 return debruijn32[(y&-y)*0x076be629 >> 27]; in first_set()
82 return debruijn64[(x&-x)*0x022fdd63cc95386dull >> 58]; in first_set()
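
first_set returns the index of the lowest set bit: x & -x isolates that bit, multiplying by a de Bruijn constant slides a unique 5-bit pattern (6-bit in the 64-bit variant) into the top bits, and a small table maps the pattern back to a bit index. A standalone demonstration of the 32-bit variant; the table below is reconstructed to match the 0x076be629 constant used above:

    #include <stdio.h>
    #include <stdint.h>

    /* debruijn32[((1u << k) * 0x076be629) >> 27] == k for every k. */
    static const unsigned char debruijn32[32] = {
        0, 1, 23, 2, 29, 24, 19, 3, 30, 27, 25, 11, 20, 8, 4, 13,
        31, 22, 28, 18, 26, 10, 7, 12, 21, 17, 9, 6, 16, 5, 15, 14
    };

    static int first_set32(uint32_t y) {
        return debruijn32[(y & -y) * 0x076be629u >> 27];
    }

    int main(void) {
        printf("%d\n", first_set32(0x40));        /* 6 */
        printf("%d\n", first_set32(0x80000000u)); /* 31 */
        return 0;
    }

This branch-free lookup predates ubiquitous compiler builtins; __builtin_ctz computes the same thing on modern compilers.
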
95 x = x / SIZE_ALIGN - 1; in bin_index()
97 if (x < 512) return bin_tab[x/8-4]; in bin_index()
99 return bin_tab[x/128-4] + 16; in bin_index()
104 x = x / SIZE_ALIGN - 1; in bin_index_up()
106 x--; in bin_index_up()
107 if (x < 512) return bin_tab[x/8-4] + 1; in bin_index_up()
108 return bin_tab[x/128-4] + 17; in bin_index_up()
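
bin_index maps an adjusted chunk size to one of the 64 bins: dividing by SIZE_ALIGN and subtracting 1 gives exact classes for the first 32 bins, then bin_tab provides geometric classes for larger chunks. bin_index rounds down (which bin does this chunk belong in?) while bin_index_up rounds up (which bins are guaranteed large enough?), hence the x-- and the +1/+17 offsets. A sketch following the source's structure; the table and large-size cutoff are reproduced as best understood, so verify against the file:

    #include <stdio.h>
    #include <stddef.h>

    #define SIZE_ALIGN (4 * sizeof(size_t))   /* chunk granularity */

    /* Bins 0..31 are exact size classes; 32..63 cover ranges. */
    static const unsigned char bin_tab[60] = {
                    32,33,34,35,36,36,37,37,38,38,39,39,
        40,40,40,40,41,41,41,41,42,42,42,42,43,43,43,43,
        44,44,44,44,44,44,44,44,45,45,45,45,45,45,45,45,
        46,46,46,46,46,46,46,46,47,47,47,47,47,47,47,47,
    };

    static int bin_index(size_t x) {          /* round down */
        x = x / SIZE_ALIGN - 1;
        if (x <= 32) return x;
        if (x < 512) return bin_tab[x/8-4];
        if (x > 0x1c00) return 63;
        return bin_tab[x/128-4] + 16;
    }

    static int bin_index_up(size_t x) {       /* round up */
        x = x / SIZE_ALIGN - 1;
        if (x <= 32) return x;
        x--;
        if (x < 512) return bin_tab[x/8-4] + 1;
        return bin_tab[x/128-4] + 17;
    }

    int main(void) {
        size_t n = 40 * SIZE_ALIGN;
        printf("%d %d\n", bin_index(n), bin_index_up(n));  /* 32 33 */
        return 0;
    }

A free chunk of size n is binned with bin_index(n); a request of size n starts searching at bin_index_up(n), so every chunk found is large enough without a per-chunk size check in the common case.
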
119 c->csize & 15,
120 NEXT_CHUNK(c)->psize & 15);
122 if (mal.bins[i].head != BIN_TO_CHUNK(i) && mal.bins[i].head) {
123 fprintf(stderr, "bin %d: %p\n", i, mal.bins[i].head);
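
The & 15 expressions above are corruption diagnostics: chunk sizes are multiples of SIZE_ALIGN (four words), so of the low bits only bit 0, the C_INUSE flag, may legitimately be set, and anything else in the low nibble indicates a smashed header. The boundary-tag layout all of these hits assume, sketched with the usual musl macro names:

    #include <stddef.h>

    /* Boundary tags: every chunk records its own size (csize) and
     * its lower neighbor's (psize), so free() can merge both ways.
     * Bit 0 (C_INUSE) in either field means "in use, don't merge". */
    #define C_INUSE  ((size_t)1)
    #define OVERHEAD (2 * sizeof(size_t))   /* the two size words */

    struct chunk {
        size_t psize, csize;        /* header */
        struct chunk *next, *prev;  /* valid only while free, in a bin */
    };

    #define CHUNK_SIZE(c)   ((c)->csize & -2)
    #define MEM_TO_CHUNK(p) ((struct chunk *)((char *)(p) - OVERHEAD))
    #define CHUNK_TO_MEM(c) ((void *)((char *)(c) + OVERHEAD))
    #define NEXT_CHUNK(c)   ((struct chunk *)((char *)(c) + CHUNK_SIZE(c)))
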
133 * intersects the 'len'-sized interval below &libc.auxv
134 * (interpreted as the main-thread stack) or below &b
144 a = b > len ? b-len : 0; in traverses_stack_p()
148 a = b > len ? b-len : 0; in traverses_stack_p()
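
traverses_stack_p is the safety check before growing the heap via brk: the brk region and the main thread's stack share the address space with no hard boundary, so expansion is refused if the new interval would come within a large margin below either stack hint (&libc.auxv for the main-thread stack, or the address of a local for the current one). A sketch of one probe; the 8 MiB margin is the value used in the source, the rest of the shape is illustrative:

    #include <stdint.h>

    static int traverses_stack(uintptr_t old, uintptr_t new,
                               uintptr_t hint) {
        const uintptr_t len = 8u << 20;            /* guard margin */
        uintptr_t a = hint > len ? hint - len : 0; /* clamp at zero */
        return new > a && old < hint;              /* interval overlap */
    }
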
154 /* Expand the heap in-place if brk can be used, or otherwise via mmap,
168 if (n > SIZE_MAX/2 - PAGE_SIZE) { in __expand_heap()
172 n += -n & PAGE_SIZE-1; in __expand_heap()
176 brk += -brk & PAGE_SIZE-1; in __expand_heap()
179 if (n < SIZE_MAX-brk && !traverses_stack_p(brk, brk+n) in __expand_heap()
183 return (void *)(brk-n); in __expand_heap()
189 MAP_PRIVATE|MAP_ANONYMOUS, -1, 0); in __expand_heap()
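
__expand_heap tries brk first and falls back to mmap: the initial check rejects sizes near SIZE_MAX/2 so the later additions cannot overflow, and both n and the initial brk value are rounded up to whole pages with the branch-free -x & PAGE_SIZE-1 idiom. A small demonstration of that rounding (PAGE_SIZE assumed 4096 here; the source uses the libc value):

    #include <stdio.h>
    #include <stddef.h>

    #define PAGE_SIZE 4096

    int main(void) {
        size_t n = 5000;
        /* -n & (PAGE_SIZE-1) is the distance from n up to the next
         * page boundary, so this rounds n up without a division. */
        n += -n & (PAGE_SIZE - 1);
        printf("%zu\n", n);   /* 8192 */
        return 0;
    }
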
203 * overhead needs, but if the heap can't be extended in-place, in expand_heap()
204 * we need room for an extra zero-sized sentinel chunk. */ in expand_heap()
214 n -= SIZE_ALIGN; in expand_heap()
217 w->psize = 0 | C_INUSE; in expand_heap()
223 w->psize = n | C_INUSE; in expand_heap()
224 w->csize = 0 | C_INUSE; in expand_heap()
227 * zero-size sentinel header at the old end-of-heap. */ in expand_heap()
229 w->csize = n | C_INUSE; in expand_heap()
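
expand_heap adds SIZE_ALIGN to the request up front so that, when the new memory is not contiguous with the previous end of heap, it can give that slack back (n -= SIZE_ALIGN) and fabricate fencing: a C_INUSE psize below the new chunk and a zero-size C_INUSE sentinel above it, so coalescing in free can never walk past a region boundary. A sketch of the discontiguous case, reusing the layout sketched earlier:

    #include <stddef.h>

    #define C_INUSE    ((size_t)1)
    #define SIZE_ALIGN (4 * sizeof(size_t))
    #define OVERHEAD   (2 * sizeof(size_t))

    struct chunk { size_t psize, csize; struct chunk *next, *prev; };

    #define MEM_TO_CHUNK(p) ((struct chunk *)((char *)(p) - OVERHEAD))

    /* Fence a fresh SIZE_ALIGN-aligned region [p, p+n) as one big
     * in-use chunk between two sentinels. */
    static struct chunk *fence_region(char *p, size_t n) {
        n -= SIZE_ALIGN;              /* slack reserved for the fence */
        p += SIZE_ALIGN;              /* user area of the big chunk */
        struct chunk *c = MEM_TO_CHUNK(p);
        struct chunk *w = MEM_TO_CHUNK(p + n);
        c->psize = 0 | C_INUSE;       /* below: nothing to merge with */
        w->psize = n | C_INUSE;       /* footer of the big chunk */
        w->csize = 0 | C_INUSE;       /* above: zero-size sentinel */
        c->csize = n | C_INUSE;       /* header of the big chunk */
        return c;
    }

In the contiguous case the old end-of-heap sentinel simply becomes the new chunk's header, which is why the final csize store in the hits is written to serve both cases.
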
237 if (*n-1 > PTRDIFF_MAX - SIZE_ALIGN - PAGE_SIZE) { in adjust_size()
240 return -1; in adjust_size()
246 *n = (*n + OVERHEAD + SIZE_ALIGN - 1) & SIZE_MASK; in adjust_size()
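
adjust_size turns the user's byte count into an internal chunk size: the guard compares against PTRDIFF_MAX (not SIZE_MAX) so later pointer subtraction stays defined, and since *n-1 wraps around for zero, malloc(0) falls into the same branch and gets the minimum chunk instead of an error. Surviving sizes are then rounded: header overhead added, then aligned up to SIZE_ALIGN. A worked example on a 64-bit target (OVERHEAD 16, SIZE_ALIGN 32):

    #include <stdio.h>
    #include <stddef.h>

    #define SIZE_ALIGN (4 * sizeof(size_t))
    #define SIZE_MASK  (-SIZE_ALIGN)
    #define OVERHEAD   (2 * sizeof(size_t))

    int main(void) {
        size_t n = 100;
        /* 100 + 16 = 116, rounded up to a multiple of 32 -> 128 */
        n = (n + OVERHEAD + SIZE_ALIGN - 1) & SIZE_MASK;
        printf("%zu\n", n);   /* 128 */
        return 0;
    }
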
252 if (c->prev == c->next) in unbin()
254 c->prev->next = c->next; in unbin()
255 c->next->prev = c->prev; in unbin()
256 c->csize |= C_INUSE; in unbin()
257 NEXT_CHUNK(c)->psize |= C_INUSE; in unbin()
262 self->next = BIN_TO_CHUNK(i); in bin_chunk()
263 self->prev = mal.bins[i].tail; in bin_chunk()
264 self->next->prev = self; in bin_chunk()
265 self->prev->next = self; in bin_chunk()
266 if (self->prev == BIN_TO_CHUNK(i)) in bin_chunk()
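
unbin and bin_chunk are the two halves of list maintenance. Removal splices the chunk out of its circular list, re-marks it in use on both boundary tags, and clears the bin's binmap bit when the list empties (prev == next means only the sentinel remains). Insertion links at the tail and sets the binmap bit when the bin had been empty. A compilable, non-atomic sketch: a plain uint64_t stands in for the a_and_64/a_or_64 atomics, and the sentinel trick is done with offsetof:

    #include <stddef.h>
    #include <stdint.h>

    #define C_INUSE ((size_t)1)

    struct chunk { size_t psize, csize; struct chunk *next, *prev; };
    struct bin   { struct chunk *head, *tail; };

    static struct bin bins[64];
    static uint64_t binmap;   /* bit i set <=> bin i is non-empty */

    #define CHUNK_SIZE(c) ((c)->csize & -2)
    #define NEXT_CHUNK(c) ((struct chunk *)((char *)(c) + CHUNK_SIZE(c)))
    /* Offset backward so the bin's head/tail words line up with a
     * chunk's next/prev fields, giving each bin a free sentinel. */
    #define BIN_TO_CHUNK(i) \
        ((struct chunk *)((char *)&bins[i].head - offsetof(struct chunk, next)))

    static void unbin(struct chunk *c, int i) {
        if (c->prev == c->next)          /* last real chunk in the bin */
            binmap &= ~(1ULL << i);      /* source: atomic a_and_64 */
        c->prev->next = c->next;
        c->next->prev = c->prev;
        c->csize |= C_INUSE;
        NEXT_CHUNK(c)->psize |= C_INUSE;
    }

    static void bin_chunk(struct chunk *self, int i) {
        self->next = BIN_TO_CHUNK(i);    /* insert at the tail */
        self->prev = bins[i].tail;
        self->next->prev = self;
        self->prev->next = self;
        if (self->prev == BIN_TO_CHUNK(i))   /* bin was empty */
            binmap |= 1ULL << i;         /* source: atomic a_or_64 */
    }

    int main(void) {
        static struct chunk mem[2];
        mem[0].csize = sizeof(struct chunk); /* even => marked free */
        bins[3].head = bins[3].tail = BIN_TO_CHUNK(3);
        bin_chunk(&mem[0], 3);   /* binmap bit 3 is now set */
        unbin(&mem[0], 3);       /* and cleared again */
        return 0;
    }
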
275 if (n >= n1 - DONTCARE) return; in trim()
280 split->psize = n | C_INUSE; in trim()
281 split->csize = n1-n; in trim()
282 next->psize = n1-n; in trim()
283 self->csize = n | C_INUSE; in trim()
285 int i = bin_index(n1-n); in trim()
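
trim splits an oversized chunk before handing it out: if the surplus n1-n is worth keeping (more than DONTCARE), a new header boundary is written at offset n, the neighbor's psize is fixed up, and the remainder is binned by its own size. A sketch under the same layout, with DONTCARE assumed and the binning helper left abstract:

    #include <stddef.h>

    #define C_INUSE  ((size_t)1)
    #define DONTCARE 16   /* assumed "not worth splitting" threshold */

    struct chunk { size_t psize, csize; struct chunk *next, *prev; };

    #define CHUNK_SIZE(c) ((c)->csize & -2)
    #define NEXT_CHUNK(c) ((struct chunk *)((char *)(c) + CHUNK_SIZE(c)))

    void bin_chunk_by_size(struct chunk *c);   /* assumed helper */

    static void trim(struct chunk *self, size_t n) {
        size_t n1 = CHUNK_SIZE(self);
        if (n >= n1 - DONTCARE) return;   /* surplus too small to split */

        struct chunk *next  = NEXT_CHUNK(self);
        struct chunk *split = (struct chunk *)((char *)self + n);

        split->psize = n | C_INUSE;   /* self keeps n bytes, in use */
        split->csize = n1 - n;        /* the surplus chunk is free */
        next->psize  = n1 - n;
        self->csize  = n | C_INUSE;

        bin_chunk_by_size(split);     /* source: bin_index(n1-n) + insert */
    }
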
302 size_t len = n + OVERHEAD + PAGE_SIZE - 1 & -PAGE_SIZE; in malloc()
304 MAP_PRIVATE|MAP_ANONYMOUS, -1, 0); in malloc()
305 if (base == (void *)-1) return 0; in malloc()
306 c = (void *)(base + SIZE_ALIGN - OVERHEAD); in malloc()
307 c->csize = len - (SIZE_ALIGN - OVERHEAD); in malloc()
308 c->psize = SIZE_ALIGN - OVERHEAD; in malloc()
315 c = mal.bins[i].head; in malloc()
316 if (c != BIN_TO_CHUNK(i) && CHUNK_SIZE(c)-n <= DONTCARE) { in malloc()
324 for (mask = mal.binmap & -(1ULL<<i); mask; mask -= (mask&-mask)) { in malloc()
327 c = mal.bins[j].head; in malloc()
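
malloc has two regimes. Above the mmap threshold, the request gets its own anonymous mapping: the header is placed SIZE_ALIGN - OVERHEAD into it so user memory is aligned, and psize records exactly that slack, which is how free later recognizes and unmaps such chunks. Below the threshold, the allocator searches bins: the head-of-bin fast path above takes an exact-enough fit, and otherwise mask = binmap & -(1ULL<<i) restricts the 64-bit bin bitmap to bins >= i, with mask -= mask & -mask peeling one candidate per iteration, lowest first. A sketch of that scan, with a compiler builtin standing in for the first_set helper:

    #include <stdint.h>
    #include <stdio.h>

    /* Visit the non-empty bins >= i in ascending order and return
     * the first one; -1 means the heap must be expanded instead. */
    static int scan_bins(uint64_t binmap, int i) {
        uint64_t mask;
        for (mask = binmap & -(1ULL << i); mask; mask -= mask & -mask) {
            int j = __builtin_ctzll(mask);  /* lowest remaining bin */
            /* real code: lock bin j, re-check its head, unbin, trim */
            return j;
        }
        return -1;
    }

    int main(void) {
        printf("%d\n", scan_bins(0x50, 3));  /* bins 4,6 set -> 4 */
        return 0;
    }

The loop shape matters: between reading binmap and locking a bin, another thread may empty it, so the real code re-checks the bin head under the lock and continues the scan on failure.
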
365 if (n<=n0 && n0-n<=DONTCARE) return p; in realloc()
368 size_t extra = self->psize; in realloc()
369 char *base = (char *)self - extra; in realloc()
374 if (newlen < PAGE_SIZE && (new = malloc(n-OVERHEAD))) { in realloc()
378 newlen = (newlen + PAGE_SIZE-1) & -PAGE_SIZE; in realloc()
381 if (base == (void *)-1) in realloc()
384 self->csize = newlen - extra; in realloc()
391 if (next->psize != self->csize) a_crash(); in realloc()
399 self->csize = split->psize = n | C_INUSE; in realloc()
400 split->csize = next->psize = n0-n | C_INUSE; in realloc()
407 size_t nsize = next->csize & C_INUSE ? 0 : CHUNK_SIZE(next); in realloc()
411 if (!(next->csize & C_INUSE)) { in realloc()
415 self->csize = next->psize = n0+nsize | C_INUSE; in realloc()
426 new = malloc(n-OVERHEAD); in realloc()
429 memcpy(new, p, (n<n0 ? n : n0) - OVERHEAD); in realloc()
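
Three realloc paths are visible in these hits. For chunks that own a mapping, psize holds the slack before the header, so base = self - extra recovers the mapping; small targets are copied into a fresh malloc'd block (cheaper than keeping a page-granular mapping), while large ones are resized, possibly moved, with mremap. Heap chunks grow in place by absorbing a free successor, or split when shrinking; only as a last resort does realloc fall back to malloc plus a memcpy of min(n, n0) - OVERHEAD bytes. A sketch of the mapping resize using the public mremap (the source calls its internal __mremap):

    #define _GNU_SOURCE
    #include <sys/mman.h>
    #include <stddef.h>

    /* base/oldlen come from the chunk header (base = chunk - psize,
     * oldlen = extra + csize); newlen is rounded to whole pages. */
    static void *grow_mapping(void *base, size_t oldlen, size_t newlen) {
        size_t page = 4096;   /* assumed */
        newlen = (newlen + page - 1) & -page;
        void *p = mremap(base, oldlen, newlen, MREMAP_MAYMOVE);
        return p == MAP_FAILED ? NULL : p;
    }
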
439 if (next->psize != self->csize) a_crash(); in __bin_chunk()
446 * in-use can race; in-use to free is impossible */ in __bin_chunk()
447 size_t psize = self->psize & C_INUSE ? 0 : CHUNK_PSIZE(self); in __bin_chunk()
448 size_t nsize = next->csize & C_INUSE ? 0 : CHUNK_SIZE(next); in __bin_chunk()
453 if (!(self->psize & C_INUSE)) { in __bin_chunk()
464 if (!(next->csize & C_INUSE)) { in __bin_chunk()
475 self->csize = size; in __bin_chunk()
476 next->psize = size; in __bin_chunk()
481 if (size > RECLAIM && (size^(size-osize)) > size-osize) { in __bin_chunk()
482 uintptr_t a = (uintptr_t)self + SIZE_ALIGN+PAGE_SIZE-1 & -PAGE_SIZE; in __bin_chunk()
483 uintptr_t b = (uintptr_t)next - SIZE_ALIGN & -PAGE_SIZE; in __bin_chunk()
486 __madvise((void *)a, b-a, MADV_DONTNEED); in __bin_chunk()
488 __mmap((void *)a, b-a, PROT_READ|PROT_WRITE, in __bin_chunk()
489 MAP_PRIVATE|MAP_ANONYMOUS|MAP_FIXED, -1, 0); in __bin_chunk()
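
__bin_chunk is the heart of free: after the footer integrity check, it merges with whichever neighbors are free (under the allocator's merge lock, only the free-to-in-use transition can race, as the comment above notes), rewrites the boundary tags, and bins the result. The reclaim test then decides whether to give pages back: size is the merged chunk, size-osize the portion that was already free beforehand, and the xor trick passes only when size's top bit is above that of size-osize, i.e. when this free roughly at least doubled the contiguous free run; that throttles madvise traffic when small frees land next to a huge free zone. A sketch of the page-interior release:

    #include <stdint.h>
    #include <sys/mman.h>

    #define PAGE_SIZE  4096u   /* assumed */
    #define SIZE_ALIGN (4 * sizeof(size_t))

    /* Release the whole pages strictly inside the free chunk
     * [self, next), preserving the headers at both ends (plus a
     * SIZE_ALIGN margin) so the boundary-tag walk stays intact. */
    static void reclaim_interior(uintptr_t self, uintptr_t next) {
        uintptr_t a = (self + SIZE_ALIGN + PAGE_SIZE - 1) & -(uintptr_t)PAGE_SIZE;
        uintptr_t b = (next - SIZE_ALIGN) & -(uintptr_t)PAGE_SIZE;
        if (b > a)
            madvise((void *)a, b - a, MADV_DONTNEED);
    }

MADV_DONTNEED lets the kernel repopulate those pages as zero-filled on the next touch, which is safe here precisely because the chunk's bookkeeping words stay outside [a, b).
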
499 size_t extra = self->psize; in unmap_chunk()
500 char *base = (char *)self - extra; in unmap_chunk()
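
unmap_chunk is the counterpart for chunks that own their mapping: psize stores the fixed slack between the mapping base and the header rather than a real neighbor size, so the base and total length fall out directly. An odd psize is impossible for such a chunk, so the source treats it as evidence of a double free or overflow and crashes deliberately; a builtin trap stands in here:

    #include <sys/mman.h>
    #include <stddef.h>

    struct chunk { size_t psize, csize; struct chunk *next, *prev; };

    #define CHUNK_SIZE(c) ((c)->csize & -2)

    static void unmap_chunk(struct chunk *self) {
        size_t extra = self->psize;            /* slack before header */
        char *base = (char *)self - extra;     /* mapping base */
        size_t len = CHUNK_SIZE(self) + extra; /* whole mapping */
        if (extra & 1) __builtin_trap();       /* source: a_crash() */
        munmap(base, len);
    }
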
523 size_t align_start_up = (SIZE_ALIGN-1) & (-(uintptr_t)start - OVERHEAD); in __malloc_donate()
524 size_t align_end_down = (SIZE_ALIGN-1) & (uintptr_t)end; in __malloc_donate()
529 if (end - start <= OVERHEAD + align_start_up + align_end_down) in __malloc_donate()
532 end -= align_end_down; in __malloc_donate()
535 c->psize = n->csize = C_INUSE; in __malloc_donate()
536 c->csize = n->psize = C_INUSE | (end-start); in __malloc_donate()
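
__malloc_donate feeds stray memory (for example, unused space at the ends of loaded ELF segments) into the allocator. The two masks compute how much to trim so that, after skipping OVERHEAD header bytes, the user area lands on a SIZE_ALIGN boundary; the size check rejects regions too small to survive the trimming, and what remains becomes an ordinary free chunk fenced by C_INUSE sentinels. A worked example of the alignment arithmetic on a hypothetical region (64-bit: SIZE_ALIGN 32, OVERHEAD 16):

    #include <stdio.h>
    #include <stdint.h>

    #define SIZE_ALIGN (4 * sizeof(size_t))
    #define OVERHEAD   (2 * sizeof(size_t))

    int main(void) {
        uintptr_t start = 0x1009, end = 0x2000;   /* donated region */
        size_t up    = (SIZE_ALIGN-1) & (-start - OVERHEAD);
        size_t down  = (SIZE_ALIGN-1) & end;
        size_t chunk = end - down - (start + up + OVERHEAD);
        /* head trim 7, tail trim 0, chunk size 4064 */
        printf("head %zu, tail %zu, chunk %zu\n", up, down, chunk);
        return 0;
    }
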
545 lock(mal.bins[i].lock); in __malloc_atfork()
548 unlock(mal.bins[i].lock); in __malloc_atfork()
552 mal.bins[i].lock[0] = mal.bins[i].lock[1] = 0; in __malloc_atfork()
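
__malloc_atfork implements the fork protocol: before fork, take every bin lock so no list is mid-update when the address space is snapshotted; afterwards the parent unlocks normally, but the child zeroes the lock words outright, both the lock and its waiter count, because the waiting threads simply do not exist in the child. musl drives this from fork() via an internal hook; a pthread_atfork registration models the same shape:

    #include <pthread.h>

    #define NBINS 64
    static volatile int bin_lock[NBINS][2];   /* [0]=lock, [1]=waiters */

    static void take(volatile int *l) { while (__sync_lock_test_and_set(&l[0], 1)); }
    static void drop(volatile int *l) { __sync_lock_release(&l[0]); }

    static void before(void)    { for (int i = 0; i < NBINS; i++) take(bin_lock[i]); }
    static void in_parent(void) { for (int i = 0; i < NBINS; i++) drop(bin_lock[i]); }
    static void in_child(void) {
        for (int i = 0; i < NBINS; i++)
            bin_lock[i][0] = bin_lock[i][1] = 0;   /* discard stale state */
    }

    static void install_hooks(void) {
        pthread_atfork(before, in_parent, in_child);
    }
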