Lines Matching refs:MIN_CHUNK_SIZE

1684 #define MIN_CHUNK_SIZE\  macro
1694 #define MAX_REQUEST ((-MIN_CHUNK_SIZE) << 2)
1695 #define MIN_REQUEST (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - SIZE_T_ONE)
1703 (((req) < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(req))
2188 (align_offset(chunk2mem(0))+pad_request(sizeof(struct malloc_segment))+MIN_CHUNK_SIZE)
2287 #define MIN_SMALL_INDEX (small_index(MIN_CHUNK_SIZE))
2577 (MAX_SIZE_T < MIN_CHUNK_SIZE) || in init_mparams()
2669 if (sz >= MIN_CHUNK_SIZE) { in do_check_free_chunk()
2690 assert(sz >= MIN_CHUNK_SIZE); in do_check_malloced_chunk()
2693 assert(is_mmapped(p) || sz < (s + MIN_CHUNK_SIZE)); in do_check_malloced_chunk()
2863 assert(m->dvsize >= MIN_CHUNK_SIZE); in do_check_malloc_state()
2968 assert(S >= MIN_CHUNK_SIZE);\
3334 assert(qsize >= MIN_CHUNK_SIZE); in prepend_alloc()
3375 char* csp = (asp < (old_top + MIN_CHUNK_SIZE))? old_top : asp; in add_segment()
3778 if (rsize < MIN_CHUNK_SIZE) in tmalloc_large()
3817 if (rsize < MIN_CHUNK_SIZE) in tmalloc_small()
3856 if (rsize >= MIN_CHUNK_SIZE) { in internal_realloc()
3908 if (alignment < MIN_CHUNK_SIZE) /* must be at least a minimum chunk size */ in internal_memalign()
3909 alignment = MIN_CHUNK_SIZE; in internal_memalign()
3923 size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD; in internal_memalign()
3944 char* pos = ((size_t)(br - (char*)(p)) >= MIN_CHUNK_SIZE)? in internal_memalign()
3965 if (size > nb + MIN_CHUNK_SIZE) { in internal_memalign()
4153 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes); in dlmalloc()
4184 if (SIZE_T_SIZE != 4 && rsize < MIN_CHUNK_SIZE) in dlmalloc()
4216 if (rsize >= MIN_CHUNK_SIZE) { /* split dv */ in dlmalloc()
4560 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes); in mspace_malloc()
4591 if (SIZE_T_SIZE != 4 && rsize < MIN_CHUNK_SIZE) in mspace_malloc()
4623 if (rsize >= MIN_CHUNK_SIZE) { /* split dv */ in mspace_malloc()
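
The listing above centers on the request-size plumbing: MIN_CHUNK_SIZE and MIN_REQUEST (lines 1684-1695), the request2size branch at line 1703, and the inlined form of that branch at lines 4153 and 4560. As a standalone sketch of how those pieces fit together, the following compiles on its own; the values chosen for MCHUNK_SIZE, MALLOC_ALIGNMENT, and CHUNK_OVERHEAD are assumptions for a plain LP64 build without FOOTERS, not the configuration-dependent definitions in malloc.c itself.

#include <stdio.h>
#include <stddef.h>

/* Assumed constants for a 64-bit, non-FOOTERS build; the real ones
   in malloc.c depend on build options. */
#define SIZE_T_SIZE       (sizeof(size_t))
#define SIZE_T_ONE        ((size_t)1)
#define MALLOC_ALIGNMENT  ((size_t)(2 * sizeof(void *)))
#define CHUNK_ALIGN_MASK  (MALLOC_ALIGNMENT - SIZE_T_ONE)
#define CHUNK_OVERHEAD    (SIZE_T_SIZE)
#define MCHUNK_SIZE       (4 * SIZE_T_SIZE)   /* prev_foot, head, fd, bk */

/* Smallest chunk the allocator will carve out or leave behind. */
#define MIN_CHUNK_SIZE \
  ((MCHUNK_SIZE + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)

/* Requests below MIN_REQUEST round up to MIN_CHUNK_SIZE
   (cf. lines 1695, 1703, 4153, 4560 in the listing). */
#define MIN_REQUEST       (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - SIZE_T_ONE)
#define pad_request(req) \
  (((req) + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
#define request2size(req) \
  (((req) < MIN_REQUEST) ? MIN_CHUNK_SIZE : pad_request(req))

int main(void) {
  size_t samples[] = {1, 8, MIN_REQUEST, 24, 100};
  printf("MIN_CHUNK_SIZE = %zu, MIN_REQUEST = %zu\n",
         (size_t)MIN_CHUNK_SIZE, (size_t)MIN_REQUEST);
  for (size_t i = 0; i < sizeof samples / sizeof samples[0]; i++)
    printf("request %3zu -> chunk %3zu\n",
           samples[i], (size_t)request2size(samples[i]));
  return 0;
}

Under these assumptions MIN_CHUNK_SIZE is 32 and MIN_REQUEST is 23, so any request of 1 to 23 bytes maps to a 32-byte chunk, which is exactly the rounding performed by the ternary at lines 4153 and 4560.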