Lines Matching refs:SIZE_SZ
1274 #define SIZE_SZ (sizeof(INTERNAL_SIZE_T))
1275 #define MALLOC_ALIGNMENT (SIZE_SZ + SIZE_SZ)
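
Taken together, these two defines fix the allocator's basic units: SIZE_SZ is the width of one chunk header field, and every chunk is aligned to twice that. A minimal standalone sketch (assuming dlmalloc's default of INTERNAL_SIZE_T being size_t; the main() harness is mine) that prints the resulting constants:

    #include <stdio.h>
    #include <stddef.h>

    typedef size_t INTERNAL_SIZE_T;   /* dlmalloc's default unless overridden */

    #define SIZE_SZ          (sizeof(INTERNAL_SIZE_T))
    #define MALLOC_ALIGNMENT (SIZE_SZ + SIZE_SZ)

    int main(void)
    {
        /* ILP32: SIZE_SZ = 4, MALLOC_ALIGNMENT = 8.
           LP64:  SIZE_SZ = 8, MALLOC_ALIGNMENT = 16. */
        printf("SIZE_SZ = %zu, MALLOC_ALIGNMENT = %zu\n",
               (size_t)SIZE_SZ, (size_t)MALLOC_ALIGNMENT);
        return 0;
    }
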
1281 #define chunk2mem(p) ((Void_t*)((char*)(p) + 2*SIZE_SZ))
1282 #define mem2chunk(mem) ((mchunkptr)((char*)(mem) - 2*SIZE_SZ))
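
chunk2mem() and mem2chunk() convert between a chunk header pointer and the pointer handed to the application; the fixed 2*SIZE_SZ offset skips the prev_size and size fields. A round-trip sketch, using a cut-down stand-in for struct malloc_chunk and a fake arena buffer of my own:

    #include <stdio.h>
    #include <stddef.h>

    typedef size_t INTERNAL_SIZE_T;
    typedef void Void_t;

    /* Cut-down stand-in: only the two header fields that sit in
       front of an in-use chunk's user data. */
    struct malloc_chunk {
        INTERNAL_SIZE_T prev_size;
        INTERNAL_SIZE_T size;
    };
    typedef struct malloc_chunk* mchunkptr;

    #define SIZE_SZ        (sizeof(INTERNAL_SIZE_T))
    #define chunk2mem(p)   ((Void_t*)((char*)(p) + 2*SIZE_SZ))
    #define mem2chunk(mem) ((mchunkptr)((char*)(mem) - 2*SIZE_SZ))

    int main(void)
    {
        static INTERNAL_SIZE_T arena[8];      /* pretend heap space */
        mchunkptr p = (mchunkptr)arena;

        Void_t* mem = chunk2mem(p);           /* what malloc() hands out */
        printf("user pointer sits %td bytes past the header\n",
               (char*)mem - (char*)p);
        printf("round trip intact: %d\n", mem2chunk(mem) == p);
        return 0;
    }
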
1287 (((long)((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) < \
1289 (((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) & ~(MALLOC_ALIGN_MASK)))
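
Line 1288 is elided above because it does not mention SIZE_SZ, so the macro reads better in full. A worked sketch, assuming the stock dlmalloc 2.6.x companions MINSIZE (sizeof(struct malloc_chunk)) and MALLOC_ALIGN_MASK (MALLOC_ALIGNMENT - 1): request2size() adds SIZE_SZ of header overhead, rounds up to the alignment, and never returns less than MINSIZE.

    #include <stdio.h>
    #include <stddef.h>

    typedef size_t INTERNAL_SIZE_T;
    struct malloc_chunk {
        INTERNAL_SIZE_T prev_size;
        INTERNAL_SIZE_T size;
        struct malloc_chunk* fd;
        struct malloc_chunk* bk;
    };

    #define SIZE_SZ           (sizeof(INTERNAL_SIZE_T))
    #define MALLOC_ALIGNMENT  (SIZE_SZ + SIZE_SZ)
    #define MALLOC_ALIGN_MASK (MALLOC_ALIGNMENT - 1)
    #define MINSIZE           (sizeof(struct malloc_chunk))

    /* Pad the request with SIZE_SZ overhead, round up to the
       alignment, and clamp small requests to MINSIZE. */
    #define request2size(req) \
     (((long)((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) < \
       (long)(MINSIZE + MALLOC_ALIGN_MASK)) ? MINSIZE : \
      (((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) & ~(MALLOC_ALIGN_MASK)))

    int main(void)
    {
        /* On ILP32 (SIZE_SZ = 4, MINSIZE = 16): 1 -> 16, 12 -> 16, 13 -> 24. */
        size_t reqs[] = { 1, 12, 13, 100 };
        for (size_t i = 0; i < sizeof reqs / sizeof *reqs; i++)
            printf("request2size(%zu) = %zu\n",
                   reqs[i], (size_t)request2size(reqs[i]));
        return 0;
    }
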
1436 #define bin_at(i) ((mbinptr)((char*)&(av_[2*(i) + 2]) - 2*SIZE_SZ))
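
bin_at() is a layout pun. av_[] is a flat array of pointers; stepping to &av_[2*i + 2] and then backing up 2*SIZE_SZ bytes produces a fake chunk whose fd and bk fields alias exactly those array slots, so each bin works as a list head without storing real prev_size/size fields. A sketch demonstrating the aliasing (the four-bin av_ here is a toy stand-in; real dlmalloc sizes it with NAV):

    #include <stdio.h>
    #include <stddef.h>

    typedef size_t INTERNAL_SIZE_T;
    struct malloc_chunk {
        INTERNAL_SIZE_T prev_size;
        INTERNAL_SIZE_T size;
        struct malloc_chunk* fd;
        struct malloc_chunk* bk;
    };
    typedef struct malloc_chunk* mbinptr;

    #define SIZE_SZ (sizeof(INTERNAL_SIZE_T))

    /* Toy bin array: two leading slots, then one fd/bk pair per bin. */
    static mbinptr av_[2 + 2*4];

    #define bin_at(i) ((mbinptr)((char*)&(av_[2*(i) + 2]) - 2*SIZE_SZ))

    int main(void)
    {
        /* Backing up over the nonexistent prev_size/size fields puts
           the fake chunk's fd/bk right on the array slots (this relies
           on pointers and INTERNAL_SIZE_T having the same width). */
        for (int i = 0; i < 4; i++)
            printf("bin %d: fd ok %d, bk ok %d\n", i,
                   (void*)&bin_at(i)->fd == (void*)&av_[2*i + 2],
                   (void*)&bin_at(i)->bk == (void*)&av_[2*i + 3]);
        return 0;
    }
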
1643 else /* markers are always of size SIZE_SZ */
1644 assert(sz == SIZE_SZ);
1811 /* For mmapped chunks, the overhead is one SIZE_SZ unit larger, because
1814 size = (size + SIZE_SZ + page_mask) & ~page_mask;
1892 /* Note the extra SIZE_SZ overhead as in mmap_chunk(). */
1893 new_size = (new_size + offset + SIZE_SZ + page_mask) & ~page_mask;
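
The comment truncated at line 1811 continues past the match: an mmapped chunk has no following chunk whose prev_size field the user data could spill into, so one extra SIZE_SZ is paid before rounding the allocation up to whole pages. A small sketch of that rounding, with hypothetical ILP32 numbers (4-byte SIZE_SZ, 4 KB pages):

    #include <stdio.h>
    #include <stddef.h>

    int main(void)
    {
        size_t SIZE_SZ = 4;            /* hypothetical ILP32 value  */
        size_t page_mask = 4096 - 1;   /* hypothetical 4 KB pages   */

        /* mmap_chunk()/mremap_chunk(): add the extra SIZE_SZ, then
           round up to a page multiple. */
        size_t nb = 20000;
        size_t size = (nb + SIZE_SZ + page_mask) & ~page_mask;
        printf("%zu -> %zu bytes (%zu pages)\n", nb, size, size / 4096);
        return 0;
    }
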
2015 old_top_size = (old_top_size - 3*SIZE_SZ) & ~MALLOC_ALIGN_MASK;
2018 SIZE_SZ|PREV_INUSE;
2019 chunk_at_offset(old_top, old_top_size + SIZE_SZ)->size =
2020 SIZE_SZ|PREV_INUSE;
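
These four lines are malloc_extend_top() handling a non-contiguous sbrk(): the old top chunk cannot simply grow, so its size is trimmed back (keeping alignment, reserving 3*SIZE_SZ) and two SIZE_SZ-sized fencepost headers are written after it; those fenceposts are the size-SIZE_SZ markers asserted at line 1644. A toy sketch of the fencepost writes, with a local buffer of my own standing in for the old top chunk:

    #include <stdio.h>
    #include <stddef.h>

    typedef size_t INTERNAL_SIZE_T;
    struct malloc_chunk {
        INTERNAL_SIZE_T prev_size;
        INTERNAL_SIZE_T size;
    };

    #define SIZE_SZ               (sizeof(INTERNAL_SIZE_T))
    #define MALLOC_ALIGNMENT      (SIZE_SZ + SIZE_SZ)
    #define MALLOC_ALIGN_MASK     (MALLOC_ALIGNMENT - 1)
    #define PREV_INUSE            0x1
    #define chunk_at_offset(p, s) ((struct malloc_chunk*)((char*)(p) + (s)))

    int main(void)
    {
        static INTERNAL_SIZE_T old_top[32];   /* toy old top chunk */
        size_t old_top_size = sizeof old_top;

        /* Trim the old top, reserving room for the fenceposts. */
        old_top_size = (old_top_size - 3*SIZE_SZ) & ~MALLOC_ALIGN_MASK;

        /* Two minimal headers terminate the old region so the
           trimmed top can later be freed safely. */
        chunk_at_offset(old_top, old_top_size)->size           = SIZE_SZ|PREV_INUSE;
        chunk_at_offset(old_top, old_top_size + SIZE_SZ)->size = SIZE_SZ|PREV_INUSE;

        printf("top trimmed to %zu bytes; fenceposts written\n", old_top_size);
        return 0;
    }
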
2543 /* Note the extra SIZE_SZ overhead. */
2544 if(oldsize - SIZE_SZ >= nb) return oldmem; /* do nothing */
2548 MALLOC_COPY(newmem, oldmem, oldsize - 2*SIZE_SZ);
2613 MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
2629 MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
2641 MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
2664 MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
2894 MALLOC_ZERO(mem, csz - SIZE_SZ);
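
The copy and zero lengths in realloc() and calloc() all come from the same accounting. For an ordinary chunk of chunksize oldsize, the user data runs up into the next chunk's prev_size field, so only one SIZE_SZ of the chunk is overhead and oldsize - SIZE_SZ bytes are copied (lines 2613-2664) or zeroed (line 2894). The mmapped chunk copied at line 2548 has no next chunk, so both header fields are overhead and only oldsize - 2*SIZE_SZ bytes carry user data; that same missing SIZE_SZ is why line 2544 compares against oldsize - SIZE_SZ. Concretely, with hypothetical ILP32 numbers:

    #include <stdio.h>
    #include <stddef.h>

    int main(void)
    {
        size_t SIZE_SZ = 4;        /* hypothetical ILP32 value */
        size_t oldsize = 4096;     /* chunksize of the old chunk */

        /* Ordinary chunk: data borrows the next chunk's prev_size. */
        printf("sbrk'd chunk:  copy %zu bytes\n", oldsize - SIZE_SZ);

        /* Mmapped chunk: no next chunk, both header fields lost. */
        printf("mmapped chunk: copy %zu bytes\n", oldsize - 2*SIZE_SZ);
        return 0;
    }
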
3027 return chunksize(p) - SIZE_SZ;
3029 return chunksize(p) - 2*SIZE_SZ;
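
malloc_usable_size() is the same arithmetic in reverse: line 3027 is the ordinary-chunk branch, line 3029 the mmapped one. A condensed model of the two returns (not the original function, which works from chunksize(p) after its in-use checks):

    #include <stdio.h>
    #include <stddef.h>

    typedef size_t INTERNAL_SIZE_T;
    #define SIZE_SZ (sizeof(INTERNAL_SIZE_T))

    /* csz stands for chunksize(p), i.e. the size field with its
       low flag bits already stripped. */
    static size_t usable_size(size_t csz, int is_mmapped)
    {
        if (!is_mmapped)
            return csz - SIZE_SZ;      /* borrows next chunk's prev_size */
        return csz - 2*SIZE_SZ;        /* both header fields are overhead */
    }

    int main(void)
    {
        /* On ILP32 a 4096-byte chunk yields 4092 vs 4088 usable bytes. */
        printf("sbrk'd:  %zu\n", usable_size(4096, 0));
        printf("mmapped: %zu\n", usable_size(4096, 1));
        return 0;
    }
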