#ifndef JEMALLOC_INTERNAL_INLINES_C_H
#define JEMALLOC_INTERNAL_INLINES_C_H

#include "jemalloc/internal/jemalloc_internal_types.h"
#include "jemalloc/internal/sz.h"
#include "jemalloc/internal/witness.h"

/*
 * Translating the names of the 'i' functions:
 *   Abbreviations used in the first part of the function name (before
 *   alloc/dalloc) describe what that function accomplishes:
 *     a: arena (query)
 *     s: size (query, or sized deallocation)
 *     e: extent (query)
 *     p: aligned (allocates)
 *     vs: size (query, without knowing that the pointer is into the heap)
 *     r: rallocx implementation
 *     x: xallocx implementation
 *   Abbreviations used in the second part of the function name (after
 *   alloc/dalloc) describe the arguments it takes.
 *     z: whether to return zeroed memory
 *     t: accepts a tcache_t * parameter
 *     m: accepts an arena_t * parameter
 */
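/*
 * For example, "ipallocztm" reads as: 'i' prefix, 'p' (aligned allocation),
 * then 'z' (zeroed-memory flag), 't' (tcache_t * argument), and 'm'
 * (arena_t * argument).  Likewise, "isdalloct" is a sized deallocation that
 * accepts a tcache_t *.
 */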

JEMALLOC_ALWAYS_INLINE arena_t *
iaalloc(tsdn_t *tsdn, const void *ptr) {
	assert(ptr != NULL);

	return arena_aalloc(tsdn, ptr);
}

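/* Size query ("s"): return the usable size of the allocation at ptr. */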
JEMALLOC_ALWAYS_INLINE size_t
isalloc(tsdn_t *tsdn, const void *ptr) {
	assert(ptr != NULL);

	return arena_salloc(tsdn, ptr);
}

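/*
 * Core allocation path; "z", "t", and "m" mean that the zeroed flag, the
 * tcache, and the arena are all explicit arguments.  Internal allocations
 * are charged to the owning arena's internal stats and, per the asserts
 * below, must not use a tcache or a manual (non-auto) arena.
 *
 * Minimal usage sketch, assuming sz_size2index() from sz.h to map a request
 * size to its size class index:
 *
 *     szind_t ind = sz_size2index(size);
 *     void *p = iallocztm(tsdn, size, ind, false, NULL, false, NULL, true);
 */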
JEMALLOC_ALWAYS_INLINE void *
iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,
    bool is_internal, arena_t *arena, bool slow_path) {
	void *ret;

	assert(size != 0);
	assert(!is_internal || tcache == NULL);
	assert(!is_internal || arena == NULL || arena_is_auto(arena));
	if (!tsdn_null(tsdn) && tsd_reentrancy_level_get(tsdn_tsd(tsdn)) == 0) {
		witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
		    WITNESS_RANK_CORE, 0);
	}

	ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
	if (config_stats && is_internal && likely(ret != NULL)) {
		arena_internal_add(iaalloc(tsdn, ret), isalloc(tsdn, ret));
	}
	return ret;
}

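/* Convenience wrapper: allocate via the thread's tcache and an auto arena. */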
JEMALLOC_ALWAYS_INLINE void *
ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero, bool slow_path) {
	return iallocztm(tsd_tsdn(tsd), size, ind, zero, tcache_get(tsd), false,
	    NULL, slow_path);
}

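/*
 * Aligned allocation ("p").  The caller must pass a usize that is already
 * the result of sz_sa2u() for this alignment, as checked by the assert
 * below.
 */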
JEMALLOC_ALWAYS_INLINE void *
ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
    tcache_t *tcache, bool is_internal, arena_t *arena) {
	void *ret;

	assert(usize != 0);
	assert(usize == sz_sa2u(usize, alignment));
	assert(!is_internal || tcache == NULL);
	assert(!is_internal || arena == NULL || arena_is_auto(arena));
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
	assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
	if (config_stats && is_internal && likely(ret != NULL)) {
		arena_internal_add(iaalloc(tsdn, ret), isalloc(tsdn, ret));
	}
	return ret;
}

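/* As ipallocztm(), but never accounted as an internal allocation. */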
JEMALLOC_ALWAYS_INLINE void *
ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
    tcache_t *tcache, arena_t *arena) {
	return ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena);
}

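/* Aligned allocation via the thread's tcache and an auto arena. */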
JEMALLOC_ALWAYS_INLINE void *
ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) {
	return ipallocztm(tsd_tsdn(tsd), usize, alignment, zero,
	    tcache_get(tsd), false, NULL);
}

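/*
 * "vs": size query that is safe even when ptr is not known to point into
 * the heap (see the naming comment above).
 */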
JEMALLOC_ALWAYS_INLINE size_t
ivsalloc(tsdn_t *tsdn, const void *ptr) {
	return arena_vsalloc(tsdn, ptr);
}

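/*
 * Core deallocation path.  Internal deallocations are subtracted from the
 * owning arena's internal stats; reentrant (non-internal) calls must not
 * carry a tcache, per the assert below.
 */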
JEMALLOC_ALWAYS_INLINE void
idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx,
    bool is_internal, bool slow_path) {
	assert(ptr != NULL);
	assert(!is_internal || tcache == NULL);
	assert(!is_internal || arena_is_auto(iaalloc(tsdn, ptr)));
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);
	if (config_stats && is_internal) {
		arena_internal_sub(iaalloc(tsdn, ptr), isalloc(tsdn, ptr));
	}
	if (!is_internal && !tsdn_null(tsdn) &&
	    tsd_reentrancy_level_get(tsdn_tsd(tsdn)) != 0) {
		assert(tcache == NULL);
	}
	arena_dalloc(tsdn, ptr, tcache, alloc_ctx, slow_path);
}

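/* Deallocate via the thread's tcache, always taking the slow path. */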
JEMALLOC_ALWAYS_INLINE void
idalloc(tsd_t *tsd, void *ptr) {
	idalloctm(tsd_tsdn(tsd), ptr, tcache_get(tsd), NULL, false, true);
}

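/* Sized deallocation ("s"): the size is supplied by the caller. */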
JEMALLOC_ALWAYS_INLINE void
isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
    alloc_ctx_t *alloc_ctx, bool slow_path) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);
	arena_sdalloc(tsdn, ptr, size, tcache, alloc_ctx, slow_path);
}

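/*
 * Helper for iralloct(): the existing object cannot satisfy the requested
 * alignment, so allocate a new, correctly aligned object, copy, and free
 * the old one.  It first tries size + extra bytes, then falls back to size
 * alone if that allocation fails.
 */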
JEMALLOC_ALWAYS_INLINE void *
iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
    size_t extra, size_t alignment, bool zero, tcache_t *tcache,
    arena_t *arena) {
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);
	void *p;
	size_t usize, copysize;

	usize = sz_sa2u(size + extra, alignment);
	if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
		return NULL;
	}
	p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);
	if (p == NULL) {
		if (extra == 0) {
			return NULL;
		}
		/* Try again, without extra this time. */
		usize = sz_sa2u(size, alignment);
		if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
			return NULL;
		}
		p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);
		if (p == NULL) {
			return NULL;
		}
	}
	/*
	 * Copy at most size bytes (not size+extra), since the caller has no
	 * expectation that the extra bytes will be reliably preserved.
	 */
	copysize = (size < oldsize) ? size : oldsize;
	memcpy(p, ptr, copysize);
	isdalloct(tsdn, ptr, oldsize, tcache, NULL, true);
	return p;
}

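/*
 * rallocx implementation ("r"): resize in place when possible; if the
 * existing object cannot satisfy the requested alignment, fall back to
 * iralloct_realign()'s allocate/copy/free cycle.  On failure NULL is
 * returned and ptr stays valid (assuming arena_ralloc shares that
 * contract), so a realloc-style caller can do, for example:
 *
 *     void *q = iralloct(tsdn, p, old_usize, new_size, 0, false, tcache,
 *         NULL);
 *     if (q == NULL) {
 *         ... p is still usable ...
 *     }
 */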
JEMALLOC_ALWAYS_INLINE void *
iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment,
    bool zero, tcache_t *tcache, arena_t *arena) {
	assert(ptr != NULL);
	assert(size != 0);
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
	    != 0) {
		/*
		 * Existing object alignment is inadequate; allocate new space
		 * and copy.
		 */
		return iralloct_realign(tsdn, ptr, oldsize, size, 0, alignment,
		    zero, tcache, arena);
	}

	return arena_ralloc(tsdn, arena, ptr, oldsize, size, alignment, zero,
	    tcache);
}

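/* Reallocate via the thread's tcache and an auto arena. */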
JEMALLOC_ALWAYS_INLINE void *
iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
    bool zero) {
	return iralloct(tsd_tsdn(tsd), ptr, oldsize, size, alignment, zero,
	    tcache_get(tsd), NULL);
}

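/*
 * xallocx implementation ("x"): attempt to resize in place only, never
 * moving the object.  Returns true on failure (the resize could not be
 * performed, e.g. because the existing alignment is inadequate), false on
 * success.
 */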
JEMALLOC_ALWAYS_INLINE bool
ixalloc(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t extra,
    size_t alignment, bool zero) {
	assert(ptr != NULL);
	assert(size != 0);
	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
	    WITNESS_RANK_CORE, 0);

	if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
	    != 0) {
		/* Existing object alignment is inadequate. */
		return true;
	}

	return arena_ralloc_no_move(tsdn, ptr, oldsize, size, extra, zero);
}

#endif /* JEMALLOC_INTERNAL_INLINES_C_H */