1 /*
2 * Copyright (C) 2009 The Android Open Source Project
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in
12 * the documentation and/or other materials provided with the
13 * distribution.
14 *
15 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
16 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
17 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
18 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
19 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
21 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
22 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
23 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
25 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE.
27 */
28
29 // Contains a thin layer that calls whatever real native allocator
30 // has been defined. For the libc shared library, this allows the
31 // implementation of a debug malloc that can intercept all of the allocation
32 // calls and add special debugging code to attempt to catch allocation
33 // errors. All of the debugging code is implemented in a separate shared
34 // library that is only loaded when the property "libc.debug.malloc.options"
35 // is set to a non-zero value.
36
37 #include <errno.h>
38 #include <stdint.h>
39 #include <stdio.h>
40
41 #include <platform/bionic/malloc.h>
42 #include <private/ScopedPthreadMutexLocker.h>
43 #include <private/bionic_config.h>
44 #include <private/bionic_defs.h>
45
46 #include "gwp_asan_wrappers.h"
47 #include "heap_tagging.h"
48 #include "heap_zero_init.h"
49 #include "malloc_common.h"
50 #include "malloc_limit.h"
51 #include "malloc_tagged_pointers.h"
52
53 // =============================================================================
// Global variable instantiations.
55 // =============================================================================
56
// Malloc hooks globals.
// Legacy glibc-style interposition hooks (__malloc_hook et al.). Nothing in
// this file reads or writes them; presumably they are consulted by the hook
// dispatch machinery elsewhere in libc — confirm against malloc_heapprofd /
// hook installation code before relying on their semantics.
void* (*volatile __malloc_hook)(size_t, const void*);
void* (*volatile __realloc_hook)(void*, size_t, const void*);
void (*volatile __free_hook)(void*, const void*);
void* (*volatile __memalign_hook)(size_t, size_t, const void*);
62 // =============================================================================
63
64 // =============================================================================
65 // Allocation functions
66 // =============================================================================
calloc(size_t n_elements,size_t elem_size)67 extern "C" void* calloc(size_t n_elements, size_t elem_size) {
68 auto dispatch_table = GetDispatchTable();
69 if (__predict_false(dispatch_table != nullptr)) {
70 return MaybeTagPointer(dispatch_table->calloc(n_elements, elem_size));
71 }
72 void* result = Malloc(calloc)(n_elements, elem_size);
73 if (__predict_false(result == nullptr)) {
74 warning_log("calloc(%zu, %zu) failed: returning null pointer", n_elements, elem_size);
75 }
76 return MaybeTagPointer(result);
77 }
78
free(void * mem)79 extern "C" void free(void* mem) {
80 auto dispatch_table = GetDispatchTable();
81 mem = MaybeUntagAndCheckPointer(mem);
82 if (__predict_false(dispatch_table != nullptr)) {
83 dispatch_table->free(mem);
84 } else {
85 Malloc(free)(mem);
86 }
87 }
88
mallinfo()89 extern "C" struct mallinfo mallinfo() {
90 auto dispatch_table = GetDispatchTable();
91 if (__predict_false(dispatch_table != nullptr)) {
92 return dispatch_table->mallinfo();
93 }
94 return Malloc(mallinfo)();
95 }
96
malloc_info(int options,FILE * fp)97 extern "C" int malloc_info(int options, FILE* fp) {
98 auto dispatch_table = GetDispatchTable();
99 if (__predict_false(dispatch_table != nullptr)) {
100 return dispatch_table->malloc_info(options, fp);
101 }
102 return Malloc(malloc_info)(options, fp);
103 }
104
mallopt(int param,int value)105 extern "C" int mallopt(int param, int value) {
106 // Some are handled by libc directly rather than by the allocator.
107 if (param == M_BIONIC_SET_HEAP_TAGGING_LEVEL) {
108 ScopedPthreadMutexLocker locker(&g_heap_tagging_lock);
109 return SetHeapTaggingLevel(static_cast<HeapTaggingLevel>(value));
110 }
111 if (param == M_BIONIC_ZERO_INIT) {
112 return SetHeapZeroInitialize(value);
113 }
114
115 // The rest we pass on...
116 int retval;
117 auto dispatch_table = GetDispatchTable();
118 if (__predict_false(dispatch_table != nullptr)) {
119 retval = dispatch_table->mallopt(param, value);
120 } else {
121 retval = Malloc(mallopt)(param, value);
122 }
123
124 // Track the M_DECAY_TIME mallopt calls.
125 if (param == M_DECAY_TIME && retval == 1) {
126 __libc_globals.mutate([value](libc_globals* globals) {
127 if (value <= 0) {
128 atomic_store(&globals->decay_time_enabled, false);
129 } else {
130 atomic_store(&globals->decay_time_enabled, true);
131 }
132 });
133 }
134 return retval;
135 }
136
malloc(size_t bytes)137 extern "C" void* malloc(size_t bytes) {
138 auto dispatch_table = GetDispatchTable();
139 void *result;
140 if (__predict_false(dispatch_table != nullptr)) {
141 result = dispatch_table->malloc(bytes);
142 } else {
143 result = Malloc(malloc)(bytes);
144 }
145 if (__predict_false(result == nullptr)) {
146 warning_log("malloc(%zu) failed: returning null pointer", bytes);
147 return nullptr;
148 }
149 return MaybeTagPointer(result);
150 }
151
malloc_usable_size(const void * mem)152 extern "C" size_t malloc_usable_size(const void* mem) {
153 auto dispatch_table = GetDispatchTable();
154 mem = MaybeUntagAndCheckPointer(mem);
155 if (__predict_false(dispatch_table != nullptr)) {
156 return dispatch_table->malloc_usable_size(mem);
157 }
158 return Malloc(malloc_usable_size)(mem);
159 }
160
memalign(size_t alignment,size_t bytes)161 extern "C" void* memalign(size_t alignment, size_t bytes) {
162 auto dispatch_table = GetDispatchTable();
163 if (__predict_false(dispatch_table != nullptr)) {
164 return MaybeTagPointer(dispatch_table->memalign(alignment, bytes));
165 }
166 void* result = Malloc(memalign)(alignment, bytes);
167 if (__predict_false(result == nullptr)) {
168 warning_log("memalign(%zu, %zu) failed: returning null pointer", alignment, bytes);
169 }
170 return MaybeTagPointer(result);
171 }
172
posix_memalign(void ** memptr,size_t alignment,size_t size)173 extern "C" int posix_memalign(void** memptr, size_t alignment, size_t size) {
174 auto dispatch_table = GetDispatchTable();
175 int result;
176 if (__predict_false(dispatch_table != nullptr)) {
177 result = dispatch_table->posix_memalign(memptr, alignment, size);
178 } else {
179 result = Malloc(posix_memalign)(memptr, alignment, size);
180 }
181 if (result == 0) {
182 *memptr = MaybeTagPointer(*memptr);
183 }
184 return result;
185 }
186
aligned_alloc(size_t alignment,size_t size)187 extern "C" void* aligned_alloc(size_t alignment, size_t size) {
188 auto dispatch_table = GetDispatchTable();
189 if (__predict_false(dispatch_table != nullptr)) {
190 return MaybeTagPointer(dispatch_table->aligned_alloc(alignment, size));
191 }
192 void* result = Malloc(aligned_alloc)(alignment, size);
193 if (__predict_false(result == nullptr)) {
194 warning_log("aligned_alloc(%zu, %zu) failed: returning null pointer", alignment, size);
195 }
196 return MaybeTagPointer(result);
197 }
198
realloc(void * old_mem,size_t bytes)199 extern "C" __attribute__((__noinline__)) void* realloc(void* old_mem, size_t bytes) {
200 auto dispatch_table = GetDispatchTable();
201 old_mem = MaybeUntagAndCheckPointer(old_mem);
202 if (__predict_false(dispatch_table != nullptr)) {
203 return MaybeTagPointer(dispatch_table->realloc(old_mem, bytes));
204 }
205 void* result = Malloc(realloc)(old_mem, bytes);
206 if (__predict_false(result == nullptr && bytes != 0)) {
207 warning_log("realloc(%p, %zu) failed: returning null pointer", old_mem, bytes);
208 }
209 return MaybeTagPointer(result);
210 }
211
reallocarray(void * old_mem,size_t item_count,size_t item_size)212 extern "C" void* reallocarray(void* old_mem, size_t item_count, size_t item_size) {
213 size_t new_size;
214 if (__builtin_mul_overflow(item_count, item_size, &new_size)) {
215 warning_log("reallocaray(%p, %zu, %zu) failed: returning null pointer",
216 old_mem, item_count, item_size);
217 errno = ENOMEM;
218 return nullptr;
219 }
220 return realloc(old_mem, new_size);
221 }
222
223 #if defined(HAVE_DEPRECATED_MALLOC_FUNCS)
pvalloc(size_t bytes)224 extern "C" void* pvalloc(size_t bytes) {
225 auto dispatch_table = GetDispatchTable();
226 if (__predict_false(dispatch_table != nullptr)) {
227 return MaybeTagPointer(dispatch_table->pvalloc(bytes));
228 }
229 void* result = Malloc(pvalloc)(bytes);
230 if (__predict_false(result == nullptr)) {
231 warning_log("pvalloc(%zu) failed: returning null pointer", bytes);
232 }
233 return MaybeTagPointer(result);
234 }
235
valloc(size_t bytes)236 extern "C" void* valloc(size_t bytes) {
237 auto dispatch_table = GetDispatchTable();
238 if (__predict_false(dispatch_table != nullptr)) {
239 return MaybeTagPointer(dispatch_table->valloc(bytes));
240 }
241 void* result = Malloc(valloc)(bytes);
242 if (__predict_false(result == nullptr)) {
243 warning_log("valloc(%zu) failed: returning null pointer", bytes);
244 }
245 return MaybeTagPointer(result);
246 }
247 #endif
248 // =============================================================================
249
// Bundles a user-supplied malloc_iterate callback with its opaque argument so
// CallbackWrapper can forward both through the allocator's C callback slot.
struct CallbackWrapperArg {
  void (*callback)(uintptr_t base, size_t size, void* arg);
  void* arg;
};
254
CallbackWrapper(uintptr_t base,size_t size,void * arg)255 void CallbackWrapper(uintptr_t base, size_t size, void* arg) {
256 CallbackWrapperArg* wrapper_arg = reinterpret_cast<CallbackWrapperArg*>(arg);
257 wrapper_arg->callback(
258 reinterpret_cast<uintptr_t>(MaybeTagPointer(reinterpret_cast<void*>(base))),
259 size, wrapper_arg->arg);
260 }
261
262 // =============================================================================
263 // Exported for use by libmemunreachable.
264 // =============================================================================
265
266 // Calls callback for every allocation in the anonymous heap mapping
267 // [base, base+size). Must be called between malloc_disable and malloc_enable.
268 // `base` in this can take either a tagged or untagged pointer, but we always
269 // provide a tagged pointer to the `base` argument of `callback` if the kernel
270 // supports tagged pointers.
malloc_iterate(uintptr_t base,size_t size,void (* callback)(uintptr_t base,size_t size,void * arg),void * arg)271 extern "C" int malloc_iterate(uintptr_t base, size_t size,
272 void (*callback)(uintptr_t base, size_t size, void* arg), void* arg) {
273 auto dispatch_table = GetDispatchTable();
274 // Wrap the malloc_iterate callback we were provided, in order to provide
275 // pointer tagging support.
276 CallbackWrapperArg wrapper_arg;
277 wrapper_arg.callback = callback;
278 wrapper_arg.arg = arg;
279 uintptr_t untagged_base =
280 reinterpret_cast<uintptr_t>(UntagPointer(reinterpret_cast<void*>(base)));
281 if (__predict_false(dispatch_table != nullptr)) {
282 return dispatch_table->malloc_iterate(
283 untagged_base, size, CallbackWrapper, &wrapper_arg);
284 }
285 return Malloc(malloc_iterate)(
286 untagged_base, size, CallbackWrapper, &wrapper_arg);
287 }
288
289 // Disable calls to malloc so malloc_iterate gets a consistent view of
290 // allocated memory.
malloc_disable()291 extern "C" void malloc_disable() {
292 auto dispatch_table = GetDispatchTable();
293 if (__predict_false(dispatch_table != nullptr)) {
294 return dispatch_table->malloc_disable();
295 }
296 return Malloc(malloc_disable)();
297 }
298
299 // Re-enable calls to malloc after a previous call to malloc_disable.
malloc_enable()300 extern "C" void malloc_enable() {
301 auto dispatch_table = GetDispatchTable();
302 if (__predict_false(dispatch_table != nullptr)) {
303 return dispatch_table->malloc_enable();
304 }
305 return Malloc(malloc_enable)();
306 }
307
308 #if defined(LIBC_STATIC)
// Stub for static libc builds: always reports zero backtrace frames (the
// debug-malloc machinery that records allocation backtraces is only available
// in the dynamic libc).
extern "C" ssize_t malloc_backtrace(void*, uintptr_t*, size_t) {
  return 0;
}
312 #endif
313
314 #if __has_feature(hwaddress_sanitizer)
315 // FIXME: implement these in HWASan allocator.
// Stub: heap iteration is not implemented for the HWASan allocator (see the
// FIXME above); reports success without invoking the callback.
extern "C" int __sanitizer_malloc_iterate(uintptr_t base __unused, size_t size __unused,
                                          void (*callback)(uintptr_t base, size_t size, void* arg)
                                              __unused,
                                          void* arg __unused) {
  return 0;
}
322
// Stub: malloc_disable is a no-op under HWASan (see the FIXME above).
extern "C" void __sanitizer_malloc_disable() {
}
325
// Stub: malloc_enable is a no-op under HWASan (see the FIXME above).
extern "C" void __sanitizer_malloc_enable() {
}
328
// Stub: malloc_info is unsupported under HWASan; fails with ENOTSUP.
extern "C" int __sanitizer_malloc_info(int, FILE*) {
  errno = ENOTSUP;
  return -1;
}
333 #endif
334 // =============================================================================
335
// Dispatch table pointing at the native allocator's own entry points.
// NOTE: the initializers are positional — their order must match the field
// order of the MallocDispatch declaration exactly; do not reorder.
static constexpr MallocDispatch __libc_malloc_default_dispatch __attribute__((unused)) = {
  Malloc(calloc),
  Malloc(free),
  Malloc(mallinfo),
  Malloc(malloc),
  Malloc(malloc_usable_size),
  Malloc(memalign),
  Malloc(posix_memalign),
#if defined(HAVE_DEPRECATED_MALLOC_FUNCS)
  Malloc(pvalloc),
#endif
  Malloc(realloc),
#if defined(HAVE_DEPRECATED_MALLOC_FUNCS)
  Malloc(valloc),
#endif
  Malloc(malloc_iterate),
  Malloc(malloc_disable),
  Malloc(malloc_enable),
  Malloc(mallopt),
  Malloc(aligned_alloc),
  Malloc(malloc_info),
};
358
// Returns the dispatch table for the native (non-debug) allocator defined
// above, so other components can call the real allocator directly.
const MallocDispatch* NativeAllocatorDispatch() {
  return &__libc_malloc_default_dispatch;
}
362
363 #if !defined(LIBC_STATIC)
364 void MallocInitImpl(libc_globals* globals);
365 #endif
366
367 // Initializes memory allocation framework.
368 // This routine is called from __libc_init routines in libc_init_dynamic.cpp
369 // and libc_init_static.cpp.
370 __BIONIC_WEAK_FOR_NATIVE_BRIDGE
__libc_init_malloc(libc_globals * globals)371 __LIBC_HIDDEN__ void __libc_init_malloc(libc_globals* globals) {
372 #if !defined(LIBC_STATIC)
373 MallocInitImpl(globals);
374 #endif
375 const char* value = getenv("MALLOC_USE_APP_DEFAULTS");
376 if (value == nullptr || value[0] == '\0') {
377 return;
378 }
379
380 // Normal apps currently turn off zero init for performance reasons.
381 SetHeapZeroInitialize(false);
382
383 // Do not call mallopt directly since that will try and lock the globals
384 // data structure.
385 int retval;
386 auto dispatch_table = GetDispatchTable();
387 if (__predict_false(dispatch_table != nullptr)) {
388 retval = dispatch_table->mallopt(M_DECAY_TIME, 1);
389 } else {
390 retval = Malloc(mallopt)(M_DECAY_TIME, 1);
391 }
392 if (retval == 1) {
393 globals->decay_time_enabled = true;
394 }
395 }
396