/*
 * Copyright (C) 2012 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <errno.h>
#include <inttypes.h>
#include <malloc.h>
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/cdefs.h>
#include <sys/param.h>
#include <unistd.h>

#include <mutex>
#include <vector>

#include <android-base/file.h>
#include <android-base/properties.h>
#include <android-base/stringprintf.h>
#include <private/bionic_malloc_dispatch.h>
#include <private/MallocXmlElem.h>

#include "Config.h"
#include "DebugData.h"
#include "backtrace.h"
#include "debug_disable.h"
#include "debug_log.h"
#include "malloc_debug.h"
#include "UnwindBacktrace.h"

// ------------------------------------------------------------------------
// Global Data
// ------------------------------------------------------------------------
DebugData* g_debug;

bool* g_zygote_child;

const MallocDispatch* g_dispatch;
// ------------------------------------------------------------------------

// ------------------------------------------------------------------------
// Use C style prototypes for all exported functions. This makes it easy
// to do dlsym lookups during libc initialization when malloc debug
// is enabled.
// ------------------------------------------------------------------------
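// (These symbols are looked up by name with dlsym() when libc loads this
// library; malloc debug is typically enabled via the libc.debug.malloc.options
// system property, described in bionic's malloc_debug README.)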
__BEGIN_DECLS

bool debug_initialize(const MallocDispatch* malloc_dispatch, bool* malloc_zygote_child,
                      const char* options);
void debug_finalize();
void debug_dump_heap(const char* file_name);
void debug_get_malloc_leak_info(uint8_t** info, size_t* overall_size, size_t* info_size,
                                size_t* total_memory, size_t* backtrace_size);
bool debug_write_malloc_leak_info(FILE* fp);
ssize_t debug_malloc_backtrace(void* pointer, uintptr_t* frames, size_t frame_count);
void debug_free_malloc_leak_info(uint8_t* info);
size_t debug_malloc_usable_size(void* pointer);
void* debug_malloc(size_t size);
void debug_free(void* pointer);
void* debug_aligned_alloc(size_t alignment, size_t size);
void* debug_memalign(size_t alignment, size_t bytes);
void* debug_realloc(void* pointer, size_t bytes);
void* debug_calloc(size_t nmemb, size_t bytes);
struct mallinfo debug_mallinfo();
int debug_mallopt(int param, int value);
int debug_malloc_info(int options, FILE* fp);
int debug_posix_memalign(void** memptr, size_t alignment, size_t size);
int debug_iterate(uintptr_t base, size_t size,
                  void (*callback)(uintptr_t base, size_t size, void* arg), void* arg);
void debug_malloc_disable();
void debug_malloc_enable();

#if defined(HAVE_DEPRECATED_MALLOC_FUNCS)
void* debug_pvalloc(size_t bytes);
void* debug_valloc(size_t size);
#endif

__END_DECLS
// ------------------------------------------------------------------------

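// Every exported entry point below takes this lock for reading, so normal
// allocation calls proceed concurrently. BlockAllOperations() takes the write
// lock and never releases it, which lets debug_finalize() wait for in-flight
// operations to drain and then block any new ones.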
class ScopedConcurrentLock {
 public:
  ScopedConcurrentLock() {
    pthread_rwlock_rdlock(&lock_);
  }
  ~ScopedConcurrentLock() {
    pthread_rwlock_unlock(&lock_);
  }

  static void Init() {
    pthread_rwlockattr_t attr;
    // Set the attribute so that when a write lock is pending, read locks are no
    // longer granted.
    pthread_rwlockattr_setkind_np(&attr, PTHREAD_RWLOCK_PREFER_WRITER_NONRECURSIVE_NP);
    pthread_rwlock_init(&lock_, &attr);
  }

  static void BlockAllOperations() {
    pthread_rwlock_wrlock(&lock_);
  }

 private:
  static pthread_rwlock_t lock_;
};
pthread_rwlock_t ScopedConcurrentLock::lock_;

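// Register pthread_atfork() handlers exactly once so that malloc debug's
// internal state is prepared before fork() and reset in the parent and child
// afterwards, keeping the bookkeeping consistent across forks.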
static void InitAtfork() {
  static pthread_once_t atfork_init = PTHREAD_ONCE_INIT;
  pthread_once(&atfork_init, []() {
    pthread_atfork(
        []() {
          if (g_debug != nullptr) {
            g_debug->PrepareFork();
          }
        },
        []() {
          if (g_debug != nullptr) {
            g_debug->PostForkParent();
          }
        },
        []() {
          if (g_debug != nullptr) {
            g_debug->PostForkChild();
          }
        });
  });
}

void BacktraceAndLog() {
  if (g_debug->config().options() & BACKTRACE_FULL) {
    std::vector<uintptr_t> frames;
    std::vector<unwindstack::LocalFrameData> frames_info;
    if (!Unwind(&frames, &frames_info, 256)) {
      error_log("  Backtrace failed to get any frames.");
    } else {
      UnwindLog(frames_info);
    }
  } else {
    std::vector<uintptr_t> frames(256);
    size_t num_frames = backtrace_get(frames.data(), frames.size());
    if (num_frames == 0) {
      error_log("  Backtrace failed to get any frames.");
    } else {
      backtrace_log(frames.data(), num_frames);
    }
  }
}

static void LogError(const void* pointer, const char* error_str) {
  error_log(LOG_DIVIDER);
  error_log("+++ ALLOCATION %p %s", pointer, error_str);

  // If we are tracking already freed pointers, check to see if this is
  // one so we can print extra information.
  if (g_debug->config().options() & FREE_TRACK) {
    PointerData::LogFreeBacktrace(pointer);
  }

  error_log("Backtrace at time of failure:");
  BacktraceAndLog();
  error_log(LOG_DIVIDER);
  if (g_debug->config().options() & ABORT_ON_ERROR) {
    abort();
  }
}

static bool VerifyPointer(const void* pointer, const char* function_name) {
  if (g_debug->HeaderEnabled()) {
    Header* header = g_debug->GetHeader(pointer);
    if (header->tag != DEBUG_TAG) {
      std::string error_str;
      if (header->tag == DEBUG_FREE_TAG) {
        error_str = std::string("USED AFTER FREE (") + function_name + ")";
      } else {
        error_str = android::base::StringPrintf("HAS INVALID TAG %" PRIx32 " (%s)", header->tag,
                                                function_name);
      }
      LogError(pointer, error_str.c_str());
      return false;
    }
  }

  if (g_debug->TrackPointers()) {
    if (!PointerData::Exists(pointer)) {
      std::string error_str(std::string("UNKNOWN POINTER (") + function_name + ")");
      LogError(pointer, error_str.c_str());
      return false;
    }
  }
  return true;
}

static size_t InternalMallocUsableSize(void* pointer) {
  if (g_debug->HeaderEnabled()) {
    return g_debug->GetHeader(pointer)->usable_size;
  } else {
    return g_dispatch->malloc_usable_size(pointer);
  }
}

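// When a header is in use, the raw allocation is laid out as
//   [Header][front guard][user data][rear guard]
// and the pointer handed back to the caller points at the user data.
// InitHeader() fills in the header and (optionally) the guard patterns.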
static void* InitHeader(Header* header, void* orig_pointer, size_t size) {
  header->tag = DEBUG_TAG;
  header->orig_pointer = orig_pointer;
  header->size = size;
  header->usable_size = g_dispatch->malloc_usable_size(orig_pointer);
  if (header->usable_size == 0) {
    g_dispatch->free(orig_pointer);
    return nullptr;
  }
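  // Report only the caller-visible bytes: subtract the debug overhead (header
  // plus front guard) and any padding between the raw allocation and the header
  // from the underlying allocator's usable size.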
  header->usable_size -= g_debug->pointer_offset() + reinterpret_cast<uintptr_t>(header) -
                         reinterpret_cast<uintptr_t>(orig_pointer);

  if (g_debug->config().options() & FRONT_GUARD) {
    uint8_t* guard = g_debug->GetFrontGuard(header);
    memset(guard, g_debug->config().front_guard_value(), g_debug->config().front_guard_bytes());
  }

  if (g_debug->config().options() & REAR_GUARD) {
    uint8_t* guard = g_debug->GetRearGuard(header);
    memset(guard, g_debug->config().rear_guard_value(), g_debug->config().rear_guard_bytes());
    // If the rear guard is enabled, set the usable size to the exact size
    // of the allocation.
    header->usable_size = header->size;
  }

  return g_debug->GetPointer(header);
}

bool debug_initialize(const MallocDispatch* malloc_dispatch, bool* zygote_child,
                      const char* options) {
  if (zygote_child == nullptr || options == nullptr) {
    return false;
  }

  InitAtfork();

  g_zygote_child = zygote_child;

  g_dispatch = malloc_dispatch;

  if (!DebugDisableInitialize()) {
    return false;
  }

  DebugData* debug = new DebugData();
  if (!debug->Initialize(options)) {
    delete debug;
    DebugDisableFinalize();
    return false;
  }
  g_debug = debug;

  // Always enable the backtrace code since we will use it in a number
  // of different error cases.
  backtrace_startup();

  if (g_debug->config().options() & VERBOSE) {
    info_log("%s: malloc debug enabled", getprogname());
  }

  ScopedConcurrentLock::Init();

  return true;
}

void debug_finalize() {
  if (g_debug == nullptr) {
    return;
  }

  // Make sure that there are no other threads doing debug allocations
  // before we kill everything.
  ScopedConcurrentLock::BlockAllOperations();

  // Turn off capturing allocation calls.
  DebugDisableSet(true);

  if (g_debug->config().options() & FREE_TRACK) {
    PointerData::VerifyAllFreed();
  }

  if (g_debug->config().options() & LEAK_TRACK) {
    PointerData::LogLeaks();
  }

  if ((g_debug->config().options() & BACKTRACE) && g_debug->config().backtrace_dump_on_exit()) {
    debug_dump_heap(android::base::StringPrintf("%s.%d.exit.txt",
                                                g_debug->config().backtrace_dump_prefix().c_str(),
                                                getpid()).c_str());
  }

  backtrace_shutdown();

  delete g_debug;
  g_debug = nullptr;

  DebugDisableFinalize();
}

void debug_get_malloc_leak_info(uint8_t** info, size_t* overall_size, size_t* info_size,
                                size_t* total_memory, size_t* backtrace_size) {
  ScopedConcurrentLock lock;

  ScopedDisableDebugCalls disable;

  // Verify the arguments.
  if (info == nullptr || overall_size == nullptr || info_size == nullptr || total_memory == nullptr ||
      backtrace_size == nullptr) {
    error_log("get_malloc_leak_info: At least one invalid parameter.");
    return;
  }

  *info = nullptr;
  *overall_size = 0;
  *info_size = 0;
  *total_memory = 0;
  *backtrace_size = 0;

  if (!(g_debug->config().options() & BACKTRACE)) {
    error_log(
        "get_malloc_leak_info: Allocations not being tracked, to enable "
        "set the option 'backtrace'.");
    return;
  }

  PointerData::GetInfo(info, overall_size, info_size, total_memory, backtrace_size);
}

void debug_free_malloc_leak_info(uint8_t* info) {
  g_dispatch->free(info);
}

size_t debug_malloc_usable_size(void* pointer) {
  if (DebugCallsDisabled() || pointer == nullptr) {
    return g_dispatch->malloc_usable_size(pointer);
  }
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  if (!VerifyPointer(pointer, "malloc_usable_size")) {
    return 0;
  }

  return InternalMallocUsableSize(pointer);
}

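// Common malloc path. extra_bytes() accounts for the optional header and guard
// regions, so the request forwarded to the real allocator is the user size plus
// that overhead, with explicit overflow checks before it is passed down.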
static void* InternalMalloc(size_t size) {
  if ((g_debug->config().options() & BACKTRACE) && g_debug->pointer->ShouldDumpAndReset()) {
    debug_dump_heap(android::base::StringPrintf(
                        "%s.%d.txt", g_debug->config().backtrace_dump_prefix().c_str(), getpid())
                        .c_str());
  }

  if (size == 0) {
    size = 1;
  }

  size_t real_size = size + g_debug->extra_bytes();
  if (real_size < size) {
    // Overflow.
    errno = ENOMEM;
    return nullptr;
  }

  if (size > PointerInfoType::MaxSize()) {
    errno = ENOMEM;
    return nullptr;
  }

  void* pointer;
  if (g_debug->HeaderEnabled()) {
    Header* header =
        reinterpret_cast<Header*>(g_dispatch->memalign(MINIMUM_ALIGNMENT_BYTES, real_size));
    if (header == nullptr) {
      return nullptr;
    }
    pointer = InitHeader(header, header, size);
  } else {
    pointer = g_dispatch->malloc(real_size);
  }

  if (pointer != nullptr) {
    if (g_debug->TrackPointers()) {
      PointerData::Add(pointer, size);
    }

    if (g_debug->config().options() & FILL_ON_ALLOC) {
      size_t bytes = InternalMallocUsableSize(pointer);
      size_t fill_bytes = g_debug->config().fill_on_alloc_bytes();
      bytes = (bytes < fill_bytes) ? bytes : fill_bytes;
      memset(pointer, g_debug->config().fill_alloc_value(), bytes);
    }
  }
  return pointer;
}

void* debug_malloc(size_t size) {
  if (DebugCallsDisabled()) {
    return g_dispatch->malloc(size);
  }
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  void* pointer = InternalMalloc(size);

  if (g_debug->config().options() & RECORD_ALLOCS) {
    g_debug->record->AddEntry(new MallocEntry(pointer, size));
  }

  return pointer;
}

static void InternalFree(void* pointer) {
  if ((g_debug->config().options() & BACKTRACE) && g_debug->pointer->ShouldDumpAndReset()) {
    debug_dump_heap(android::base::StringPrintf(
                        "%s.%d.txt", g_debug->config().backtrace_dump_prefix().c_str(), getpid())
                        .c_str());
  }

  void* free_pointer = pointer;
  size_t bytes;
  Header* header;
  if (g_debug->HeaderEnabled()) {
    header = g_debug->GetHeader(pointer);
    free_pointer = header->orig_pointer;

    if (g_debug->config().options() & FRONT_GUARD) {
      if (!g_debug->front_guard->Valid(header)) {
        g_debug->front_guard->LogFailure(header);
      }
    }
    if (g_debug->config().options() & REAR_GUARD) {
      if (!g_debug->rear_guard->Valid(header)) {
        g_debug->rear_guard->LogFailure(header);
      }
    }

    header->tag = DEBUG_FREE_TAG;

    bytes = header->usable_size;
  } else {
    bytes = g_dispatch->malloc_usable_size(pointer);
  }

  if (g_debug->config().options() & FILL_ON_FREE) {
    size_t fill_bytes = g_debug->config().fill_on_free_bytes();
    bytes = (bytes < fill_bytes) ? bytes : fill_bytes;
    memset(pointer, g_debug->config().fill_free_value(), bytes);
  }

  if (g_debug->TrackPointers()) {
    PointerData::Remove(pointer);
  }

  if (g_debug->config().options() & FREE_TRACK) {
    // Do not add the allocation until we are done modifying the pointer
    // itself. This avoids a race if a lot of threads are all doing
    // frees at the same time and we wind up trying to really free this
    // pointer from another thread, while still trying to free it in
    // this function.
    pointer = PointerData::AddFreed(pointer);
    if (pointer != nullptr) {
      if (g_debug->HeaderEnabled()) {
        pointer = g_debug->GetHeader(pointer)->orig_pointer;
      }
      g_dispatch->free(pointer);
    }
  } else {
    g_dispatch->free(free_pointer);
  }
}

void debug_free(void* pointer) {
  if (DebugCallsDisabled() || pointer == nullptr) {
    return g_dispatch->free(pointer);
  }
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  if (g_debug->config().options() & RECORD_ALLOCS) {
    g_debug->record->AddEntry(new FreeEntry(pointer));
  }

  if (!VerifyPointer(pointer, "free")) {
    return;
  }

  InternalFree(pointer);
}

void* debug_memalign(size_t alignment, size_t bytes) {
  if (DebugCallsDisabled()) {
    return g_dispatch->memalign(alignment, bytes);
  }
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  if (bytes == 0) {
    bytes = 1;
  }

  if (bytes > PointerInfoType::MaxSize()) {
    errno = ENOMEM;
    return nullptr;
  }

  void* pointer;
  if (g_debug->HeaderEnabled()) {
    // Make the alignment a power of two.
    if (!powerof2(alignment)) {
      alignment = BIONIC_ROUND_UP_POWER_OF_2(alignment);
    }
    // Force the alignment to at least MINIMUM_ALIGNMENT_BYTES to guarantee
    // that the header is aligned properly.
    if (alignment < MINIMUM_ALIGNMENT_BYTES) {
      alignment = MINIMUM_ALIGNMENT_BYTES;
    }

    // We don't have any idea what the natural alignment of
    // the underlying native allocator is, so we always need to
    // over allocate.
    size_t real_size = alignment + bytes + g_debug->extra_bytes();
    if (real_size < bytes) {
      // Overflow.
      errno = ENOMEM;
      return nullptr;
    }

    pointer = g_dispatch->malloc(real_size);
    if (pointer == nullptr) {
      return nullptr;
    }

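    // Round the user pointer up to the requested alignment: in unsigned
    // arithmetic (-value % alignment) is the number of bytes needed to reach
    // the next multiple of alignment, and 0 if value is already aligned.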
    uintptr_t value = reinterpret_cast<uintptr_t>(pointer) + g_debug->pointer_offset();
    // Now align the pointer.
    value += (-value % alignment);

    Header* header = g_debug->GetHeader(reinterpret_cast<void*>(value));
    pointer = InitHeader(header, pointer, bytes);
  } else {
    size_t real_size = bytes + g_debug->extra_bytes();
    if (real_size < bytes) {
      // Overflow.
      errno = ENOMEM;
      return nullptr;
    }
    pointer = g_dispatch->memalign(alignment, real_size);
  }

  if (pointer != nullptr) {
    if (g_debug->TrackPointers()) {
      PointerData::Add(pointer, bytes);
    }

    if (g_debug->config().options() & FILL_ON_ALLOC) {
      size_t bytes = InternalMallocUsableSize(pointer);
      size_t fill_bytes = g_debug->config().fill_on_alloc_bytes();
      bytes = (bytes < fill_bytes) ? bytes : fill_bytes;
      memset(pointer, g_debug->config().fill_alloc_value(), bytes);
    }

    if (g_debug->config().options() & RECORD_ALLOCS) {
      g_debug->record->AddEntry(new MemalignEntry(pointer, bytes, alignment));
    }
  }

  return pointer;
}

void* debug_realloc(void* pointer, size_t bytes) {
  if (DebugCallsDisabled()) {
    return g_dispatch->realloc(pointer, bytes);
  }
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  if (pointer == nullptr) {
    pointer = InternalMalloc(bytes);
    if (g_debug->config().options() & RECORD_ALLOCS) {
      g_debug->record->AddEntry(new ReallocEntry(pointer, bytes, nullptr));
    }
    return pointer;
  }

  if (!VerifyPointer(pointer, "realloc")) {
    return nullptr;
  }

  if (bytes == 0) {
    if (g_debug->config().options() & RECORD_ALLOCS) {
      g_debug->record->AddEntry(new ReallocEntry(nullptr, bytes, pointer));
    }

    InternalFree(pointer);
    return nullptr;
  }

  size_t real_size = bytes;
  if (g_debug->config().options() & EXPAND_ALLOC) {
    real_size += g_debug->config().expand_alloc_bytes();
    if (real_size < bytes) {
      // Overflow.
      errno = ENOMEM;
      return nullptr;
    }
  }

  if (bytes > PointerInfoType::MaxSize()) {
    errno = ENOMEM;
    return nullptr;
  }

  void* new_pointer;
  size_t prev_size;
  if (g_debug->HeaderEnabled()) {
    // Same size, do nothing.
    Header* header = g_debug->GetHeader(pointer);
    if (real_size == header->size) {
      if (g_debug->TrackPointers()) {
        // Remove and re-add so that the backtrace is updated.
        PointerData::Remove(pointer);
        PointerData::Add(pointer, real_size);
      }
      return pointer;
    }

    // Allocation is shrinking.
    if (real_size < header->usable_size) {
      header->size = real_size;
      if (g_debug->config().options() & REAR_GUARD) {
        // Don't bother allocating a smaller pointer in this case, simply
        // change the header usable_size and reset the rear guard.
        header->usable_size = header->size;
        memset(g_debug->GetRearGuard(header), g_debug->config().rear_guard_value(),
               g_debug->config().rear_guard_bytes());
      }
      if (g_debug->TrackPointers()) {
        // Remove and re-add so that the backtrace is updated.
        PointerData::Remove(pointer);
        PointerData::Add(pointer, real_size);
      }
      return pointer;
    }

    // Allocate the new size.
    new_pointer = InternalMalloc(bytes);
    if (new_pointer == nullptr) {
      errno = ENOMEM;
      return nullptr;
    }

    prev_size = header->usable_size;
    memcpy(new_pointer, pointer, prev_size);
    InternalFree(pointer);
  } else {
    if (g_debug->TrackPointers()) {
      PointerData::Remove(pointer);
    }

    prev_size = g_dispatch->malloc_usable_size(pointer);
    new_pointer = g_dispatch->realloc(pointer, real_size);
    if (new_pointer == nullptr) {
      return nullptr;
    }

    if (g_debug->TrackPointers()) {
      PointerData::Add(new_pointer, real_size);
    }
  }

  if (g_debug->config().options() & FILL_ON_ALLOC) {
    size_t bytes = InternalMallocUsableSize(new_pointer);
    if (bytes > g_debug->config().fill_on_alloc_bytes()) {
      bytes = g_debug->config().fill_on_alloc_bytes();
    }
    if (bytes > prev_size) {
      memset(reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(new_pointer) + prev_size),
             g_debug->config().fill_alloc_value(), bytes - prev_size);
    }
  }

  if (g_debug->config().options() & RECORD_ALLOCS) {
    g_debug->record->AddEntry(new ReallocEntry(new_pointer, bytes, pointer));
  }

  return new_pointer;
}

void* debug_calloc(size_t nmemb, size_t bytes) {
  if (DebugCallsDisabled()) {
    return g_dispatch->calloc(nmemb, bytes);
  }
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  size_t size;
  if (__builtin_mul_overflow(nmemb, bytes, &size)) {
    // Overflow
    errno = ENOMEM;
    return nullptr;
  }

  if (size == 0) {
    size = 1;
  }

  size_t real_size;
  if (__builtin_add_overflow(size, g_debug->extra_bytes(), &real_size)) {
    // Overflow.
    errno = ENOMEM;
    return nullptr;
  }

  if (real_size > PointerInfoType::MaxSize()) {
    errno = ENOMEM;
    return nullptr;
  }

  void* pointer;
  if (g_debug->HeaderEnabled()) {
    // Need to guarantee the alignment of the header.
    Header* header =
        reinterpret_cast<Header*>(g_dispatch->memalign(MINIMUM_ALIGNMENT_BYTES, real_size));
    if (header == nullptr) {
      return nullptr;
    }
    memset(header, 0, g_dispatch->malloc_usable_size(header));
    pointer = InitHeader(header, header, size);
  } else {
    pointer = g_dispatch->calloc(1, real_size);
  }

  if (g_debug->config().options() & RECORD_ALLOCS) {
    g_debug->record->AddEntry(new CallocEntry(pointer, bytes, nmemb));
  }

  if (pointer != nullptr && g_debug->TrackPointers()) {
    PointerData::Add(pointer, size);
  }
  return pointer;
}

struct mallinfo debug_mallinfo() {
  return g_dispatch->mallinfo();
}

int debug_mallopt(int param, int value) {
  return g_dispatch->mallopt(param, value);
}

int debug_malloc_info(int options, FILE* fp) {
  if (DebugCallsDisabled() || !g_debug->TrackPointers()) {
    return g_dispatch->malloc_info(options, fp);
  }
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  MallocXmlElem root(fp, "malloc", "version=\"debug-malloc-1\"");
  std::vector<ListInfoType> list;
  PointerData::GetAllocList(&list);

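  // The list is expected to keep allocations of the same size adjacent, so each
  // run of equal sizes is emitted as a single <allocation> element whose <total>
  // records how many live allocations share that size.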
  size_t alloc_num = 0;
  for (size_t i = 0; i < list.size(); i++) {
    MallocXmlElem alloc(fp, "allocation", "nr=\"%zu\"", alloc_num);

    size_t total = 1;
    size_t size = list[i].size;
    while (i < list.size() - 1 && list[i + 1].size == size) {
      i++;
      total++;
    }
    MallocXmlElem(fp, "size").Contents("%zu", list[i].size);
    MallocXmlElem(fp, "total").Contents("%zu", total);
    alloc_num++;
  }
  return 0;
}

void* debug_aligned_alloc(size_t alignment, size_t size) {
  if (DebugCallsDisabled()) {
    return g_dispatch->aligned_alloc(alignment, size);
  }
  if (!powerof2(alignment) || (size % alignment) != 0) {
    errno = EINVAL;
    return nullptr;
  }
  return debug_memalign(alignment, size);
}

int debug_posix_memalign(void** memptr, size_t alignment, size_t size) {
  if (DebugCallsDisabled()) {
    return g_dispatch->posix_memalign(memptr, alignment, size);
  }

  if (alignment < sizeof(void*) || !powerof2(alignment)) {
    return EINVAL;
  }
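  // posix_memalign() reports failure through its return value rather than
  // errno, so preserve the caller's errno around the underlying allocation.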
  int saved_errno = errno;
  *memptr = debug_memalign(alignment, size);
  errno = saved_errno;
  return (*memptr != nullptr) ? 0 : ENOMEM;
}

int debug_iterate(uintptr_t base, size_t size, void (*callback)(uintptr_t, size_t, void*),
                  void* arg) {
  ScopedConcurrentLock lock;
  if (g_debug->TrackPointers()) {
    // Since malloc is disabled, don't bother acquiring any locks.
    for (auto it = PointerData::begin(); it != PointerData::end(); ++it) {
      callback(it->first, InternalMallocUsableSize(reinterpret_cast<void*>(it->first)), arg);
    }
    return 0;
  }

  // An option that adds a header will add pointer tracking, so no need to
  // check if headers are enabled.
  return g_dispatch->iterate(base, size, callback, arg);
}

void debug_malloc_disable() {
  ScopedConcurrentLock lock;
  g_dispatch->malloc_disable();
  if (g_debug->pointer) {
    g_debug->pointer->PrepareFork();
  }
}

void debug_malloc_enable() {
  ScopedConcurrentLock lock;
  if (g_debug->pointer) {
    g_debug->pointer->PostForkParent();
  }
  g_dispatch->malloc_enable();
}

ssize_t debug_malloc_backtrace(void* pointer, uintptr_t* frames, size_t max_frames) {
  if (DebugCallsDisabled() || pointer == nullptr) {
    return 0;
  }
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  if (!(g_debug->config().options() & BACKTRACE)) {
    return 0;
  }
  return PointerData::GetFrames(pointer, frames, max_frames);
}

#if defined(HAVE_DEPRECATED_MALLOC_FUNCS)
void* debug_pvalloc(size_t bytes) {
  if (DebugCallsDisabled()) {
    return g_dispatch->pvalloc(bytes);
  }

  size_t pagesize = getpagesize();
  size_t size = __BIONIC_ALIGN(bytes, pagesize);
  if (size < bytes) {
    // Overflow
    errno = ENOMEM;
    return nullptr;
  }
  return debug_memalign(pagesize, size);
}

void* debug_valloc(size_t size) {
  if (DebugCallsDisabled()) {
    return g_dispatch->valloc(size);
  }
  return debug_memalign(getpagesize(), size);
}
#endif

static std::mutex g_dump_lock;

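// Writes the heap dump: a version banner, the build fingerprint, every live
// allocation (via PointerData), and finally the contents of /proc/self/maps so
// the recorded backtrace addresses can be symbolized offline.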
static void write_dump(FILE* fp) {
  fprintf(fp, "Android Native Heap Dump v1.2\n\n");

  std::string fingerprint = android::base::GetProperty("ro.build.fingerprint", "unknown");
  fprintf(fp, "Build fingerprint: '%s'\n\n", fingerprint.c_str());

  PointerData::DumpLiveToFile(fp);

  fprintf(fp, "MAPS\n");
  std::string content;
  if (!android::base::ReadFileToString("/proc/self/maps", &content)) {
    fprintf(fp, "Could not open /proc/self/maps\n");
  } else {
    fprintf(fp, "%s", content.c_str());
  }
  fprintf(fp, "END\n");
}

bool debug_write_malloc_leak_info(FILE* fp) {
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  std::lock_guard<std::mutex> guard(g_dump_lock);

  if (!(g_debug->config().options() & BACKTRACE)) {
    return false;
  }

  write_dump(fp);
  return true;
}

void debug_dump_heap(const char* file_name) {
  ScopedConcurrentLock lock;
  ScopedDisableDebugCalls disable;

  std::lock_guard<std::mutex> guard(g_dump_lock);

  FILE* fp = fopen(file_name, "w+e");
  if (fp == nullptr) {
    error_log("Unable to create file: %s", file_name);
    return;
  }

  error_log("Dumping to file: %s\n", file_name);
  write_dump(fp);
  fclose(fp);
}