• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include <gtest/gtest.h>
18 
19 #include <elf.h>
20 #include <limits.h>
21 #include <pthread.h>
22 #include <stdint.h>
23 #include <stdio.h>
24 #include <stdlib.h>
25 #include <string.h>
26 #include <sys/types.h>
27 #include <sys/wait.h>
28 #include <malloc.h>
29 #include <unistd.h>
30 
31 #include <atomic>
32 #include <tinyxml2.h>
33 
34 #include <android-base/file.h>
35 
36 #include "private/bionic_config.h"
37 #include "private/bionic_malloc.h"
38 #include "utils.h"
39 
40 #if defined(__BIONIC__)
41 #define HAVE_REALLOCARRAY 1
42 #else
43 #define HAVE_REALLOCARRAY __GLIBC_PREREQ(2, 26)
44 #endif
45 
TEST(malloc, malloc_std) {
  // Basic sanity: a small allocation succeeds and reports a usable size
  // at least as large as what was requested.
  void* ptr = malloc(100);
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(100U, malloc_usable_size(ptr));
  free(ptr);
}
53 
TEST(malloc, malloc_overflow) {
  SKIP_WITH_HWASAN;
  // An impossibly large request must fail and set ENOMEM.
  errno = 0;
  void* ptr = malloc(SIZE_MAX);
  ASSERT_EQ(nullptr, ptr);
  ASSERT_EQ(ENOMEM, errno);
}
60 
TEST(malloc, calloc_std) {
  // calloc must return zero-initialized memory of at least the requested size.
  const size_t alloc_len = 100;
  char* ptr = static_cast<char*>(calloc(1, alloc_len));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(alloc_len, malloc_usable_size(ptr));
  for (size_t i = 0; i < alloc_len; i++) {
    ASSERT_EQ(0, ptr[i]);
  }
  free(ptr);
}
72 
TEST(malloc, calloc_illegal) {
  SKIP_WITH_HWASAN;
  // A count of -1 converts to a huge size_t; the allocation must fail
  // with ENOMEM rather than wrap.
  errno = 0;
  void* ptr = calloc(-1, 100);
  ASSERT_EQ(nullptr, ptr);
  ASSERT_EQ(ENOMEM, errno);
}
79 
TEST(malloc, calloc_overflow) {
  SKIP_WITH_HWASAN;
  // Every nmemb/size pair here multiplies past SIZE_MAX; each call must
  // fail and report ENOMEM.
  static constexpr struct {
    size_t nmemb;
    size_t size;
  } kCases[] = {
      {1, SIZE_MAX},
      {SIZE_MAX, SIZE_MAX},
      {2, SIZE_MAX},
      {SIZE_MAX, 2},
  };
  for (const auto& c : kCases) {
    errno = 0;
    ASSERT_EQ(nullptr, calloc(c.nmemb, c.size));
    ASSERT_EQ(ENOMEM, errno);
  }
}
95 
TEST(malloc, memalign_multiple) {
  SKIP_WITH_HWASAN << "hwasan requires power of 2 alignment";
  // memalign must accept any alignment value; the result is aligned to the
  // largest power of two not greater than the requested alignment.
  for (size_t i = 0; i <= 12; i++) {
    const size_t pow2 = 1U << i;
    for (size_t alignment = pow2; alignment < (1U << (i + 1)); alignment++) {
      char* ptr = reinterpret_cast<char*>(memalign(alignment, 100));
      ASSERT_NE(nullptr, ptr) << "Failed at alignment " << alignment;
      ASSERT_LE(100U, malloc_usable_size(ptr)) << "Failed at alignment " << alignment;
      ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) % pow2)
          << "Failed at alignment " << alignment;
      free(ptr);
    }
  }
}
110 
TEST(malloc, memalign_overflow) {
  SKIP_WITH_HWASAN;
  // A SIZE_MAX request must fail rather than wrap.
  void* ptr = memalign(4096, SIZE_MAX);
  ASSERT_EQ(nullptr, ptr);
}
115 
TEST(malloc, memalign_non_power2) {
  SKIP_WITH_HWASAN;
  // Non-power-of-two (and zero) alignments are accepted rather than rejected.
  for (size_t align = 0; align <= 256; align++) {
    void* ptr = memalign(align, 1024);
    ASSERT_NE(nullptr, ptr) << "Failed at align " << align;
    free(ptr);
  }
}
125 
TEST(malloc, memalign_realloc) {
  // Memalign and then realloc the pointer a couple of times, verifying that
  // the preserved prefix keeps its fill value across every resize.
  for (size_t alignment = 1; alignment <= 4096; alignment <<= 1) {
    char* ptr = reinterpret_cast<char*>(memalign(alignment, 100));
    ASSERT_TRUE(ptr != nullptr);
    ASSERT_LE(100U, malloc_usable_size(ptr));
    ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) % alignment);
    memset(ptr, 0x23, 100);

    // Grow: the original 100 bytes must be preserved.
    ptr = reinterpret_cast<char*>(realloc(ptr, 200));
    ASSERT_TRUE(ptr != nullptr);
    ASSERT_LE(200U, malloc_usable_size(ptr));
    for (size_t i = 0; i < 100; i++) {
      ASSERT_EQ(0x23, ptr[i]);
    }
    memset(ptr, 0x45, 200);

    // Grow again: all 200 bytes must be preserved.
    ptr = reinterpret_cast<char*>(realloc(ptr, 300));
    ASSERT_TRUE(ptr != nullptr);
    ASSERT_LE(300U, malloc_usable_size(ptr));
    for (size_t i = 0; i < 200; i++) {
      ASSERT_EQ(0x45, ptr[i]);
    }
    memset(ptr, 0x67, 300);

    // Shrink: the first 250 bytes must be preserved.
    ptr = reinterpret_cast<char*>(realloc(ptr, 250));
    ASSERT_TRUE(ptr != nullptr);
    ASSERT_LE(250U, malloc_usable_size(ptr));
    for (size_t i = 0; i < 250; i++) {
      ASSERT_EQ(0x67, ptr[i]);
    }
    free(ptr);
  }
}
161 
TEST(malloc, malloc_realloc_larger) {
  // Growing a malloc'd block must preserve its original contents.
  char* ptr = reinterpret_cast<char*>(malloc(100));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(100U, malloc_usable_size(ptr));
  memset(ptr, 67, 100);

  ptr = reinterpret_cast<char*>(realloc(ptr, 200));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(200U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 100; i++) {
    ASSERT_EQ(67, ptr[i]);
  }
  free(ptr);
}
177 
TEST(malloc, malloc_realloc_smaller) {
  // Shrinking a malloc'd block must preserve the surviving prefix.
  char* ptr = reinterpret_cast<char*>(malloc(200));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(200U, malloc_usable_size(ptr));
  memset(ptr, 67, 200);

  ptr = reinterpret_cast<char*>(realloc(ptr, 100));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(100U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 100; i++) {
    ASSERT_EQ(67, ptr[i]);
  }
  free(ptr);
}
193 
TEST(malloc, malloc_multiple_realloc) {
  // Shrink and grow a malloc'd block several times; the surviving bytes must
  // keep their fill value across every resize.
  char* ptr = reinterpret_cast<char*>(malloc(200));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(200U, malloc_usable_size(ptr));
  memset(ptr, 0x23, 200);

  // Shrink to 100: the first 100 bytes are preserved.
  ptr = reinterpret_cast<char*>(realloc(ptr, 100));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(100U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 100; i++) {
    ASSERT_EQ(0x23, ptr[i]);
  }

  // Shrink further to 50.
  ptr = reinterpret_cast<char*>(realloc(ptr, 50));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(50U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 50; i++) {
    ASSERT_EQ(0x23, ptr[i]);
  }

  // Grow to 150: only the first 50 bytes are defined.
  ptr = reinterpret_cast<char*>(realloc(ptr, 150));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(150U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 50; i++) {
    ASSERT_EQ(0x23, ptr[i]);
  }
  memset(ptr, 0x23, 150);

  // Grow to 425: all 150 previously written bytes must survive.
  ptr = reinterpret_cast<char*>(realloc(ptr, 425));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(425U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 150; i++) {
    ASSERT_EQ(0x23, ptr[i]);
  }
  free(ptr);
}
231 
TEST(malloc, calloc_realloc_larger) {
  // Growing a calloc'd block must preserve its zeroed contents.
  char* ptr = static_cast<char*>(calloc(1, 100));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(100U, malloc_usable_size(ptr));

  ptr = static_cast<char*>(realloc(ptr, 200));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(200U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 100; i++) {
    ASSERT_EQ(0, ptr[i]);
  }
  free(ptr);
}
246 
TEST(malloc, calloc_realloc_smaller) {
  // Shrinking a calloc'd block must preserve the zeroed prefix.
  char* ptr = static_cast<char*>(calloc(1, 200));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(200U, malloc_usable_size(ptr));

  ptr = static_cast<char*>(realloc(ptr, 100));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(100U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 100; i++) {
    ASSERT_EQ(0, ptr[i]);
  }
  free(ptr);
}
261 
TEST(malloc, calloc_multiple_realloc) {
  // Shrink and grow a calloc'd block several times; the surviving bytes must
  // stay zero across every resize.
  char* ptr = static_cast<char*>(calloc(1, 200));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(200U, malloc_usable_size(ptr));

  // Shrink to 100: the first 100 bytes remain zero.
  ptr = static_cast<char*>(realloc(ptr, 100));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(100U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 100; i++) {
    ASSERT_EQ(0, ptr[i]);
  }

  // Shrink further to 50.
  ptr = static_cast<char*>(realloc(ptr, 50));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(50U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 50; i++) {
    ASSERT_EQ(0, ptr[i]);
  }

  // Grow to 150: only the first 50 bytes are defined.
  ptr = static_cast<char*>(realloc(ptr, 150));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(150U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 50; i++) {
    ASSERT_EQ(0, ptr[i]);
  }
  memset(ptr, 0, 150);

  // Grow to 425: all 150 explicitly zeroed bytes must survive.
  ptr = static_cast<char*>(realloc(ptr, 425));
  ASSERT_NE(nullptr, ptr);
  ASSERT_LE(425U, malloc_usable_size(ptr));
  for (size_t i = 0; i < 150; i++) {
    ASSERT_EQ(0, ptr[i]);
  }
  free(ptr);
}
298 
TEST(malloc, realloc_overflow) {
  SKIP_WITH_HWASAN;
  // realloc(nullptr, ...) takes the malloc path and must fail for SIZE_MAX.
  errno = 0;
  ASSERT_EQ(nullptr, realloc(nullptr, SIZE_MAX));
  ASSERT_EQ(ENOMEM, errno);
  // Growing an existing allocation to SIZE_MAX must also fail, leaving the
  // original pointer valid so it can still be freed.
  void* ptr = malloc(100);
  ASSERT_NE(nullptr, ptr);
  errno = 0;
  ASSERT_EQ(nullptr, realloc(ptr, SIZE_MAX));
  ASSERT_EQ(ENOMEM, errno);
  free(ptr);
}
311 
312 #if defined(HAVE_DEPRECATED_MALLOC_FUNCS)
313 extern "C" void* pvalloc(size_t);
314 extern "C" void* valloc(size_t);
315 
TEST(malloc, pvalloc_std) {
  // pvalloc returns page-aligned memory rounded up to a whole page.
  size_t pagesize = sysconf(_SC_PAGESIZE);
  void* ptr = pvalloc(100);
  ASSERT_NE(nullptr, ptr);
  ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) & (pagesize - 1));
  ASSERT_LE(pagesize, malloc_usable_size(ptr));
  free(ptr);
}
324 
TEST(malloc, pvalloc_overflow) {
  // Rounding SIZE_MAX up to a page would overflow; the call must fail.
  void* ptr = pvalloc(SIZE_MAX);
  ASSERT_EQ(nullptr, ptr);
}
328 
TEST(malloc, valloc_std) {
  // valloc returns page-aligned memory.
  size_t pagesize = sysconf(_SC_PAGESIZE);
  // Fix: this test previously called pvalloc(100), so valloc itself was
  // never exercised.
  void* ptr = valloc(100);
  ASSERT_TRUE(ptr != nullptr);
  ASSERT_TRUE((reinterpret_cast<uintptr_t>(ptr) & (pagesize-1)) == 0);
  free(ptr);
}
336 
TEST(malloc, valloc_overflow) {
  // A SIZE_MAX request cannot succeed.
  void* ptr = valloc(SIZE_MAX);
  ASSERT_EQ(nullptr, ptr);
}
340 #endif
341 
TEST(malloc, malloc_info) {
#ifdef __BIONIC__
  SKIP_WITH_HWASAN; // hwasan does not implement malloc_info
  // Capture malloc_info's XML output into an in-memory buffer.
  char* buf;
  size_t bufsize;
  FILE* memstream = open_memstream(&buf, &bufsize);
  ASSERT_NE(nullptr, memstream);
  ASSERT_EQ(0, malloc_info(0, memstream));
  ASSERT_EQ(0, fclose(memstream));

  // The output must be well-formed XML with a <malloc> root element.
  tinyxml2::XMLDocument doc;
  ASSERT_EQ(tinyxml2::XML_SUCCESS, doc.Parse(buf));

  auto root = doc.FirstChildElement();
  ASSERT_NE(nullptr, root);
  ASSERT_STREQ("malloc", root->Name());
  if (std::string(root->Attribute("version")) == "jemalloc-1") {
    // Verify jemalloc version of this data.
    ASSERT_STREQ("jemalloc-1", root->Attribute("version"));

    // Each arena appears as a <heap> element carrying an "nr" attribute and
    // four per-arena counters, all parseable as integers.
    auto arena = root->FirstChildElement();
    for (; arena != nullptr; arena = arena->NextSiblingElement()) {
      int val;

      ASSERT_STREQ("heap", arena->Name());
      ASSERT_EQ(tinyxml2::XML_SUCCESS, arena->QueryIntAttribute("nr", &val));
      ASSERT_EQ(tinyxml2::XML_SUCCESS,
                arena->FirstChildElement("allocated-large")->QueryIntText(&val));
      ASSERT_EQ(tinyxml2::XML_SUCCESS,
                arena->FirstChildElement("allocated-huge")->QueryIntText(&val));
      ASSERT_EQ(tinyxml2::XML_SUCCESS,
                arena->FirstChildElement("allocated-bins")->QueryIntText(&val));
      ASSERT_EQ(tinyxml2::XML_SUCCESS,
                arena->FirstChildElement("bins-total")->QueryIntText(&val));

      // Each <bin> child must carry an "nr" attribute plus allocation counters.
      // Siblings that are not <bin> elements are skipped by the strcmp guard.
      auto bin = arena->FirstChildElement("bin");
      for (; bin != nullptr; bin = bin ->NextSiblingElement()) {
        if (strcmp(bin->Name(), "bin") == 0) {
          ASSERT_EQ(tinyxml2::XML_SUCCESS, bin->QueryIntAttribute("nr", &val));
          ASSERT_EQ(tinyxml2::XML_SUCCESS,
                    bin->FirstChildElement("allocated")->QueryIntText(&val));
          ASSERT_EQ(tinyxml2::XML_SUCCESS,
                    bin->FirstChildElement("nmalloc")->QueryIntText(&val));
          ASSERT_EQ(tinyxml2::XML_SUCCESS,
                    bin->FirstChildElement("ndalloc")->QueryIntText(&val));
        }
      }
    }
  } else {
    // Only verify that this is debug-malloc-1, the malloc debug unit tests
    // verify the output.
    ASSERT_STREQ("debug-malloc-1", root->Attribute("version"));
  }
#endif
}
397 
TEST(malloc, malloc_info_matches_mallinfo) {
#ifdef __BIONIC__
  SKIP_WITH_HWASAN; // hwasan does not implement malloc_info

  // Capture malloc_info output, sampling mallinfo().uordblks immediately
  // before and after so the XML totals can be bracketed between them.
  char* buf;
  size_t bufsize;
  FILE* memstream = open_memstream(&buf, &bufsize);
  ASSERT_NE(nullptr, memstream);
  size_t mallinfo_before_allocated_bytes = mallinfo().uordblks;
  ASSERT_EQ(0, malloc_info(0, memstream));
  size_t mallinfo_after_allocated_bytes = mallinfo().uordblks;
  ASSERT_EQ(0, fclose(memstream));

  tinyxml2::XMLDocument doc;
  ASSERT_EQ(tinyxml2::XML_SUCCESS, doc.Parse(buf));

  // Sum of the per-arena counters parsed from the XML.
  size_t total_allocated_bytes = 0;
  auto root = doc.FirstChildElement();
  ASSERT_NE(nullptr, root);
  ASSERT_STREQ("malloc", root->Name());
  if (std::string(root->Attribute("version")) == "jemalloc-1") {
    // Verify jemalloc version of this data.
    ASSERT_STREQ("jemalloc-1", root->Attribute("version"));

    auto arena = root->FirstChildElement();
    for (; arena != nullptr; arena = arena->NextSiblingElement()) {
      int val;

      // Accumulate allocated-large + allocated-huge + allocated-bins per
      // arena; bins-total is parsed for validity but not added to the sum.
      ASSERT_STREQ("heap", arena->Name());
      ASSERT_EQ(tinyxml2::XML_SUCCESS, arena->QueryIntAttribute("nr", &val));
      ASSERT_EQ(tinyxml2::XML_SUCCESS,
                arena->FirstChildElement("allocated-large")->QueryIntText(&val));
      total_allocated_bytes += val;
      ASSERT_EQ(tinyxml2::XML_SUCCESS,
                arena->FirstChildElement("allocated-huge")->QueryIntText(&val));
      total_allocated_bytes += val;
      ASSERT_EQ(tinyxml2::XML_SUCCESS,
                arena->FirstChildElement("allocated-bins")->QueryIntText(&val));
      total_allocated_bytes += val;
      ASSERT_EQ(tinyxml2::XML_SUCCESS,
                arena->FirstChildElement("bins-total")->QueryIntText(&val));
    }
    // The total needs to be between the mallinfo call before and after
    // since malloc_info allocates some memory.
    EXPECT_LE(mallinfo_before_allocated_bytes, total_allocated_bytes);
    EXPECT_GE(mallinfo_after_allocated_bytes, total_allocated_bytes);
  } else {
    // Only verify that this is debug-malloc-1, the malloc debug unit tests
    // verify the output.
    ASSERT_STREQ("debug-malloc-1", root->Attribute("version"));
  }
#endif
}
451 
TEST(malloc, calloc_usable_size) {
  for (size_t size = 1; size <= 2048; size++) {
    // Dirty an allocation so a subsequent calloc is likely to reuse memory
    // containing non-zero data.
    void* pointer = malloc(size);
    ASSERT_TRUE(pointer != nullptr);
    memset(pointer, 0xeb, malloc_usable_size(pointer));
    free(pointer);

    // We should get a previous pointer that has been set to non-zero.
    // If calloc does not zero out all of the data, this will fail.
    uint8_t* zero_mem = reinterpret_cast<uint8_t*>(calloc(1, size));
    // Fix: this previously checked the stale 'pointer' variable, so a failed
    // calloc would have gone unnoticed (and then been dereferenced).
    ASSERT_TRUE(zero_mem != nullptr);
    size_t usable_size = malloc_usable_size(zero_mem);
    for (size_t i = 0; i < usable_size; i++) {
      ASSERT_EQ(0, zero_mem[i]) << "Failed at allocation size " << size << " at byte " << i;
    }
    free(zero_mem);
  }
}
470 
TEST(malloc, malloc_0) {
  // A zero-byte request still yields a pointer that can be freed.
  void* p = malloc(0);
  ASSERT_NE(nullptr, p);
  free(p);
}
476 
TEST(malloc, calloc_0_0) {
  // Both arguments zero still yields a freeable pointer.
  void* p = calloc(0, 0);
  ASSERT_NE(nullptr, p);
  free(p);
}
482 
TEST(malloc, calloc_0_1) {
  // Zero count with non-zero size still yields a freeable pointer.
  void* p = calloc(0, 1);
  ASSERT_NE(nullptr, p);
  free(p);
}
488 
TEST(malloc, calloc_1_0) {
  // Non-zero count with zero size still yields a freeable pointer.
  void* p = calloc(1, 0);
  ASSERT_NE(nullptr, p);
  free(p);
}
494 
TEST(malloc, realloc_nullptr_0) {
  // realloc(nullptr, size) is actually malloc(size), so even a zero-byte
  // request yields a pointer that must be freed.
  void* p = realloc(nullptr, 0);
  ASSERT_NE(nullptr, p);
  free(p);
}
501 
TEST(malloc, realloc_0) {
  void* p = malloc(1024);
  ASSERT_NE(nullptr, p);
  // realloc(p, 0) is actually free(p) and returns nullptr.
  ASSERT_EQ(nullptr, realloc(p, 0));
}
509 
// Number of allocations performed by each phase of the alignment test below.
constexpr size_t MAX_LOOPS = 200;
511 
512 // Make sure that memory returned by malloc is aligned to allow these data types.
TEST(malloc, verify_alignment) {
  // Pointers allocated in each phase, kept live until the end so successive
  // allocations cannot simply reuse the same (already aligned) slot.
  uint32_t** values_32 = new uint32_t*[MAX_LOOPS];
  uint64_t** values_64 = new uint64_t*[MAX_LOOPS];
  long double** values_ldouble = new long double*[MAX_LOOPS];
  // Use filler to attempt to force the allocator to get potentially bad alignments.
  void** filler = new void*[MAX_LOOPS];

  for (size_t i = 0; i < MAX_LOOPS; i++) {
    // Check uint32_t pointers.
    filler[i] = malloc(1);
    ASSERT_TRUE(filler[i] != nullptr);

    values_32[i] = reinterpret_cast<uint32_t*>(malloc(sizeof(uint32_t)));
    ASSERT_TRUE(values_32[i] != nullptr);
    // Write then read back to confirm the storage is genuinely usable.
    *values_32[i] = i;
    ASSERT_EQ(*values_32[i], i);
    // The pointer must be aligned for a 4-byte access.
    ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(values_32[i]) & (sizeof(uint32_t) - 1));

    free(filler[i]);
  }

  for (size_t i = 0; i < MAX_LOOPS; i++) {
    // Check uint64_t pointers.
    filler[i] = malloc(1);
    ASSERT_TRUE(filler[i] != nullptr);

    values_64[i] = reinterpret_cast<uint64_t*>(malloc(sizeof(uint64_t)));
    ASSERT_TRUE(values_64[i] != nullptr);
    *values_64[i] = 0x1000 + i;
    ASSERT_EQ(*values_64[i], 0x1000 + i);
    // The pointer must be aligned for an 8-byte access.
    ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(values_64[i]) & (sizeof(uint64_t) - 1));

    free(filler[i]);
  }

  for (size_t i = 0; i < MAX_LOOPS; i++) {
    // Check long double pointers.
    filler[i] = malloc(1);
    ASSERT_TRUE(filler[i] != nullptr);

    values_ldouble[i] = reinterpret_cast<long double*>(malloc(sizeof(long double)));
    ASSERT_TRUE(values_ldouble[i] != nullptr);
    *values_ldouble[i] = 5.5 + i;
    ASSERT_DOUBLE_EQ(*values_ldouble[i], 5.5 + i);
    // 32 bit glibc has a long double size of 12 bytes, so hardcode the
    // required alignment to 0x7.
#if !defined(__BIONIC__) && !defined(__LP64__)
    ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(values_ldouble[i]) & 0x7);
#else
    ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(values_ldouble[i]) & (sizeof(long double) - 1));
#endif

    free(filler[i]);
  }

  // Release every allocation kept live across the three phases.
  for (size_t i = 0; i < MAX_LOOPS; i++) {
    free(values_32[i]);
    free(values_64[i]);
    free(values_ldouble[i]);
  }

  delete[] filler;
  delete[] values_32;
  delete[] values_64;
  delete[] values_ldouble;
}
579 
TEST(malloc, mallopt_smoke) {
  // An unrecognized option must fail without touching errno.
  errno = 0;
  const int result = mallopt(-1000, 1);
  ASSERT_EQ(0, result);
  // mallopt doesn't set errno.
  ASSERT_EQ(0, errno);
}
586 
TEST(malloc, mallopt_decay) {
#if defined(__BIONIC__)
  SKIP_WITH_HWASAN << "hwasan does not implement mallopt";
  errno = 0;
  // Toggle the decay-time setting back and forth; every call must succeed.
  for (int value : {1, 0, 1, 0}) {
    ASSERT_EQ(1, mallopt(M_DECAY_TIME, value));
  }
#else
  GTEST_SKIP() << "bionic-only test";
#endif
}
599 
TEST(malloc, mallopt_purge) {
#if defined(__BIONIC__)
  SKIP_WITH_HWASAN << "hwasan does not implement mallopt";
  // A purge request must be accepted.
  errno = 0;
  const int result = mallopt(M_PURGE, 0);
  ASSERT_EQ(1, result);
#else
  GTEST_SKIP() << "bionic-only test";
#endif
}
609 
TEST(malloc, reallocarray_overflow) {
#if HAVE_REALLOCARRAY
  // Values that cause overflow to a result small enough (8 on LP64) that malloc would "succeed".
  const size_t a = static_cast<size_t>(INTPTR_MIN + 4);
  const size_t b = 2;

  // The overflow must be detected regardless of argument order.
  errno = 0;
  ASSERT_EQ(nullptr, reallocarray(nullptr, a, b));
  ASSERT_EQ(ENOMEM, errno);

  errno = 0;
  ASSERT_EQ(nullptr, reallocarray(nullptr, b, a));
  ASSERT_EQ(ENOMEM, errno);
#else
  GTEST_SKIP() << "reallocarray not available";
#endif
}
627 
TEST(malloc, reallocarray) {
#if HAVE_REALLOCARRAY
  // reallocarray(nullptr, nmemb, size) behaves like malloc(nmemb * size).
  void* p = reallocarray(nullptr, 2, 32);
  ASSERT_TRUE(p != nullptr);
  ASSERT_GE(malloc_usable_size(p), 64U);
  free(p);  // Fix: the allocation was previously leaked.
#else
  GTEST_SKIP() << "reallocarray not available";
#endif
}
637 
TEST(malloc, mallinfo) {
#if defined(__BIONIC__)
  SKIP_WITH_HWASAN << "hwasan does not implement mallinfo";
  // For each size, verify that mallinfo().uordblks eventually grows by at
  // least the usable size of a new allocation of that size.
  static size_t sizes[] = {
    8, 32, 128, 4096, 32768, 131072, 1024000, 10240000, 20480000, 300000000
  };

  // Upper bound on retries (and on allocations kept live) per size.
  constexpr static size_t kMaxAllocs = 50;

  for (size_t size : sizes) {
    // If some of these allocations are stuck in a thread cache, then keep
    // looping until we make an allocation that changes the total size of the
    // memory allocated.
    // jemalloc implementations counts the thread cache allocations against
    // total memory allocated.
    void* ptrs[kMaxAllocs] = {};
    bool pass = false;
    for (size_t i = 0; i < kMaxAllocs; i++) {
      size_t allocated = mallinfo().uordblks;
      ptrs[i] = malloc(size);
      ASSERT_TRUE(ptrs[i] != nullptr);
      size_t new_allocated = mallinfo().uordblks;
      if (allocated != new_allocated) {
        size_t usable_size = malloc_usable_size(ptrs[i]);
        // Only check if the total got bigger by at least allocation size.
        // Sometimes the mallinfo numbers can go backwards due to compaction
        // and/or freeing of cached data.
        if (new_allocated >= allocated + usable_size) {
          pass = true;
          break;
        }
      }
    }
    // Free everything allocated this round, including slots never filled
    // (free(nullptr) is a no-op thanks to the zero-initializer above).
    for (void* ptr : ptrs) {
      free(ptr);
    }
    ASSERT_TRUE(pass)
        << "For size " << size << " allocated bytes did not increase after "
        << kMaxAllocs << " allocations.";
  }
#else
  GTEST_SKIP() << "glibc is broken";
#endif
}
682 
TEST(android_mallopt, error_on_unexpected_option) {
#if defined(__BIONIC__)
  // An opcode android_mallopt does not recognize must fail with ENOTSUP.
  const int unrecognized_option = -1;
  errno = 0;
  EXPECT_FALSE(android_mallopt(unrecognized_option, nullptr, 0));
  EXPECT_EQ(ENOTSUP, errno);
#else
  GTEST_SKIP() << "bionic-only test";
#endif
}
693 
// Returns true if this test binary is dynamically linked, determined by
// reading our own executable's ELF header and checking e_type == ET_DYN.
// Errors (open/read failures) conservatively report "dynamic".
bool IsDynamic() {
#if defined(__LP64__)
  Elf64_Ehdr ehdr;
#else
  Elf32_Ehdr ehdr;
#endif
  std::string path(android::base::GetExecutablePath());

  // NOTE(review): O_RDONLY/O_CLOEXEC come from <fcntl.h>, which is not
  // included directly in this file — presumably pulled in transitively
  // (e.g. via utils.h); confirm.
  int fd = open(path.c_str(), O_RDONLY | O_CLOEXEC);
  if (fd == -1) {
    // Assume dynamic on error.
    return true;
  }
  bool read_completed = android::base::ReadFully(fd, &ehdr, sizeof(ehdr));
  close(fd);
  // Assume dynamic in error cases.
  return !read_completed || ehdr.e_type == ET_DYN;
}
712 
TEST(android_mallopt, init_zygote_child_profiling) {
#if defined(__BIONIC__)
  // Successful call.
  errno = 0;
  if (IsDynamic()) {
    EXPECT_TRUE(android_mallopt(M_INIT_ZYGOTE_CHILD_PROFILING, nullptr, 0));
    EXPECT_EQ(0, errno);
  } else {
    // Not supported in static executables.
    EXPECT_FALSE(android_mallopt(M_INIT_ZYGOTE_CHILD_PROFILING, nullptr, 0));
    EXPECT_EQ(ENOTSUP, errno);
  }

  // Unexpected arguments rejected.
  errno = 0;
  char unexpected = 0;
  EXPECT_FALSE(android_mallopt(M_INIT_ZYGOTE_CHILD_PROFILING, &unexpected, 1));
  if (IsDynamic()) {
    EXPECT_EQ(EINVAL, errno);
  } else {
    EXPECT_EQ(ENOTSUP, errno);
  }
#else
  GTEST_SKIP() << "bionic-only test";
#endif
}
739 
740 #if defined(__BIONIC__)
// Runs in a child process (via EXPECT_EXIT): installs a 128MB allocation
// limit, then exits 0 only if 'func' succeeds for a 20MB request AND fails
// for a 128MB request; any other outcome exits 1.
template <typename FuncType>
void CheckAllocationFunction(FuncType func) {
  // Assumes that no more than 108MB of memory is allocated before this.
  size_t limit = 128 * 1024 * 1024;
  ASSERT_TRUE(android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &limit, sizeof(limit)));
  // 20MB must still fit under the limit.
  if (!func(20 * 1024 * 1024))
    exit(1);
  // A request as large as the whole limit must be refused.
  if (func(128 * 1024 * 1024))
    exit(1);
  exit(0);
}
752 #endif
753 
TEST(android_mallopt, set_allocation_limit) {
#if defined(__BIONIC__)
  // Each lambda exercises a different allocation entry point under the
  // limit installed by CheckAllocationFunction; the child exits 0 only if
  // a 20MB request succeeds and a 128MB request fails.
  EXPECT_EXIT(CheckAllocationFunction([](size_t bytes) { return calloc(bytes, 1) != nullptr; }),
              testing::ExitedWithCode(0), "");
  EXPECT_EXIT(CheckAllocationFunction([](size_t bytes) { return calloc(1, bytes) != nullptr; }),
              testing::ExitedWithCode(0), "");
  EXPECT_EXIT(CheckAllocationFunction([](size_t bytes) { return malloc(bytes) != nullptr; }),
              testing::ExitedWithCode(0), "");
  EXPECT_EXIT(CheckAllocationFunction(
                  [](size_t bytes) { return memalign(sizeof(void*), bytes) != nullptr; }),
              testing::ExitedWithCode(0), "");
  EXPECT_EXIT(CheckAllocationFunction([](size_t bytes) {
                void* ptr;
                return posix_memalign(&ptr, sizeof(void *), bytes) == 0;
              }),
              testing::ExitedWithCode(0), "");
  EXPECT_EXIT(CheckAllocationFunction(
                  [](size_t bytes) { return aligned_alloc(sizeof(void*), bytes) != nullptr; }),
              testing::ExitedWithCode(0), "");
  EXPECT_EXIT(CheckAllocationFunction([](size_t bytes) {
                void* p = malloc(1024 * 1024);
                return realloc(p, bytes) != nullptr;
              }),
              testing::ExitedWithCode(0), "");
  // The deprecated page-aligned allocators only exist on 32-bit.
#if !defined(__LP64__)
  EXPECT_EXIT(CheckAllocationFunction([](size_t bytes) { return pvalloc(bytes) != nullptr; }),
              testing::ExitedWithCode(0), "");
  EXPECT_EXIT(CheckAllocationFunction([](size_t bytes) { return valloc(bytes) != nullptr; }),
              testing::ExitedWithCode(0), "");
#endif
#else
  GTEST_SKIP() << "bionic extension";
#endif
}
788 
TEST(android_mallopt, set_allocation_limit_multiple) {
#if defined(__BIONIC__)
  // Only the first set should work.
  size_t first_limit = 256 * 1024 * 1024;
  ASSERT_TRUE(android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &first_limit, sizeof(first_limit)));
  // A second attempt, even with a different value, must be rejected.
  size_t second_limit = 32 * 1024 * 1024;
  ASSERT_FALSE(android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &second_limit, sizeof(second_limit)));
#else
  GTEST_SKIP() << "bionic extension";
#endif
}
800 
801 #if defined(__BIONIC__)
// Chunk size used to probe the process-wide allocation limit.
static constexpr size_t kAllocationSize = 8 * 1024 * 1024;

// Allocates kAllocationSize chunks until malloc fails, frees them, and
// returns how many succeeded. NOTE: if all 20 slots succeed (the limit is
// never reached), max_pointers remains 0, nothing is freed, and 0 is
// returned — callers treat 0 as "limit never reached".
static size_t GetMaxAllocations() {
  size_t max_pointers = 0;
  void* ptrs[20];
  for (size_t i = 0; i < sizeof(ptrs) / sizeof(void*); i++) {
    ptrs[i] = malloc(kAllocationSize);
    if (ptrs[i] == nullptr) {
      max_pointers = i;
      break;
    }
  }
  for (size_t i = 0; i < max_pointers; i++) {
    free(ptrs[i]);
  }
  return max_pointers;
}
819 
VerifyMaxPointers(size_t max_pointers)820 static void VerifyMaxPointers(size_t max_pointers) {
821   // Now verify that we can allocate the same number as before.
822   void* ptrs[20];
823   for (size_t i = 0; i < max_pointers; i++) {
824     ptrs[i] = malloc(kAllocationSize);
825     ASSERT_TRUE(ptrs[i] != nullptr) << "Failed to allocate on iteration " << i;
826   }
827 
828   // Make sure the next allocation still fails.
829   ASSERT_TRUE(malloc(kAllocationSize) == nullptr);
830   for (size_t i = 0; i < max_pointers; i++) {
831     free(ptrs[i]);
832   }
833 }
834 #endif
835 
TEST(android_mallopt, set_allocation_limit_realloc_increase) {
#if defined(__BIONIC__)
  size_t limit = 128 * 1024 * 1024;
  ASSERT_TRUE(android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &limit, sizeof(limit)));

  size_t max_pointers = GetMaxAllocations();
  ASSERT_TRUE(max_pointers != 0) << "Limit never reached.";

  // Repeatedly grow one allocation while staying under the limit.
  void* memory = malloc(10 * 1024 * 1024);
  ASSERT_TRUE(memory != nullptr);
  for (size_t mb : {20, 40, 60, 80}) {
    memory = realloc(memory, mb * 1024 * 1024);
    ASSERT_TRUE(memory != nullptr) << "realloc failed at " << mb << "MB";
  }
  // Now push past limit.
  memory = realloc(memory, 130 * 1024 * 1024);
  ASSERT_TRUE(memory == nullptr);

  VerifyMaxPointers(max_pointers);
#else
  GTEST_SKIP() << "bionic extension";
#endif
}
865 
TEST(android_mallopt, set_allocation_limit_realloc_decrease) {
#if defined(__BIONIC__)
  size_t limit = 100 * 1024 * 1024;
  ASSERT_TRUE(android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &limit, sizeof(limit)));

  size_t max_pointers = GetMaxAllocations();
  ASSERT_TRUE(max_pointers != 0) << "Limit never reached.";

  // Repeatedly shrink one allocation; every step must succeed and the
  // accounting must be credited back so the limit is unchanged afterwards.
  void* memory = malloc(80 * 1024 * 1024);
  ASSERT_TRUE(memory != nullptr);
  for (size_t mb : {60, 40, 20, 10}) {
    memory = realloc(memory, mb * 1024 * 1024);
    ASSERT_TRUE(memory != nullptr) << "realloc failed at " << mb << "MB";
  }
  free(memory);

  VerifyMaxPointers(max_pointers);
#else
  GTEST_SKIP() << "bionic extension";
#endif
}
893 
TEST(android_mallopt, set_allocation_limit_realloc_free) {
#if defined(__BIONIC__)
  size_t limit = 100 * 1024 * 1024;
  ASSERT_TRUE(android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &limit, sizeof(limit)));

  size_t max_pointers = GetMaxAllocations();
  ASSERT_TRUE(max_pointers != 0) << "Limit never reached.";

  // realloc to zero frees the block; the freed bytes must be credited back
  // against the limit.
  void* memory = malloc(60 * 1024 * 1024);
  ASSERT_NE(nullptr, memory);
  ASSERT_EQ(nullptr, realloc(memory, 0));

  VerifyMaxPointers(max_pointers);
#else
  GTEST_SKIP() << "bionic extension";
#endif
}
913 
914 #if defined(__BIONIC__)
// Thread body: spins until the shared 'go' flag flips, then races to install
// the process-wide allocation limit. Returns a NON-NULL sentinel on SUCCESS
// and nullptr on failure — the caller counts non-null results to verify that
// exactly one thread won the race.
static void* SetAllocationLimit(void* data) {
  std::atomic_bool* go = reinterpret_cast<std::atomic_bool*>(data);
  // Busy-wait so all threads attempt the mallopt as close together as possible.
  while (!go->load()) {
  }
  size_t limit = 500 * 1024 * 1024;
  if (android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &limit, sizeof(limit))) {
    return reinterpret_cast<void*>(-1);
  }
  return nullptr;
}
925 
// Runs in a forked child: starts several threads that all race to set the
// allocation limit, and verifies exactly one of them succeeds. Exits 0 on
// success (the parent checks the exit status).
static void SetAllocationLimitMultipleThreads() {
  std::atomic_bool go;
  go = false;

  static constexpr size_t kNumThreads = 4;
  pthread_t threads[kNumThreads];
  for (size_t i = 0; i < kNumThreads; i++) {
    ASSERT_EQ(0, pthread_create(&threads[i], nullptr, SetAllocationLimit, &go));
  }

  // Let them go all at once.
  go = true;
  // NOTE(review): presumably this signal nudges the bionic profiling
  // machinery while the limit race is in flight — confirm which handler
  // __SIGRTMIN + 4 is reserved for.
  ASSERT_EQ(0, kill(getpid(), __SIGRTMIN + 4));

  // A thread returns non-null only if its android_mallopt call succeeded;
  // exactly one of the racing threads must win.
  size_t num_successful = 0;
  for (size_t i = 0; i < kNumThreads; i++) {
    void* result;
    ASSERT_EQ(0, pthread_join(threads[i], &result));
    if (result != nullptr) {
      num_successful++;
    }
  }
  ASSERT_EQ(1U, num_successful);
  exit(0);
}
951 #endif
952 
TEST(android_mallopt, set_allocation_limit_multiple_threads) {
#if defined(__BIONIC__)
  // Enable the profiling hooks first so the limit-setting race below also
  // exercises that path (only available for dynamic executables).
  if (IsDynamic()) {
    ASSERT_TRUE(android_mallopt(M_INIT_ZYGOTE_CHILD_PROFILING, nullptr, 0));
  }

  // Run this a number of times as a stress test.
  for (size_t i = 0; i < 100; i++) {
    // Not using ASSERT_EXIT because errors messages are not displayed.
    // Fork so each iteration starts with the limit unset (it can only be
    // set once per process).
    pid_t pid;
    if ((pid = fork()) == 0) {
      ASSERT_NO_FATAL_FAILURE(SetAllocationLimitMultipleThreads());
    }
    ASSERT_NE(-1, pid);
    int status;
    ASSERT_EQ(pid, wait(&status));
    ASSERT_EQ(0, WEXITSTATUS(status));
  }
#else
  GTEST_SKIP() << "bionic extension";
#endif
}
975