//===-- sanitizer_allocator64_test.cc -------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Tests for sanitizer_allocator64.h.
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator64.h"
#include "gtest/gtest.h"

#include <algorithm>
#include <vector>

static const uptr kAllocatorSpace = 0x600000000000ULL;
static const uptr kAllocatorSize = 0x10000000000ULL;  // 1T.

typedef DefaultSizeClassMap SCMap;
typedef
  SizeClassAllocator64<kAllocatorSpace, kAllocatorSize, 16, SCMap> Allocator;
typedef SizeClassAllocatorLocalCache<Allocator::kNumClasses, Allocator>
  AllocatorCache;
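// Notes on the configuration above: the allocator owns a fixed 1T address
// range starting at kAllocatorSpace, and the third template argument reserves
// 16 bytes of metadata per chunk, enough for the two uptr values the tests
// below stash via GetMetaData().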

TEST(SanitizerCommon, DefaultSizeClassMap) {
#if 0
  // Debug-only dump of the size class table.
  for (uptr i = 0; i < SCMap::kNumClasses; i++) {
    printf("c%ld => %ld cached=%ld(%ld)\n",
        i, SCMap::Size(i), SCMap::MaxCached(i) * SCMap::Size(i),
        SCMap::MaxCached(i));
  }
#endif

  // Size() and ClassID() must be consistent inverses of each other,
  // and class sizes must be strictly increasing.
  for (uptr c = 0; c < SCMap::kNumClasses; c++) {
    uptr s = SCMap::Size(c);
    CHECK_EQ(SCMap::ClassID(s), c);
    if (c != SCMap::kNumClasses - 1)
      CHECK_EQ(SCMap::ClassID(s + 1), c + 1);
    CHECK_EQ(SCMap::ClassID(s - 1), c);
    if (c)
      CHECK_GT(SCMap::Size(c), SCMap::Size(c-1));
  }
  CHECK_EQ(SCMap::ClassID(SCMap::kMaxSize + 1), 0);

  // Every size up to kMaxSize must map to the smallest class that can
  // hold it.
  for (uptr s = 1; s <= SCMap::kMaxSize; s++) {
    uptr c = SCMap::ClassID(s);
    CHECK_LT(c, SCMap::kNumClasses);
    CHECK_GE(SCMap::Size(c), s);
    if (c > 0)
      CHECK_LT(SCMap::Size(c-1), s);
  }
}

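// Round-trip test for the primary allocator: allocate chunks of assorted
// sizes, stash a pattern in each chunk's metadata, verify the pattern on
// free, and check that total memory usage stabilizes across iterations
// (i.e. freed chunks are actually reused).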
TEST(SanitizerCommon, SizeClassAllocator64) {
  Allocator a;
  a.Init();

  static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
    50000, 60000, 100000, 300000, 500000, 1000000, 2000000};

  std::vector<void *> allocated;

  uptr last_total_allocated = 0;
  for (int i = 0; i < 5; i++) {
    // Allocate a bunch of chunks.
    for (uptr s = 0; s < sizeof(sizes) / sizeof(sizes[0]); s++) {
      uptr size = sizes[s];
      // printf("s = %ld\n", size);
      uptr n_iter = std::max((uptr)2, 1000000 / size);
      for (uptr j = 0; j < n_iter; j++) {
        void *x = a.Allocate(size, 1);
        allocated.push_back(x);
        CHECK(a.PointerIsMine(x));
        CHECK_GE(a.GetActuallyAllocatedSize(x), size);
        uptr class_id = a.GetSizeClass(x);
        CHECK_EQ(class_id, SCMap::ClassID(size));
        uptr *metadata = reinterpret_cast<uptr*>(a.GetMetaData(x));
        metadata[0] = reinterpret_cast<uptr>(x) + 1;
        metadata[1] = 0xABCD;
      }
    }
    // Deallocate all, checking that the metadata survived.
    for (uptr j = 0; j < allocated.size(); j++) {
      void *x = allocated[j];
      uptr *metadata = reinterpret_cast<uptr*>(a.GetMetaData(x));
      CHECK_EQ(metadata[0], reinterpret_cast<uptr>(x) + 1);
      CHECK_EQ(metadata[1], 0xABCD);
      a.Deallocate(x);
    }
    allocated.clear();
    // After the first iteration the memory usage must not grow.
    uptr total_allocated = a.TotalMemoryUsed();
    if (last_total_allocated == 0)
      last_total_allocated = total_allocated;
    CHECK_EQ(last_total_allocated, total_allocated);
  }

  a.TestOnlyUnmap();
}

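// Stress GetMetaData(): map the same user pointers back to their metadata
// blocks kNumAllocs^2 times. The volatile sink keeps the lookups from being
// optimized away.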
TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
  Allocator a;
  a.Init();
  static volatile void *sink;

  const uptr kNumAllocs = 10000;
  void *allocated[kNumAllocs];
  for (uptr i = 0; i < kNumAllocs; i++) {
    uptr size = (i % 4096) + 1;
    void *x = a.Allocate(size, 1);
    allocated[i] = x;
  }
  // Access the metadata kNumAllocs^2 times.
  for (uptr i = 0; i < kNumAllocs * kNumAllocs; i++) {
    sink = a.GetMetaData(allocated[i % kNumAllocs]);
  }
  for (uptr i = 0; i < kNumAllocs; i++) {
    a.Deallocate(allocated[i]);
  }

  a.TestOnlyUnmap();
  (void)sink;
}

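// Helper for the death test below: one million 1M allocations (about 1T in
// total) must overflow the region reserved for a single size class and fire
// the allocator's internal CHECK.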
void FailInAssertionOnOOM() {
  Allocator a;
  a.Init();
  const uptr size = 1 << 20;
  for (int i = 0; i < 1000000; i++) {
    a.Allocate(size, 1);
  }

  a.TestOnlyUnmap();
}

TEST(SanitizerCommon, SizeClassAllocator64Overflow) {
  EXPECT_DEATH(FailInAssertionOnOOM(),
               "allocated_user.*allocated_meta.*kRegionSize");
}

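// The secondary allocator backs each chunk with its own mmap; check basic
// allocate/deallocate round trips, per-chunk metadata, and that
// TotalMemoryUsed() drops back to zero once everything is freed.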
TEST(SanitizerCommon, LargeMmapAllocator) {
  LargeMmapAllocator a;
  a.Init();

  static const int kNumAllocs = 100;
  void *allocated[kNumAllocs];
  static const uptr size = 1000;
  // Allocate some.
  for (int i = 0; i < kNumAllocs; i++) {
    allocated[i] = a.Allocate(size, 1);
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all.
  for (int i = 0; i < kNumAllocs; i++) {
    void *p = allocated[i];
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  // Check that none are left.
  CHECK_EQ(a.TotalMemoryUsed(), 0);

  // Allocate some more, also add metadata.
  for (int i = 0; i < kNumAllocs; i++) {
    void *x = a.Allocate(size, 1);
    CHECK_GE(a.GetActuallyAllocatedSize(x), size);
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
    *meta = i;
    allocated[i] = x;
  }
  CHECK_GT(a.TotalMemoryUsed(), size * kNumAllocs);
  // Deallocate all in reverse order.
  for (int i = 0; i < kNumAllocs; i++) {
    int idx = kNumAllocs - i - 1;
    void *p = allocated[idx];
    uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(p));
    CHECK_EQ(*meta, idx);
    CHECK(a.PointerIsMine(p));
    a.Deallocate(p);
  }
  CHECK_EQ(a.TotalMemoryUsed(), 0);
}

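// The combined allocator routes small requests to the size-class (primary)
// allocator through a per-thread cache and large ones to LargeMmapAllocator;
// impossible sizes must fail cleanly by returning null.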
TEST(SanitizerCommon, CombinedAllocator) {
  typedef Allocator PrimaryAllocator;
  typedef LargeMmapAllocator SecondaryAllocator;
  typedef CombinedAllocator<PrimaryAllocator, AllocatorCache,
          SecondaryAllocator> Allocator;

  AllocatorCache cache;
  Allocator a;
  a.Init();
  cache.Init();

  // Degenerate sizes, including ones that overflow when rounded up to the
  // alignment, must return null rather than crash.
  EXPECT_EQ(a.Allocate(&cache, -1, 1), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, -1, 1024), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
  EXPECT_EQ(a.Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);

  const uptr kNumAllocs = 100000;
  const uptr kNumIter = 10;
  for (uptr iter = 0; iter < kNumIter; iter++) {
    std::vector<void*> allocated;
    for (uptr i = 0; i < kNumAllocs; i++) {
      uptr size = (i % (1 << 14)) + 1;
      // Every 1024-th allocation uses a power-of-two size between 1K and 8M.
      if ((i % 1024) == 0)
        size = 1 << (10 + (i % 14));
      void *x = a.Allocate(&cache, size, 1);
      uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
      CHECK_EQ(*meta, 0);
      *meta = size;
      allocated.push_back(x);
    }

    // Free in random order to mix up the free lists.
    std::random_shuffle(allocated.begin(), allocated.end());

    for (uptr i = 0; i < kNumAllocs; i++) {
      void *x = allocated[i];
      uptr *meta = reinterpret_cast<uptr*>(a.GetMetaData(x));
      CHECK_NE(*meta, 0);
      CHECK(a.PointerIsMine(x));
      *meta = 0;
      a.Deallocate(&cache, x);
    }
    allocated.clear();
    a.SwallowCache(&cache);
  }
  a.TestOnlyUnmap();
}

static THREADLOCAL AllocatorCache static_allocator_cache;

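// The local cache keeps per-class free lists and hands them back to the
// allocator via Drain(); after a full allocate/free/drain cycle the
// allocator's memory usage should be identical from one iteration to the
// next.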
TEST(SanitizerCommon, SizeClassAllocatorLocalCache) {
  static_allocator_cache.Init();

  Allocator a;
  AllocatorCache cache;

  a.Init();
  cache.Init();

  const uptr kNumAllocs = 10000;
  const int kNumIter = 100;
  uptr saved_total = 0;
  for (int i = 0; i < kNumIter; i++) {
    void *allocated[kNumAllocs];
    for (uptr j = 0; j < kNumAllocs; j++) {
      allocated[j] = cache.Allocate(&a, 0);
    }
    for (uptr j = 0; j < kNumAllocs; j++) {
      cache.Deallocate(&a, 0, allocated[j]);
    }
    cache.Drain(&a);
    uptr total_allocated = a.TotalMemoryUsed();
    if (saved_total)
      CHECK_EQ(saved_total, total_allocated);
    saved_total = total_allocated;
  }

  a.TestOnlyUnmap();
}