/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "base/arena_allocator-inl.h"
#include "base/arena_bit_vector.h"
#include "base/memory_tool.h"
#include "gtest/gtest.h"

namespace art {

class ArenaAllocatorTest : public testing::Test {
 protected:
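  // Helper: walks the allocator's arena list (arena_head_ / next_) to count
  // how many arenas have been allocated so far.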
  size_t NumberOfArenas(ArenaAllocator* allocator) {
    size_t result = 0u;
    for (Arena* a = allocator->arena_head_; a != nullptr; a = a->next_) {
      ++result;
    }
    return result;
  }
};

TEST_F(ArenaAllocatorTest, Test) {
  ArenaPool pool;
  ArenaAllocator allocator(&pool);
  ArenaBitVector bv(&allocator, 10, true);
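  // The assertions below rely on GetStorageSize() reporting the size in 32-bit
  // words: bit 5 fits in the first word, while bit 35 forces the expandable
  // vector to grow to a second word.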
  bv.SetBit(5);
  EXPECT_EQ(1U, bv.GetStorageSize());
  bv.SetBit(35);
  EXPECT_EQ(2U, bv.GetStorageSize());
}

TEST_F(ArenaAllocatorTest, MakeDefined) {
  // Regression test to make sure we mark the allocated area defined.
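  // When running under a memory tool, the memory handed back with the first
  // allocator is presumably poisoned; allocating from the reused arena must
  // mark the area defined again, otherwise the array reads below would be
  // reported as invalid.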
  ArenaPool pool;
  static constexpr size_t kSmallArraySize = 10;
  static constexpr size_t kLargeArraySize = 50;
  uint32_t* small_array;
  {
    // Allocate a small array from an arena and release it.
    ArenaAllocator allocator(&pool);
    small_array = allocator.AllocArray<uint32_t>(kSmallArraySize);
    ASSERT_EQ(0u, small_array[kSmallArraySize - 1u]);
  }
  {
    // Reuse the previous arena and allocate more than the previous allocation,
    // including the red zone.
    ArenaAllocator allocator(&pool);
    uint32_t* large_array = allocator.AllocArray<uint32_t>(kLargeArraySize);
    ASSERT_EQ(0u, large_array[kLargeArraySize - 1u]);
    // Verify that the allocation was made on the same arena.
    ASSERT_EQ(small_array, large_array);
  }
}

TEST_F(ArenaAllocatorTest, LargeAllocations) {
  if (arena_allocator::kArenaAllocatorPreciseTracking) {
    printf("WARNING: TEST DISABLED FOR precise arena tracking\n");
    return;
  }

  {
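    // 5/8 + 2/8 of the default arena size fit together in a single arena.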
    ArenaPool pool;
    ArenaAllocator allocator(&pool);
    // Note: Leaving some space for memory tool red zones.
    void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 8);
    void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 2 / 8);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(1u, NumberOfArenas(&allocator));
  }
  {
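    // 13/16, 11/16 and 7/16 of the default size: none of the later allocations
    // fits into the space left over by an earlier one, so each allocation gets
    // its own arena.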
    ArenaPool pool;
    ArenaAllocator allocator(&pool);
    void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
    void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 11 / 16);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
    void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 7 / 16);
    ASSERT_NE(alloc1, alloc3);
    ASSERT_NE(alloc2, alloc3);
    ASSERT_EQ(3u, NumberOfArenas(&allocator));
  }
  {
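    // The final 5/16 allocation fits into the space left in the second arena
    // (9/16 used), so the arena count stays at two.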
    ArenaPool pool;
    ArenaAllocator allocator(&pool);
    void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
    void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
    // Note: Leaving some space for memory tool red zones.
    void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 16);
    ASSERT_NE(alloc1, alloc3);
    ASSERT_NE(alloc2, alloc3);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
  }
  {
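    // Same as above with the first two allocations swapped: the final 5/16
    // allocation fits back into the first arena (9/16 used).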
    ArenaPool pool;
    ArenaAllocator allocator(&pool);
    void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16);
    void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
    // Note: Leaving some space for memory tool red zones.
    void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 16);
    ASSERT_NE(alloc1, alloc3);
    ASSERT_NE(alloc2, alloc3);
    ASSERT_EQ(2u, NumberOfArenas(&allocator));
  }
  {
    ArenaPool pool;
    ArenaAllocator allocator(&pool);
    // Note: Leaving some space for memory tool red zones.
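    // The 17/16 allocations are larger than the default arena size, so each
    // one gets its own arena. The small 1/16 allocations keep being served
    // from the first, default-sized arena, presumably because it retains more
    // free space than the exhausted oversized arenas.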
    for (size_t i = 0; i != 15; ++i) {
      // Allocate 15 times from the same arena.
      allocator.Alloc(arena_allocator::kArenaDefaultSize * 1 / 16);
      ASSERT_EQ(i + 1u, NumberOfArenas(&allocator));
      // Allocate a separate arena.
      allocator.Alloc(arena_allocator::kArenaDefaultSize * 17 / 16);
      ASSERT_EQ(i + 2u, NumberOfArenas(&allocator));
    }
  }
}

TEST_F(ArenaAllocatorTest, AllocAlignment) {
  ArenaPool pool;
  ArenaAllocator allocator(&pool);
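  // Every allocation, no matter what size is requested, must be returned
  // aligned to ArenaAllocator::kAlignment.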
  for (size_t iterations = 0; iterations <= 10; ++iterations) {
    for (size_t size = 1; size <= ArenaAllocator::kAlignment + 1; ++size) {
      void* allocation = allocator.Alloc(size);
      EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(allocation))
          << reinterpret_cast<uintptr_t>(allocation);
    }
  }
}

TEST_F(ArenaAllocatorTest, ReallocReuse) {
  // Realloc does not reuse arenas when running under sanitization, so these
  // checks cannot be performed under a memory tool.
  if (RUNNING_ON_MEMORY_TOOL != 0) {
    printf("WARNING: TEST DISABLED FOR MEMORY_TOOL\n");
    return;
  }

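  // In cases 1-4 the grown allocation still fits within the current arena, so
  // Realloc is expected to extend it in place and return the original pointer.
  // In cases 5-6 the new size no longer fits, so a different pointer is
  // expected.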
  {
    // Case 1: small aligned allocation, aligned extend inside arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_EQ(original_allocation, realloc_allocation);
  }

  {
    // Case 2: small aligned allocation, non-aligned extend inside arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_EQ(original_allocation, realloc_allocation);
  }

  {
    // Case 3: small non-aligned allocation, aligned extend inside arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = ArenaAllocator::kAlignment * 4;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_EQ(original_allocation, realloc_allocation);
  }

  {
    // Case 4: small non-aligned allocation, aligned non-extend inside arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_EQ(original_allocation, realloc_allocation);
  }

  // The next cases are brittle, as the default arena size can vary and we do
  // not know whether sanitization (memory tool red zones) is enabled.

  {
    // Case 5: large allocation, aligned extend into next arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = arena_allocator::kArenaDefaultSize -
        ArenaAllocator::kAlignment * 5;
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = arena_allocator::kArenaDefaultSize + ArenaAllocator::kAlignment * 2;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_NE(original_allocation, realloc_allocation);
  }

  {
    // Case 6: large allocation, non-aligned extend into next arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = arena_allocator::kArenaDefaultSize -
        ArenaAllocator::kAlignment * 4 -
        ArenaAllocator::kAlignment / 2;
    void* original_allocation = allocator.Alloc(original_size);

    const size_t new_size = arena_allocator::kArenaDefaultSize +
        ArenaAllocator::kAlignment * 2 +
        ArenaAllocator::kAlignment / 2;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_NE(original_allocation, realloc_allocation);
  }
}

TEST_F(ArenaAllocatorTest, ReallocAlignment) {
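  // The same six cases as in ReallocReuse, but here we only check alignment:
  // both the reallocated block and a subsequent one-byte allocation must be
  // aligned to ArenaAllocator::kAlignment, regardless of whether the requested
  // sizes are multiples of the alignment.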
  {
    // Case 1: small aligned allocation, aligned extend inside arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 2: small aligned allocation, non-aligned extend inside arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 3: small non-aligned allocation, aligned extend inside arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 4;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 4: small non-aligned allocation, aligned non-extend inside arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  // The next cases are brittle, as the default arena size can vary and we do
  // not know whether sanitization (memory tool red zones) is enabled.

  {
    // Case 5: large allocation, aligned extend into next arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = arena_allocator::kArenaDefaultSize -
        ArenaAllocator::kAlignment * 5;
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = arena_allocator::kArenaDefaultSize + ArenaAllocator::kAlignment * 2;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 6: large allocation, non-aligned extend into next arena.
    ArenaPool pool;
    ArenaAllocator allocator(&pool);

    const size_t original_size = arena_allocator::kArenaDefaultSize -
        ArenaAllocator::kAlignment * 4 -
        ArenaAllocator::kAlignment / 2;
    void* original_allocation = allocator.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = arena_allocator::kArenaDefaultSize +
        ArenaAllocator::kAlignment * 2 +
        ArenaAllocator::kAlignment / 2;
    void* realloc_allocation = allocator.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));

    void* after_alloc = allocator.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }
}


}  // namespace art