/**
 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>

#include "runtime/include/thread_scopes.h"
#include "runtime/handle_scope-inl.h"
#include "runtime/mem/gc/epsilon-g1/epsilon-g1.h"

#include "test_utils.h"

namespace ark::mem {

// NOLINTBEGIN(readability-magic-numbers)
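// Fixture that boots a Panda runtime with the Epsilon (no-op) GC. Epsilon only
// allocates and never reclaims or moves objects, so the tests below assert that
// object addresses stay stable even across explicitly triggered collections.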
class EpsilonGCTest : public testing::Test {
public:
    explicit EpsilonGCTest() : EpsilonGCTest(CreateDefaultOptions()) {}

    explicit EpsilonGCTest(const RuntimeOptions &options)
    {
        Runtime::Create(options);
    }

    ~EpsilonGCTest() override
    {
        Runtime::Destroy();
    }

    NO_COPY_SEMANTIC(EpsilonGCTest);
    NO_MOVE_SEMANTIC(EpsilonGCTest);

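    // The options below are presumably chosen to keep the tests deterministic:
    // the "debug-never" trigger disables automatic collections, RunGcInPlace
    // should make explicitly submitted GC tasks run synchronously on the
    // calling thread, and JIT is off so no compiler activity allocates behind
    // the test's back.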
    static RuntimeOptions CreateDefaultOptions(GCType gcType = GCType::EPSILON_GC)
    {
        constexpr size_t HEAP_SIZE_LIMIT_FOR_TEST = 16_MB;
        RuntimeOptions options;
        switch (gcType) {
            case GCType::EPSILON_GC:
                options.SetGcType("epsilon");
                break;
            case GCType::EPSILON_G1_GC:
                options.SetGcType("epsilon-g1");
                options.SetGcWorkersCount(0);
                options.SetG1PromotionRegionAliveRate(100U);
                break;
            default:
                UNREACHABLE();
        }
        options.SetHeapSizeLimit(HEAP_SIZE_LIMIT_FOR_TEST);
        options.SetLoadRuntimes({"core"});
        options.SetRunGcInPlace(true);
        options.SetCompilerEnableJit(false);
        options.SetGcTriggerType("debug-never");
        options.SetShouldLoadBootPandaFiles(false);
        options.SetShouldInitializeIntrinsics(false);
        return options;
    }

    template <typename T>
    T *GetAllocator()
    {
        static_assert(std::is_same<T, ObjectAllocatorNoGen<>>::value || std::is_same<T, ObjectAllocatorG1<>>::value);
        Runtime *runtime = Runtime::GetCurrent();
        GC *gc = runtime->GetPandaVM()->GetGC();
        return static_cast<T *>(gc->GetObjectAllocator());
    }

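    // Allocates NUM_OF_ELEMS_CHECKED objects covering the interesting size/space
    // combinations: a regular movable array, a humongous array, a non-movable
    // array, a regular string, and a humongous string.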
    template <typename T>
    std::vector<ObjectHeader *> AllocObjectsForTest()
    {
        std::vector<ObjectHeader *> objVector;

        objVector.emplace_back(ObjectAllocator::AllocArray(GetAllocator<T>()->GetRegularObjectMaxSize() * 0.8F,
                                                           ClassRoot::ARRAY_U8, false));
        objVector.emplace_back(
            ObjectAllocator::AllocArray(GetAllocator<T>()->GetLargeObjectMaxSize(), ClassRoot::ARRAY_U8, false));
        objVector.emplace_back(ObjectAllocator::AllocArray(GetAllocator<T>()->GetRegularObjectMaxSize() * 0.5F,
                                                           ClassRoot::ARRAY_U8, true));
        objVector.emplace_back(ObjectAllocator::AllocString(GetAllocator<T>()->GetRegularObjectMaxSize() * 0.8F));
        objVector.emplace_back(ObjectAllocator::AllocString(GetAllocator<T>()->GetLargeObjectMaxSize()));

        return objVector;
    }

    static constexpr size_t NUM_OF_ELEMS_CHECKED = 5;
};

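// Fixture for the epsilon-g1 variant: the no-op epsilon collection policy on
// top of the G1 region-based allocator, which lets the tests check the region
// flags (eden / non-movable / large-object) of each allocation.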
class EpsilonG1GCTest : public EpsilonGCTest {
public:
    explicit EpsilonG1GCTest() : EpsilonGCTest(CreateDefaultOptions(GCType::EPSILON_G1_GC)) {}

    NO_COPY_SEMANTIC(EpsilonG1GCTest);
    NO_MOVE_SEMANTIC(EpsilonG1GCTest);

    ~EpsilonG1GCTest() override = default;

    static constexpr size_t GetHumongousStringLength()
    {
        // Total string size will be G1_REGION_SIZE + sizeof(String).
        // It is enough to make it humongous.
        return G1_REGION_SIZE;
    }

    static constexpr size_t YOUNG_OBJECT_SIZE =
        AlignUp(static_cast<size_t>(G1_REGION_SIZE * 0.8F), DEFAULT_ALIGNMENT_IN_BYTES);
};

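// Verifies that all five test objects can be allocated under the epsilon GC and
// that their array/string lengths are readable afterwards.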
TEST_F(EpsilonGCTest, TestObjectsAllocation)
{
    MTManagedThread *thread = MTManagedThread::GetCurrent();
    ScopedManagedCodeThread s(thread);
    ASSERT_EQ(Runtime::GetCurrent()->GetPandaVM()->GetGC()->GetType(), GCType::EPSILON_GC);

    // Allocation of objects of different sizes in movable/non-movable spaces and checking that they are accessible
    std::vector<ObjectHeader *> allocatedObjects = AllocObjectsForTest<ObjectAllocatorNoGen<>>();
    for (size_t i = 0; i < allocatedObjects.size(); ++i) {
        ASSERT_NE(allocatedObjects[i], nullptr);
        if (i < 3U) {  // First 3 elements are coretypes::Array
            ASSERT_NE(static_cast<coretypes::Array *>(allocatedObjects[i])->GetLength(), 0);
        } else {
            ASSERT_NE(static_cast<coretypes::String *>(allocatedObjects[i])->GetLength(), 0);
        }
    }
}

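// Allocates until the heap is exhausted, then drives OOM-caused and explicit
// GC tasks by hand. With the epsilon GC both collections are expected to be
// no-ops: nothing is freed (allocation still fails) and nothing is moved
// (addresses recorded in the non-movable array stay valid).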
TEST_F(EpsilonGCTest, TestOOMAndGCTriggering)
{
    MTManagedThread *thread = MTManagedThread::GetCurrent();
    GC *gc = Runtime::GetCurrent()->GetPandaVM()->GetGC();
    ScopedManagedCodeThread s(thread);
    [[maybe_unused]] HandleScope<ObjectHeader *> hs(thread);
    VMHandle<coretypes::Array> nonmovable = VMHandle<coretypes::Array>(
        thread, ObjectAllocator::AllocArray(NUM_OF_ELEMS_CHECKED, ClassRoot::ARRAY_STRING, true));
    std::vector<VMHandle<coretypes::String>> strings;
    coretypes::String *objString;

    // Alloc objects until OOM
    // The first NUM_OF_ELEMS_CHECKED objects are added to the non-movable array to check their addresses after GC
    do {
        objString =
            ObjectAllocator::AllocString(GetAllocator<ObjectAllocatorNoGen<>>()->GetRegularObjectMaxSize() * 0.8F);
        if (strings.size() < NUM_OF_ELEMS_CHECKED) {
            strings.emplace_back(thread, objString);
            size_t lastElemIndx = strings.size() - 1;
            nonmovable->Set<ObjectHeader *>(lastElemIndx, strings[lastElemIndx].GetPtr());
        }
    } while (objString != nullptr);
    VMHandle<coretypes::String> objAfterOom = VMHandle<coretypes::String>(
        thread, ObjectAllocator::AllocString(GetAllocator<ObjectAllocatorNoGen<>>()->GetRegularObjectMaxSize() * 0.8F));
    ASSERT_EQ(objAfterOom.GetPtr(), nullptr) << "Expected OOM";
    ASSERT_EQ(strings.size(), NUM_OF_ELEMS_CHECKED);

    // Check that the addresses are correct before triggering GC
    for (size_t i = 0; i < strings.size(); ++i) {
        ASSERT_EQ(strings[i].GetPtr(), nonmovable->Get<ObjectHeader *>(i));
    }

    {
        ScopedNativeCodeThread sn(thread);
        GCTask task(GCTaskCause::OOM_CAUSE);
        task.Run(*gc);
    }
    // Check that the addresses are unchanged after the OOM-caused GC
    for (size_t i = 0; i < strings.size(); ++i) {
        ASSERT_EQ(strings[i].GetPtr(), nonmovable->Get<ObjectHeader *>(i));
    }

    {
        ScopedNativeCodeThread sn(thread);
        GCTask task(GCTaskCause::EXPLICIT_CAUSE);
        task.Run(*gc);
    }
    // Check that the addresses are unchanged after the explicit GC
    for (size_t i = 0; i < strings.size(); ++i) {
        ASSERT_EQ(strings[i].GetPtr(), nonmovable->Get<ObjectHeader *>(i));
    }

    // Allocation must still fail after the triggered GCs, since epsilon frees nothing
    VMHandle<coretypes::String> objAfterTriggeredGc = VMHandle<coretypes::String>(
        thread, ObjectAllocator::AllocString(GetAllocator<ObjectAllocatorNoGen<>>()->GetRegularObjectMaxSize() * 0.8F));
    ASSERT_EQ(objAfterTriggeredGc.GetPtr(), nullptr) << "Expected OOM";
}

TEST_F(EpsilonG1GCTest, TestObjectsAllocation)
{
    MTManagedThread *thread = MTManagedThread::GetCurrent();
    ScopedManagedCodeThread s(thread);
    ASSERT_EQ(Runtime::GetCurrent()->GetPandaVM()->GetGC()->GetType(), GCType::EPSILON_G1_GC);

    // Allocation of objects of different sizes in movable/non-movable spaces and checking that they are accessible
    // and are in the correct regions
    std::vector<ObjectHeader *> allocatedObjects = AllocObjectsForTest<ObjectAllocatorG1<>>();
    ASSERT_EQ(allocatedObjects.size(), NUM_OF_ELEMS_CHECKED);
    for (auto &allocatedObject : allocatedObjects) {
        ASSERT_NE(allocatedObject, nullptr);
    }

    ASSERT_TRUE(ObjectToRegion(allocatedObjects[0U])->HasFlag(RegionFlag::IS_EDEN));
    ASSERT_FALSE(ObjectToRegion(allocatedObjects[0U])->HasFlag(RegionFlag::IS_NONMOVABLE));
    ASSERT_TRUE(ObjectToRegion(allocatedObjects[1U])->HasFlag(RegionFlag::IS_LARGE_OBJECT));
    ASSERT_TRUE(ObjectToRegion(allocatedObjects[2U])->HasFlag(RegionFlag::IS_NONMOVABLE));
    ASSERT_TRUE(ObjectToRegion(allocatedObjects[3U])->HasFlag(RegionFlag::IS_EDEN));
    ASSERT_TRUE(ObjectToRegion(allocatedObjects[4U])->HasFlag(RegionFlag::IS_LARGE_OBJECT));
}

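// Fills the young space with region-sized strings and then the tenured space
// with humongous strings, checking that allocation succeeds while free space
// remains and fails with OOM once each space is exhausted.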
TEST_F(EpsilonG1GCTest, TestOOM)
{
    MTManagedThread *thread = MTManagedThread::GetCurrent();
    ScopedManagedCodeThread s(thread);
    [[maybe_unused]] HandleScope<ObjectHeader *> hs(thread);
    auto objectAllocatorG1 = thread->GetVM()->GetGC()->GetObjectAllocator();
    VMHandle<coretypes::String> objString;
    VMHandle<coretypes::String> objStringHuge;

    while (reinterpret_cast<GenerationalSpaces *>(objectAllocatorG1->GetHeapSpace())->GetCurrentFreeYoungSize() >=
           YOUNG_OBJECT_SIZE) {
        objString = VMHandle<coretypes::String>(thread, ObjectAllocator::AllocString(YOUNG_OBJECT_SIZE));
        ASSERT_NE(objString.GetPtr(), nullptr) << "Cannot allocate an object in young space when heap is not full";
    }
    objString = VMHandle<coretypes::String>(thread, ObjectAllocator::AllocString(YOUNG_OBJECT_SIZE));
    ASSERT_EQ(objString.GetPtr(), nullptr) << "Expected OOM in young space";

    size_t hugeStringSize = GetHumongousStringLength();
    size_t newRegionForHugeStringSize =
        Region::RegionSize(AlignUp(hugeStringSize, GetAlignmentInBytes(DEFAULT_ALIGNMENT)), G1_REGION_SIZE);
    while (reinterpret_cast<GenerationalSpaces *>(objectAllocatorG1->GetHeapSpace())->GetCurrentFreeTenuredSize() >
           newRegionForHugeStringSize) {
        objStringHuge = VMHandle<coretypes::String>(thread, ObjectAllocator::AllocString(hugeStringSize));
        ASSERT_NE(objStringHuge.GetPtr(), nullptr)
            << "Cannot allocate an object in tenured space when heap is not full";
    }
    objStringHuge = VMHandle<coretypes::String>(thread, ObjectAllocator::AllocString(hugeStringSize));
    ASSERT_EQ(objStringHuge.GetPtr(), nullptr) << "Expected OOM in tenured space";
}

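// Records the addresses of a young object and a humongous object, then runs a
// young-cause GC task followed by an explicit (full) one. Under epsilon-g1
// neither task should promote or move anything: the young object must stay in
// its eden region and the humongous object in its large-object region.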
TEST_F(EpsilonG1GCTest, TestGCTriggering)
{
    MTManagedThread *thread = MTManagedThread::GetCurrent();
    GC *gc = Runtime::GetCurrent()->GetPandaVM()->GetGC();
    ScopedManagedCodeThread s(thread);
    [[maybe_unused]] HandleScope<ObjectHeader *> hs(thread);
    ASSERT_EQ(gc->GetType(), GCType::EPSILON_G1_GC);

    VMHandle<coretypes::Array> nonmovable;
    ObjectHeader *obj;
    ObjectHeader *hugeObj;
    uintptr_t objAddr;
    uintptr_t hugeObjAddr;

    nonmovable = VMHandle<coretypes::Array>(thread, ObjectAllocator::AllocArray(2U, ClassRoot::ARRAY_STRING, true));
    obj = ObjectAllocator::AllocObjectInYoung();
    hugeObj = ObjectAllocator::AllocString(GetHumongousStringLength());
    nonmovable->Set(0, obj);
    nonmovable->Set(1, hugeObj);
    objAddr = ToUintPtr(obj);
    hugeObjAddr = ToUintPtr(hugeObj);
    ASSERT_TRUE(ObjectToRegion(obj)->HasFlag(RegionFlag::IS_EDEN));
    ASSERT_TRUE(ObjectToRegion(hugeObj)->HasFlag(RegionFlag::IS_OLD));
    ASSERT_TRUE(ObjectToRegion(hugeObj)->HasFlag(RegionFlag::IS_LARGE_OBJECT));

    {
        ScopedNativeCodeThread sn(thread);
        GCTask task(GCTaskCause::YOUNG_GC_CAUSE);
        task.Run(*gc);
    }
    // Check that obj was not promoted to tenured and hugeObj was not moved to any other region
    obj = nonmovable->Get<ObjectHeader *>(0);
    hugeObj = nonmovable->Get<ObjectHeader *>(1);
    ASSERT_EQ(objAddr, ToUintPtr(obj));
    ASSERT_EQ(hugeObjAddr, ToUintPtr(hugeObj));
    ASSERT_TRUE(ObjectToRegion(obj)->HasFlag(RegionFlag::IS_EDEN));
    ASSERT_TRUE(ObjectToRegion(hugeObj)->HasFlag(RegionFlag::IS_OLD));
    ASSERT_TRUE(ObjectToRegion(hugeObj)->HasFlag(RegionFlag::IS_LARGE_OBJECT));

    {
        ScopedNativeCodeThread sn(thread);
        GCTask task(GCTaskCause::EXPLICIT_CAUSE);  // run full GC
        task.Run(*gc);
    }
    // Check that obj was not promoted to tenured and hugeObj was not moved to any other region after full GC
    obj = nonmovable->Get<ObjectHeader *>(0);
    hugeObj = nonmovable->Get<ObjectHeader *>(1);
    ASSERT_EQ(objAddr, ToUintPtr(obj));
    ASSERT_EQ(hugeObjAddr, ToUintPtr(hugeObj));
    ASSERT_TRUE(ObjectToRegion(obj)->HasFlag(RegionFlag::IS_EDEN));
    ASSERT_TRUE(ObjectToRegion(hugeObj)->HasFlag(RegionFlag::IS_OLD));
    ASSERT_TRUE(ObjectToRegion(hugeObj)->HasFlag(RegionFlag::IS_LARGE_OBJECT));

    // Check that the objects are still accessible
    ASSERT_NE(nullptr, obj->ClassAddr<Class>());
    ASSERT_NE(nullptr, hugeObj->ClassAddr<Class>());
}
// NOLINTEND(readability-magic-numbers)

}  // namespace ark::mem