1 /*
2 * Copyright (c) 2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "ecmascript/mem/linear_space.h"
17
18 #include "ecmascript/js_hclass-inl.h"
19 #include "ecmascript/mem/allocator-inl.h"
20 #include "ecmascript/mem/mem_controller.h"
21
22 namespace panda::ecmascript {
// Construct a bump-pointer allocation space bound to |heap|.
// |initialCapacity| / |maximumCapacity| bound how far Expand() may grow the
// committed size; waterLine_ starts at 0 (no age mark recorded yet).
LinearSpace::LinearSpace(Heap *heap, MemSpaceType type, size_t initialCapacity, size_t maximumCapacity)
    : Space(heap, heap->GetHeapRegionAllocator(), type, initialCapacity, maximumCapacity),
      localHeap_(heap),
      thread_(heap->GetJSThread()),
      waterLine_(0)
{
}
30
// Allocate |size| bytes from this space.
// Fast path: bump-pointer allocation in the current region. On failure the
// space is expanded by one region and the allocation retried; if expansion is
// refused while a GC/idle task is in flight, the capacity limit is temporarily
// raised (overshoot) and expansion retried once more.
// |isPromoted| is true when the caller is the GC promoting an object; that
// path must not trigger new GCs or heap sampling.
// Returns the object address, or 0 on failure.
uintptr_t LinearSpace::Allocate(size_t size, bool isPromoted)
{
#if ECMASCRIPT_ENABLE_THREAD_STATE_CHECK
    if (UNLIKELY(!localHeap_->GetJSThread()->IsInRunningStateOrProfiling())) {
        LOG_ECMA(FATAL) << "Allocate must be in jsthread running state";
        UNREACHABLE();
    }
#endif
    auto object = allocator_.Allocate(size);
    if (object != 0) {
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
        // can not heap sampling in gc.
        if (!isPromoted) {
            InvokeAllocationInspector(object, size, size);
        }
#endif
        return object;
    }
    if (Expand(isPromoted)) {
        // Expansion succeeded: give the heap a chance to start marking/idle
        // collection before handing out memory from the new region.
        if (!isPromoted && !localHeap_->NeedStopCollection()) {
            localHeap_->TryTriggerIncrementalMarking();
            localHeap_->TryTriggerIdleCollection();
            localHeap_->TryTriggerConcurrentMarking();
        }
        object = allocator_.Allocate(size);
    } else if (localHeap_->IsMarking() || !localHeap_->IsEmptyIdleTask()) {
        // Temporary adjust semi space capacity
        if (localHeap_->IsConcurrentFullMark()) {
            overShootSize_ = localHeap_->CalculateLinearSpaceOverShoot();
        } else {
            // Grow the overshoot in fixed steps, capped at
            // max(initialCapacity_/2, one step).
            size_t stepOverShootSize = localHeap_->GetEcmaParamConfiguration().GetSemiSpaceStepOvershootSize();
            size_t maxOverShootSize = std::max(initialCapacity_ / 2, stepOverShootSize); // 2: half
            if (overShootSize_ < maxOverShootSize) {
                overShootSize_ += stepOverShootSize;
            }
        }

        // Retry expansion under the enlarged (overshoot) limit.
        if (Expand(isPromoted)) {
            object = allocator_.Allocate(size);
        }
    }
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
    if (object != 0 && !isPromoted) {
        InvokeAllocationInspector(object, size, size);
    }
#endif
    return object;
}
79
// Append one fresh region to this space and point the allocator at it.
// Refuses (returns false) once committed size reaches
// initialCapacity_ + overShootSize_ + outOfMemoryOvershootSize_, unless the
// heap currently forbids stopping for collection.
// Before switching regions, the bytes used in the current region are folded
// into the mutator (allocateAfterLastGC_) or GC (survivalObjectSize_)
// accounting, depending on |isPromoted|.
bool LinearSpace::Expand(bool isPromoted)
{
    if (committedSize_ >= initialCapacity_ + overShootSize_ + outOfMemoryOvershootSize_ &&
        !localHeap_->NeedStopCollection()) {
        return false;
    }

    uintptr_t top = allocator_.GetTop();
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        if (!isPromoted) {
            if (currentRegion->HasAgeMark()) {
                // Only bytes above the water line were allocated since last GC.
                allocateAfterLastGC_ +=
                    currentRegion->GetAllocatedBytes(top) - currentRegion->GetAllocatedBytes(waterLine_);
            } else {
                allocateAfterLastGC_ += currentRegion->GetAllocatedBytes(top);
            }
        } else {
            // For GC
            survivalObjectSize_ += currentRegion->GetAllocatedBytes(top);
        }
        // Freeze the old region's usable extent at the current top.
        currentRegion->SetHighWaterMark(top);
    }
    JSThread *thread = localHeap_->GetJSThread();
    Region *region = heapRegionAllocator_->AllocateAlignedRegion(this, DEFAULT_REGION_SIZE, thread, localHeap_,
                                                                 thread_->IsConcurrentMarkingOrFinished());
    allocator_.Reset(region->GetBegin(), region->GetEnd());
    AddRegion(region);
    return true;
}
110
Stop()111 void LinearSpace::Stop()
112 {
113 if (GetCurrentRegion() != nullptr) {
114 GetCurrentRegion()->SetHighWaterMark(allocator_.GetTop());
115 }
116 }
117
ResetAllocator()118 void LinearSpace::ResetAllocator()
119 {
120 auto currentRegion = GetCurrentRegion();
121 if (currentRegion != nullptr) {
122 allocator_.Reset(currentRegion->GetBegin(), currentRegion->GetEnd(), currentRegion->GetHighWaterMark());
123 }
124 }
125
// Walk every live object in the space and invoke |visitor| on it.
// For the region currently being allocated into, iteration ends at the
// allocator top; for all other regions, at their recorded allocated bytes.
// Free-list filler objects are skipped (visitor is not called for them).
void LinearSpace::IterateOverObjects(const std::function<void(TaggedObject *object)> &visitor) const
{
    auto current = GetCurrentRegion();
    EnumerateRegions([&](Region *region) {
        auto curPtr = region->GetBegin();
        uintptr_t endPtr = 0;
        if (region == current) {
            auto top = allocator_.GetTop();
            endPtr = curPtr + region->GetAllocatedBytes(top);
        } else {
            endPtr = curPtr + region->GetAllocatedBytes();
        }

        size_t objSize;
        while (curPtr < endPtr) {
            auto freeObject = FreeObject::Cast(curPtr);
            // If curPtr is freeObject, It must to mark unpoison first.
            ASAN_UNPOISON_MEMORY_REGION(freeObject, TaggedObject::TaggedObjectSize());
            if (!freeObject->IsFreeObject()) {
                auto obj = reinterpret_cast<TaggedObject *>(curPtr);
                visitor(obj);
                // Size comes from the object's hclass; visitor must not have
                // freed or relocated the object.
                objSize = obj->GetClass()->SizeFromJSHClass(obj);
            } else {
                // Temporarily unpoison the filler to read its size, then
                // re-poison so ASan keeps guarding the free range.
                freeObject->AsanUnPoisonFreeObject();
                objSize = freeObject->Available();
                freeObject->AsanPoisonFreeObject();
            }
            curPtr += objSize;
            CHECK_OBJECT_SIZE(objSize);
        }
        CHECK_REGION_END(curPtr, endPtr);
    });
}
159
InvokeAllocationInspector(Address object,size_t size,size_t alignedSize)160 void LinearSpace::InvokeAllocationInspector(Address object, size_t size, size_t alignedSize)
161 {
162 ASSERT(size <= alignedSize);
163 if (LIKELY(!allocationCounter_.IsActive())) {
164 return;
165 }
166 if (alignedSize >= allocationCounter_.NextBytes()) {
167 allocationCounter_.InvokeAllocationInspector(object, size, alignedSize);
168 }
169 allocationCounter_.AdvanceAllocationInspector(alignedSize);
170 }
171
// Construct the eden space: reserve the whole maximum capacity up front as
// one read-write mapping, tag it for accounting, and pre-carve it into
// DEFAULT_REGION_SIZE chunks kept in freeRegions_ for AllocRegion().
EdenSpace::EdenSpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity)
    : LinearSpace(heap, MemSpaceType::EDEN_SPACE, initialCapacity, maximumCapacity)
{
    size_t memSize = AlignUp(maximumCapacity_, DEFAULT_REGION_SIZE);
    memMap_ = PageMap(memSize, PAGE_PROT_READWRITE, DEFAULT_REGION_SIZE);
    PageTag(memMap_.GetMem(), memMap_.GetSize(), PageTagType::HEAP, ToSpaceTypeName(MemSpaceType::EDEN_SPACE),
            localHeap_->GetJSThread()->GetThreadId());
    auto mem = ToUintPtr(memMap_.GetMem());
    auto count = memMap_.GetSize() / DEFAULT_REGION_SIZE;
    while (count-- > 0) {
        freeRegions_.emplace_back(ToVoidPtr(mem), DEFAULT_REGION_SIZE);
        mem = mem + DEFAULT_REGION_SIZE;
    }
}
186
// Release the single backing mapping reserved in the constructor.
EdenSpace::~EdenSpace()
{
    PageUnmap(memMap_);
}
191
Initialize()192 void EdenSpace::Initialize()
193 {
194 auto region = AllocRegion();
195 if (UNLIKELY(region == nullptr)) {
196 LOG_GC(ERROR) << "region is nullptr";
197 return;
198 }
199 AddRegion(region);
200 allocator_.Reset(region->GetBegin(), region->GetEnd());
201 localHeap_->InstallEdenAllocator();
202 }
203
Restart()204 void EdenSpace::Restart()
205 {
206 overShootSize_ = 0;
207 survivalObjectSize_ = 0;
208 allocateAfterLastGC_ = 0;
209 isFull_ = false;
210 Initialize();
211 }
212
AllocateSync(size_t size)213 uintptr_t EdenSpace::AllocateSync(size_t size)
214 {
215 LockHolder lock(lock_);
216 return Allocate(size);
217 }
218
// Allocate |size| bytes from eden. Returns 0 once the space is full.
// On bump-pointer failure, tries to grab one more region; if the free pool is
// exhausted, marks the space full and hands allocation back to the heap by
// releasing the eden allocator.
uintptr_t EdenSpace::Allocate(size_t size)
{
    if (isFull_) {
        return 0;
    }
    auto object = allocator_.Allocate(size);
    if (object != 0) {
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
        // can not heap sampling in gc.
        InvokeAllocationInspector(object, size, size);
#endif
        return object;
    }
    if (Expand()) {
        // New region attached; let the heap consider starting a GC first.
        if (!localHeap_->NeedStopCollection()) {
            localHeap_->TryTriggerIncrementalMarking();
            localHeap_->TryTriggerIdleCollection();
            localHeap_->TryTriggerConcurrentMarking();
        }
        object = allocator_.Allocate(size);
    } else {
        // No regions left: stop serving eden allocations until Restart().
        isFull_ = true;
        localHeap_->ReleaseEdenAllocator();
    }
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
    if (object != 0) {
        InvokeAllocationInspector(object, size, size);
    }
#endif
    return object;
}
250
// Pop one pre-reserved chunk from freeRegions_ and construct a Region header
// in-place at its base. Returns nullptr when the pool is exhausted.
Region *EdenSpace::AllocRegion()
{
    if (freeRegions_.empty()) {
        return nullptr;
    }
    auto memmap = freeRegions_.back();
    freeRegions_.pop_back();
    heapRegionAllocator_->IncreaseAnnoMemoryUsage(memmap.GetSize());
    auto mem = reinterpret_cast<uintptr_t>(memmap.GetMem());
    // Check that the address is 256K byte aligned
    LOG_ECMA_IF(AlignUp(mem, PANDA_POOL_ALIGNMENT_IN_BYTES) != mem, FATAL) << "region not align by 256KB";

    // Usable payload starts after the Region header, aligned to the region
    // allocation alignment.
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    uintptr_t begin = AlignUp(mem + sizeof(Region), static_cast<size_t>(MemAlignment::MEM_ALIGN_REGION));
    uintptr_t end = mem + memmap.GetSize();
    auto region = new (ToVoidPtr(mem)) Region(localHeap_->GetNativeAreaAllocator(), mem, begin, end,
                                              GetRegionFlag(), RegionTypeFlag::DEFAULT);
    region->Initialize();
    return region;
}
271
// Attach one more region to eden, first folding the bytes used in the current
// region into allocateAfterLastGC_ (minus anything below the age mark) and
// freezing its high water mark. Returns false when the free pool is empty.
bool EdenSpace::Expand()
{
    Region *region = AllocRegion();
    if (region == nullptr) {
        return false;
    }

    uintptr_t top = allocator_.GetTop();
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        if (currentRegion->HasAgeMark()) {
            // Bytes below the water line survived the last GC; only count the rest.
            allocateAfterLastGC_ +=
                currentRegion->GetAllocatedBytes(top) - currentRegion->GetAllocatedBytes(waterLine_);
        } else {
            allocateAfterLastGC_ += currentRegion->GetAllocatedBytes(top);
        }
        currentRegion->SetHighWaterMark(top);
    }
    allocator_.Reset(region->GetBegin(), region->GetEnd());
    AddRegion(region);
    return true;
}
294
// Return every eden region to the free pool after a GC. Regions are
// invalidated and their metadata cleared, but the underlying pages stay
// mapped (they belong to the space-lifetime memMap_); |cachedSize| is unused
// here since nothing is unmapped.
void EdenSpace::ReclaimRegions([[maybe_unused]] size_t cachedSize)
{
    const auto spaceName = ToSpaceTypeName(MemSpaceType::EDEN_SPACE);
    EnumerateRegions([this, &spaceName](Region *current) {
        LOG_GC(DEBUG) << "Clear region from: " << current << " to " << spaceName;
        current->DeleteLocalToShareRSet();
        DecreaseCommitted(current->GetCapacity());
        DecreaseObjectSize(current->GetSize());
        current->Invalidate();
        current->ClearMembers();
        // Hand the raw chunk back so AllocRegion() can reuse it.
        void *mem = ToVoidPtr(current->GetAllocateBase());
        size_t memSize = current->GetCapacity();
        freeRegions_.emplace_back(mem, memSize);
        heapRegionAllocator_->DecreaseAnnoMemoryUsage(memSize);
    });
    regionList_.Clear();
    committedSize_ = 0;
}
313
GetHeapObjectSize() const314 size_t EdenSpace::GetHeapObjectSize() const
315 {
316 return survivalObjectSize_ + allocateAfterLastGC_;
317 }
318
GetSurvivalObjectSize() const319 size_t EdenSpace::GetSurvivalObjectSize() const
320 {
321 return survivalObjectSize_;
322 }
323
SetOverShootSize(size_t size)324 void EdenSpace::SetOverShootSize(size_t size)
325 {
326 overShootSize_ = size;
327 }
328
GetAllocatedSizeSinceGC(uintptr_t top) const329 size_t EdenSpace::GetAllocatedSizeSinceGC(uintptr_t top) const
330 {
331 size_t currentRegionSize = 0;
332 auto currentRegion = GetCurrentRegion();
333 if (currentRegion != nullptr) {
334 currentRegionSize = currentRegion->GetAllocatedBytes(top);
335 if (currentRegion->HasAgeMark()) {
336 currentRegionSize -= currentRegion->GetAllocatedBytes(waterLine_);
337 }
338 }
339 return allocateAfterLastGC_ + currentRegionSize;
340 }
341
// Construct a semi-space; the initial capacity also serves as the floor the
// space may shrink back to (minimumCapacity_) in AdjustCapacity().
SemiSpace::SemiSpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity)
    : LinearSpace(heap, MemSpaceType::SEMI_SPACE, initialCapacity, maximumCapacity),
      minimumCapacity_(initialCapacity) {}
345
Initialize()346 void SemiSpace::Initialize()
347 {
348 JSThread *thread = localHeap_->GetJSThread();
349 Region *region = heapRegionAllocator_->AllocateAlignedRegion(this, DEFAULT_REGION_SIZE, thread, localHeap_);
350 AddRegion(region);
351 allocator_.Reset(region->GetBegin(), region->GetEnd());
352 }
353
Restart(size_t overShootSize)354 void SemiSpace::Restart(size_t overShootSize)
355 {
356 overShootSize_ = overShootSize;
357 survivalObjectSize_ = 0;
358 allocateAfterLastGC_ = 0;
359 Initialize();
360 }
361
CalculateNewOverShootSize()362 size_t SemiSpace::CalculateNewOverShootSize()
363 {
364 return committedSize_ <= maximumCapacity_ ?
365 0 : AlignUp((committedSize_ - maximumCapacity_) / 2, DEFAULT_REGION_SIZE); // 2 is the half.
366 }
367
CommittedSizeIsLarge()368 bool SemiSpace::CommittedSizeIsLarge()
369 {
370 return committedSize_ >= maximumCapacity_ * 2; // 2 is the half.
371 }
372
AllocateSync(size_t size)373 uintptr_t SemiSpace::AllocateSync(size_t size)
374 {
375 LockHolder lock(lock_);
376 return Allocate(size, true);
377 }
378
// Move |region| from |fromSpace| into this space (used during semi-space
// evacuation to keep whole regions instead of copying). Fails if accepting
// the region would exceed maximumCapacity_ + overShootSize_.
// Thread-safe: guarded by the space lock.
bool SemiSpace::SwapRegion(Region *region, SemiSpace *fromSpace)
{
    LockHolder lock(lock_);
    if (committedSize_ + region->GetCapacity() > maximumCapacity_ + overShootSize_) {
        return false;
    }
    fromSpace->RemoveRegion(region);

    region->SetGCFlag(RegionGCFlags::IN_NEW_TO_NEW_SET);

    // NOTE(review): uses heap_ here while the rest of the file uses
    // localHeap_ — presumably the same heap via the Space base; confirm.
    if (UNLIKELY(heap_->ShouldVerifyHeap())) {
        region->ResetInactiveSemiSpace();
    }

    regionList_.AddNodeToFront(region);
    IncreaseCommitted(region->GetCapacity());
    IncreaseObjectSize(region->GetSize());
    survivalObjectSize_ += region->GetAllocatedBytes();
    return true;
}
399
// Record the age mark ("water line") at the current allocation top.
// The current region gets HAS_AGE_MARK; every older region is flagged
// BELOW_AGE_MARK, so future allocations can be distinguished from survivors.
// Also folds the current region's bytes below the line into
// survivalObjectSize_ and resets the since-GC allocation counter.
void SemiSpace::SetWaterLine()
{
    waterLine_ = allocator_.GetTop();
    allocateAfterLastGC_ = 0;
    Region *last = GetCurrentRegion();
    if (last != nullptr) {
        last->SetGCFlag(RegionGCFlags::HAS_AGE_MARK);

        EnumerateRegions([&last](Region *current) {
            if (current != last) {
                current->SetGCFlag(RegionGCFlags::BELOW_AGE_MARK);
            }
        });
        survivalObjectSize_ += last->GetAllocatedBytes(waterLine_);
    } else {
        LOG_GC(INFO) << "SetWaterLine: No region survival in current gc, current region available size: "
                     << allocator_.Available();
    }
}
419
GetHeapObjectSize() const420 size_t SemiSpace::GetHeapObjectSize() const
421 {
422 return survivalObjectSize_ + allocateAfterLastGC_;
423 }
424
GetSurvivalObjectSize() const425 size_t SemiSpace::GetSurvivalObjectSize() const
426 {
427 return survivalObjectSize_;
428 }
429
SetOverShootSize(size_t size)430 void SemiSpace::SetOverShootSize(size_t size)
431 {
432 overShootSize_ = size;
433 }
434
// Resize the semi-space based on the last cycle's survival behavior.
// Grows (×GROWING_FACTOR, capped at maximumCapacity_) when the survival rate
// is high; shrinks (÷GROWING_FACTOR, floored at minimumCapacity_) when it is
// low and allocation throughput is also low. Returns true if the capacity
// changed. Cycles with too little allocation data are ignored.
bool SemiSpace::AdjustCapacity(size_t allocatedSizeSinceGC, JSThread *thread)
{
    if (allocatedSizeSinceGC <= initialCapacity_ * GROW_OBJECT_SURVIVAL_RATE / GROWING_FACTOR) {
        // Not enough allocation since last GC to make a meaningful decision.
        return false;
    }
    double curObjectSurvivalRate = static_cast<double>(survivalObjectSize_) / allocatedSizeSinceGC;
    double initialObjectRate = static_cast<double>(survivalObjectSize_) / initialCapacity_;
    if (curObjectSurvivalRate > GROW_OBJECT_SURVIVAL_RATE || initialObjectRate > GROW_OBJECT_SURVIVAL_RATE) {
        if (initialCapacity_ >= maximumCapacity_) {
            return false;
        }
        size_t newCapacity = initialCapacity_ * GROWING_FACTOR;
        SetInitialCapacity(std::min(newCapacity, maximumCapacity_));
        // NOTE(review): this compares the unclamped newCapacity; if doubling
        // overshoots maximumCapacity_ (newCapacity > max) the grow-step update
        // below is skipped even though capacity was clamped to max — confirm
        // whether `>=` was intended.
        if (newCapacity == maximumCapacity_) {
            localHeap_->GetJSObjectResizingStrategy()->UpdateGrowStep(
                thread,
                JSObjectResizingStrategy::PROPERTIES_GROW_SIZE * 2);    // 2: double
        }
        return true;
    } else if (curObjectSurvivalRate < SHRINK_OBJECT_SURVIVAL_RATE) {
        if (initialCapacity_ <= minimumCapacity_) {
            return false;
        }
        double speed = localHeap_->GetMemController()->GetNewSpaceAllocationThroughputPerMS();
        if (speed > LOW_ALLOCATION_SPEED_PER_MS) {
            // Allocation is still fast; shrinking would just force regrowth.
            return false;
        }
        size_t newCapacity = initialCapacity_ / GROWING_FACTOR;
        SetInitialCapacity(std::max(newCapacity, minimumCapacity_));
        localHeap_->GetJSObjectResizingStrategy()->UpdateGrowStep(thread);
        return true;
    }
    return false;
}
469
GetAllocatedSizeSinceGC(uintptr_t top) const470 size_t SemiSpace::GetAllocatedSizeSinceGC(uintptr_t top) const
471 {
472 size_t currentRegionSize = 0;
473 auto currentRegion = GetCurrentRegion();
474 if (currentRegion != nullptr) {
475 currentRegionSize = currentRegion->GetAllocatedBytes(top);
476 if (currentRegion->HasAgeMark()) {
477 currentRegionSize -= currentRegion->GetAllocatedBytes(waterLine_);
478 }
479 }
480 return allocateAfterLastGC_ + currentRegionSize;
481 }
482
// Linear space used for heap snapshot serialization; behavior is entirely
// inherited from LinearSpace.
SnapshotSpace::SnapshotSpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity)
    : LinearSpace(heap, MemSpaceType::SNAPSHOT_SPACE, initialCapacity, maximumCapacity) {}
485
// Linear space for read-only objects; the concrete space type is supplied by
// the caller. Behavior is entirely inherited from LinearSpace.
ReadOnlySpace::ReadOnlySpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity, MemSpaceType type)
    : LinearSpace(heap, type, initialCapacity, maximumCapacity) {}
488 } // namespace panda::ecmascript
489