/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ecmascript/mem/linear_space.h"

#include "ecmascript/js_hclass-inl.h"
#include "ecmascript/mem/allocator-inl.h"
#include "ecmascript/mem/mem_controller.h"

namespace panda::ecmascript {
LinearSpace::LinearSpace(Heap *heap, MemSpaceType type, size_t initialCapacity, size_t maximumCapacity)
    : Space(heap, heap->GetHeapRegionAllocator(), type, initialCapacity, maximumCapacity),
      localHeap_(heap),
      thread_(heap->GetJSThread()),
      waterLine_(0)
{
}

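// Fast path: bump-pointer allocation from the current region. On failure the space tries to expand;
// if expansion is refused while marking is active or idle tasks are pending, the capacity is
// temporarily overshot and expansion is retried. isPromoted marks allocations made by the GC itself,
// which skip GC triggering and heap sampling.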
uintptr_t LinearSpace::Allocate(size_t size, bool isPromoted)
{
#if ECMASCRIPT_ENABLE_THREAD_STATE_CHECK
    if (UNLIKELY(!localHeap_->GetJSThread()->IsInRunningStateOrProfiling())) {
        LOG_ECMA(FATAL) << "Allocate must be in jsthread running state";
        UNREACHABLE();
    }
#endif
    auto object = allocator_.Allocate(size);
    if (object != 0) {
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
        // Heap sampling cannot be performed during GC.
        if (!isPromoted) {
            InvokeAllocationInspector(object, size, size);
        }
#endif
        return object;
    }
    if (Expand(isPromoted)) {
        if (!isPromoted && !localHeap_->NeedStopCollection()) {
            localHeap_->TryTriggerIncrementalMarking();
            localHeap_->TryTriggerIdleCollection();
            localHeap_->TryTriggerConcurrentMarking();
        }
        object = allocator_.Allocate(size);
    } else if (localHeap_->IsMarking() || !localHeap_->IsEmptyIdleTask()) {
        // Temporarily adjust the semi space capacity.
        if (localHeap_->IsConcurrentFullMark()) {
            overShootSize_ = localHeap_->CalculateLinearSpaceOverShoot();
        } else {
            size_t stepOverShootSize = localHeap_->GetEcmaParamConfiguration().GetSemiSpaceStepOvershootSize();
            size_t maxOverShootSize = std::max(initialCapacity_ / 2, stepOverShootSize); // 2: half
            if (overShootSize_ < maxOverShootSize) {
                overShootSize_ += stepOverShootSize;
            }
        }

        if (Expand(isPromoted)) {
            object = allocator_.Allocate(size);
        }
    }
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
    if (object != 0 && !isPromoted) {
        InvokeAllocationInspector(object, size, size);
    }
#endif
    return object;
}

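// Appends a new region to the space. Expansion is refused once committedSize_ reaches the initial
// capacity plus the allowed overshoot, unless NeedStopCollection() asks to avoid triggering a GC.
// Before switching regions, the bytes used in the current region are accounted either as allocations
// since the last GC (mutator) or as survivor bytes (promotion), and its high water mark is sealed.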
bool LinearSpace::Expand(bool isPromoted)
{
    if (committedSize_ >= initialCapacity_ + overShootSize_ + outOfMemoryOvershootSize_ &&
        !localHeap_->NeedStopCollection()) {
        return false;
    }

    uintptr_t top = allocator_.GetTop();
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        if (!isPromoted) {
            if (currentRegion->HasAgeMark()) {
                allocateAfterLastGC_ +=
                    currentRegion->GetAllocatedBytes(top) - currentRegion->GetAllocatedBytes(waterLine_);
            } else {
                allocateAfterLastGC_ += currentRegion->GetAllocatedBytes(top);
            }
        } else {
            // For GC
            survivalObjectSize_ += currentRegion->GetAllocatedBytes(top);
        }
        currentRegion->SetHighWaterMark(top);
    }
    JSThread *thread = localHeap_->GetJSThread();
    Region *region = heapRegionAllocator_->AllocateAlignedRegion(this, DEFAULT_REGION_SIZE, thread, localHeap_,
                                                                 thread_->IsConcurrentMarkingOrFinished());
    allocator_.Reset(region->GetBegin(), region->GetEnd());
    AddRegion(region);
    return true;
}

void LinearSpace::Stop()
{
    if (GetCurrentRegion() != nullptr) {
        GetCurrentRegion()->SetHighWaterMark(allocator_.GetTop());
    }
}

void LinearSpace::ResetAllocator()
{
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        allocator_.Reset(currentRegion->GetBegin(), currentRegion->GetEnd(), currentRegion->GetHighWaterMark());
    }
}

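// Walks every region linearly and calls the visitor for each live object. Free objects are skipped
// by advancing over their recorded size; they are briefly unpoisoned so the size can be read under
// ASAN. The current region is only traversed up to the allocator's top.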
void LinearSpace::IterateOverObjects(const std::function<void(TaggedObject *object)> &visitor) const
{
    auto current = GetCurrentRegion();
    EnumerateRegions([&](Region *region) {
        auto curPtr = region->GetBegin();
        uintptr_t endPtr = 0;
        if (region == current) {
            auto top = allocator_.GetTop();
            endPtr = curPtr + region->GetAllocatedBytes(top);
        } else {
            endPtr = curPtr + region->GetAllocatedBytes();
        }

        size_t objSize;
        while (curPtr < endPtr) {
            auto freeObject = FreeObject::Cast(curPtr);
            // If curPtr points to a free object, it must be unpoisoned before it is inspected.
            ASAN_UNPOISON_MEMORY_REGION(freeObject, TaggedObject::TaggedObjectSize());
            if (!freeObject->IsFreeObject()) {
                auto obj = reinterpret_cast<TaggedObject *>(curPtr);
                visitor(obj);
                objSize = obj->GetClass()->SizeFromJSHClass(obj);
            } else {
                freeObject->AsanUnPoisonFreeObject();
                objSize = freeObject->Available();
                freeObject->AsanPoisonFreeObject();
            }
            curPtr += objSize;
            CHECK_OBJECT_SIZE(objSize);
        }
        CHECK_REGION_END(curPtr, endPtr);
    });
}

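// Reports a sampled allocation to the allocation inspector when heap sampling is active: the
// inspector fires once the sampling counter's byte threshold is crossed, and the counter is
// advanced by the aligned size in either case.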
void LinearSpace::InvokeAllocationInspector(Address object, size_t size, size_t alignedSize)
{
    ASSERT(size <= alignedSize);
    if (LIKELY(!allocationCounter_.IsActive())) {
        return;
    }
    if (alignedSize >= allocationCounter_.NextBytes()) {
        allocationCounter_.InvokeAllocationInspector(object, size, alignedSize);
    }
    allocationCounter_.AdvanceAllocationInspector(alignedSize);
}

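// EdenSpace maps its whole maximum capacity up front, tags the mapping for memory accounting, and
// slices it into DEFAULT_REGION_SIZE chunks kept in freeRegions_. Regions are later carved out of
// this pool by AllocRegion() instead of being mapped on demand.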
EdenSpace::EdenSpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity)
    : LinearSpace(heap, MemSpaceType::EDEN_SPACE, initialCapacity, maximumCapacity)
{
    size_t memSize = AlignUp(maximumCapacity_, DEFAULT_REGION_SIZE);
    memMap_ = PageMap(memSize, PAGE_PROT_READWRITE, DEFAULT_REGION_SIZE);
    JSThread::ThreadId threadId = 0;
    if (heap->EnablePageTagThreadId()) {
        threadId = heap->GetJSThread()->GetThreadId();
    }
    PageTag(memMap_.GetMem(), memMap_.GetSize(), PageTagType::HEAP, ToSpaceTypeName(MemSpaceType::EDEN_SPACE),
            threadId);
    auto mem = ToUintPtr(memMap_.GetMem());
    auto count = memMap_.GetSize() / DEFAULT_REGION_SIZE;
    while (count-- > 0) {
        freeRegions_.emplace_back(ToVoidPtr(mem), DEFAULT_REGION_SIZE);
        mem = mem + DEFAULT_REGION_SIZE;
    }
}

EdenSpace::~EdenSpace()
{
    PageUnmap(memMap_);
}

void EdenSpace::Initialize()
{
    auto region = AllocRegion();
    if (UNLIKELY(region == nullptr)) {
        LOG_GC(ERROR) << "region is nullptr";
        return;
    }
    AddRegion(region);
    allocator_.Reset(region->GetBegin(), region->GetEnd());
    localHeap_->InstallEdenAllocator();
}

void EdenSpace::Restart()
{
    overShootSize_ = 0;
    survivalObjectSize_ = 0;
    allocateAfterLastGC_ = 0;
    isFull_ = false;
    Initialize();
}

uintptr_t EdenSpace::AllocateSync(size_t size)
{
    LockHolder lock(lock_);
    return Allocate(size);
}

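// Eden allocation: bump-pointer fast path, then a single attempt to expand into the next free
// region. Once no free region is left the space marks itself full and releases the eden allocator,
// and further requests return 0.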
uintptr_t EdenSpace::Allocate(size_t size)
{
    if (isFull_) {
        return 0;
    }
    auto object = allocator_.Allocate(size);
    if (object != 0) {
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
        // Heap sampling cannot be performed during GC.
        InvokeAllocationInspector(object, size, size);
#endif
        return object;
    }
    if (Expand()) {
        if (!localHeap_->NeedStopCollection()) {
            localHeap_->TryTriggerIncrementalMarking();
            localHeap_->TryTriggerIdleCollection();
            localHeap_->TryTriggerConcurrentMarking();
        }
        object = allocator_.Allocate(size);
    } else {
        isFull_ = true;
        localHeap_->ReleaseEdenAllocator();
    }
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
    if (object != 0) {
        InvokeAllocationInspector(object, size, size);
    }
#endif
    return object;
}

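// Takes one pre-mapped chunk from freeRegions_ and constructs the Region header in place with
// placement new. The chunk must stay aligned to PANDA_POOL_ALIGNMENT_IN_BYTES; the usable begin
// pointer is aligned past the Region header.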
Region *EdenSpace::AllocRegion()
{
    if (freeRegions_.empty()) {
        return nullptr;
    }
    auto memmap = freeRegions_.back();
    freeRegions_.pop_back();
    heapRegionAllocator_->IncreaseAnnoMemoryUsage(memmap.GetSize());
    auto mem = reinterpret_cast<uintptr_t>(memmap.GetMem());
    // Check that the address is 256KB aligned.
    LOG_ECMA_IF(AlignUp(mem, PANDA_POOL_ALIGNMENT_IN_BYTES) != mem, FATAL) << "region not align by 256KB";

    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    uintptr_t begin = AlignUp(mem + sizeof(Region), static_cast<size_t>(MemAlignment::MEM_ALIGN_REGION));
    uintptr_t end = mem + memmap.GetSize();
    auto region = new (ToVoidPtr(mem)) Region(localHeap_->GetNativeAreaAllocator(), mem, begin, end,
                                              GetRegionFlag(), RegionTypeFlag::DEFAULT);
    region->Initialize();
    return region;
}

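// Switches allocation to a fresh region from the free pool. As in LinearSpace::Expand, the bytes
// used in the current region are added to allocateAfterLastGC_ (minus the part below the age mark,
// if any) and the region is sealed at its high water mark before the allocator is reset.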
bool EdenSpace::Expand()
{
    Region *region = AllocRegion();
    if (region == nullptr) {
        return false;
    }

    uintptr_t top = allocator_.GetTop();
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        if (currentRegion->HasAgeMark()) {
            allocateAfterLastGC_ +=
                currentRegion->GetAllocatedBytes(top) - currentRegion->GetAllocatedBytes(waterLine_);
        } else {
            allocateAfterLastGC_ += currentRegion->GetAllocatedBytes(top);
        }
        currentRegion->SetHighWaterMark(top);
    }
    allocator_.Reset(region->GetBegin(), region->GetEnd());
    AddRegion(region);
    return true;
}

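// Returns every eden region to the free pool after a collection. Remembered sets and size
// accounting are cleared, but the underlying memory stays mapped and is simply re-queued in
// freeRegions_ for reuse; it is only unmapped when the space is destroyed.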
void EdenSpace::ReclaimRegions([[maybe_unused]] size_t cachedSize)
{
    const auto spaceName = ToSpaceTypeName(MemSpaceType::EDEN_SPACE);
    EnumerateRegions([this, &spaceName](Region *current) {
        LOG_GC(DEBUG) << "Clear region from: " << current << " to " << spaceName;
        current->DeleteLocalToShareRSet();
        DecreaseCommitted(current->GetCapacity());
        DecreaseObjectSize(current->GetSize());
        current->Invalidate();
        current->ClearMembers();
        void *mem = ToVoidPtr(current->GetAllocateBase());
        size_t memSize = current->GetCapacity();
        freeRegions_.emplace_back(mem, memSize);
        heapRegionAllocator_->DecreaseAnnoMemoryUsage(memSize);
    });
    regionList_.Clear();
    committedSize_ = 0;
}

size_t EdenSpace::GetHeapObjectSize() const
{
    return survivalObjectSize_ + allocateAfterLastGC_;
}

size_t EdenSpace::GetSurvivalObjectSize() const
{
    return survivalObjectSize_;
}

void EdenSpace::SetOverShootSize(size_t size)
{
    overShootSize_ = size;
}

size_t EdenSpace::GetAllocatedSizeSinceGC(uintptr_t top) const
{
    size_t currentRegionSize = 0;
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        currentRegionSize = currentRegion->GetAllocatedBytes(top);
        if (currentRegion->HasAgeMark()) {
            currentRegionSize -= currentRegion->GetAllocatedBytes(waterLine_);
        }
    }
    return allocateAfterLastGC_ + currentRegionSize;
}

SemiSpace::SemiSpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity)
    : LinearSpace(heap, MemSpaceType::SEMI_SPACE, initialCapacity, maximumCapacity),
      minimumCapacity_(initialCapacity) {}

void SemiSpace::Initialize()
{
    JSThread *thread = localHeap_->GetJSThread();
    Region *region = heapRegionAllocator_->AllocateAlignedRegion(this, DEFAULT_REGION_SIZE, thread, localHeap_);
    AddRegion(region);
    allocator_.Reset(region->GetBegin(), region->GetEnd());
}

void SemiSpace::Restart(size_t overShootSize)
{
    overShootSize_ = overShootSize;
    survivalObjectSize_ = 0;
    allocateAfterLastGC_ = 0;
    Initialize();
}

size_t SemiSpace::CalculateNewOverShootSize()
{
    return committedSize_ <= maximumCapacity_ ?
        0 : AlignUp((committedSize_ - maximumCapacity_) / 2, DEFAULT_REGION_SIZE); // 2: half
}

bool SemiSpace::CommittedSizeIsLarge()
{
    return committedSize_ >= maximumCapacity_ * 2; // 2: double the maximum capacity
}

uintptr_t SemiSpace::AllocateSync(size_t size)
{
    LockHolder lock(lock_);
    return Allocate(size, true);
}

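// Moves a whole region from fromSpace into this semi space without evacuating its objects; the
// region is flagged IN_NEW_TO_NEW_SET and its allocated bytes are counted as survivors. Fails if
// the committed size would exceed the maximum capacity plus the current overshoot.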
bool SemiSpace::SwapRegion(Region *region, SemiSpace *fromSpace)
{
    LockHolder lock(lock_);
    if (committedSize_ + region->GetCapacity() > maximumCapacity_ + overShootSize_) {
        return false;
    }
    fromSpace->RemoveRegion(region);

    region->SetGCFlag(RegionGCFlags::IN_NEW_TO_NEW_SET);

    if (UNLIKELY(heap_->ShouldVerifyHeap())) {
        region->ResetInactiveSemiSpace();
    }

    regionList_.AddNodeToFront(region);
    IncreaseCommitted(region->GetCapacity());
    IncreaseObjectSize(region->GetSize());
    survivalObjectSize_ += region->GetAllocatedBytes();
    return true;
}

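// Records the current allocation top as the age mark. Bytes at or below the mark in the current
// region are added to survivalObjectSize_, other regions are flagged BELOW_AGE_MARK, and
// allocateAfterLastGC_ is reset so later allocations are tracked from the mark onward.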
void SemiSpace::SetWaterLine()
{
    waterLine_ = allocator_.GetTop();
    allocateAfterLastGC_ = 0;
    Region *last = GetCurrentRegion();
    if (last != nullptr) {
        last->SetGCFlag(RegionGCFlags::HAS_AGE_MARK);

        EnumerateRegions([&last](Region *current) {
            if (current != last) {
                current->SetGCFlag(RegionGCFlags::BELOW_AGE_MARK);
            }
        });
        survivalObjectSize_ += last->GetAllocatedBytes(waterLine_);
    } else {
        LOG_GC(INFO) << "SetWaterLine: No region survival in current gc, current region available size: "
                     << allocator_.Available();
    }
}

size_t SemiSpace::GetHeapObjectSize() const
{
    return survivalObjectSize_ + allocateAfterLastGC_;
}

size_t SemiSpace::GetSurvivalObjectSize() const
{
    return survivalObjectSize_;
}

void SemiSpace::SetOverShootSize(size_t size)
{
    overShootSize_ = size;
}

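// Resizes the semi space between minimumCapacity_ and maximumCapacity_ based on the survival rate
// of the last GC: a high survival rate grows the initial capacity by GROWING_FACTOR, a low one
// shrinks it, but only when recent allocation throughput is low. Returns true when the capacity
// was changed; too small an allocation volume since the last GC skips the adjustment entirely.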
bool SemiSpace::AdjustCapacity(size_t allocatedSizeSinceGC, JSThread *thread)
{
    if (allocatedSizeSinceGC <= initialCapacity_ * GROW_OBJECT_SURVIVAL_RATE / GROWING_FACTOR) {
        return false;
    }
    double curObjectSurvivalRate = static_cast<double>(survivalObjectSize_) / allocatedSizeSinceGC;
    double initialObjectRate = static_cast<double>(survivalObjectSize_) / initialCapacity_;
    if (curObjectSurvivalRate > GROW_OBJECT_SURVIVAL_RATE || initialObjectRate > GROW_OBJECT_SURVIVAL_RATE) {
        if (initialCapacity_ >= maximumCapacity_) {
            return false;
        }
        size_t newCapacity = initialCapacity_ * GROWING_FACTOR;
        SetInitialCapacity(std::min(newCapacity, maximumCapacity_));
        if (newCapacity == maximumCapacity_) {
            localHeap_->GetJSObjectResizingStrategy()->UpdateGrowStep(
                thread,
                JSObjectResizingStrategy::PROPERTIES_GROW_SIZE * 2); // 2: double
        }
        return true;
    } else if (curObjectSurvivalRate < SHRINK_OBJECT_SURVIVAL_RATE) {
        if (initialCapacity_ <= minimumCapacity_) {
            return false;
        }
        double speed = localHeap_->GetMemController()->GetNewSpaceAllocationThroughputPerMS();
        if (speed > LOW_ALLOCATION_SPEED_PER_MS) {
            return false;
        }
        size_t newCapacity = initialCapacity_ / GROWING_FACTOR;
        SetInitialCapacity(std::max(newCapacity, minimumCapacity_));
        localHeap_->GetJSObjectResizingStrategy()->UpdateGrowStep(thread);
        return true;
    }
    return false;
}

size_t SemiSpace::GetAllocatedSizeSinceGC(uintptr_t top) const
{
    size_t currentRegionSize = 0;
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        currentRegionSize = currentRegion->GetAllocatedBytes(top);
        if (currentRegion->HasAgeMark()) {
            currentRegionSize -= currentRegion->GetAllocatedBytes(waterLine_);
        }
    }
    return allocateAfterLastGC_ + currentRegionSize;
}

SnapshotSpace::SnapshotSpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity)
    : LinearSpace(heap, MemSpaceType::SNAPSHOT_SPACE, initialCapacity, maximumCapacity) {}

ReadOnlySpace::ReadOnlySpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity, MemSpaceType type)
    : LinearSpace(heap, type, initialCapacity, maximumCapacity) {}
}  // namespace panda::ecmascript