/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ecmascript/mem/linear_space.h"

#include "ecmascript/js_hclass-inl.h"
#include "ecmascript/mem/allocator-inl.h"
#include "ecmascript/mem/mem_controller.h"

namespace panda::ecmascript {
LinearSpace::LinearSpace(Heap *heap, MemSpaceType type, size_t initialCapacity, size_t maximumCapacity)
    : Space(heap, heap->GetHeapRegionAllocator(), type, initialCapacity, maximumCapacity),
      localHeap_(heap),
      thread_(heap->GetJSThread()),
      waterLine_(0)
{
}

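// Fast path: bump-pointer allocation in the current region. If that fails, try to expand the space
// and, when the allocation is not a GC promotion, give the heap a chance to trigger incremental,
// idle or concurrent marking before retrying the allocation.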
uintptr_t LinearSpace::Allocate(size_t size, bool isPromoted)
{
#if ECMASCRIPT_ENABLE_THREAD_STATE_CHECK
    if (UNLIKELY(!localHeap_->GetJSThread()->IsInRunningStateOrProfiling())) { // LCOV_EXCL_BR_LINE
        LOG_ECMA(FATAL) << "Allocate must be in jsthread running state";
        UNREACHABLE();
    }
#endif
    auto object = allocator_.Allocate(size);
    if (object != 0) {
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
        // Heap sampling is not allowed during GC.
        if (!isPromoted) {
            InvokeAllocationInspector(object, size, size);
        }
#endif
        return object;
    }
    if (Expand(isPromoted)) {
        if (!isPromoted) {
            if (!localHeap_->NeedStopCollection() || localHeap_->IsNearGCInSensitive() ||
                (localHeap_->IsJustFinishStartup() && localHeap_->ObjectExceedJustFinishStartupThresholdForCM())) {
                localHeap_->TryTriggerIncrementalMarking();
                localHeap_->TryTriggerIdleCollection();
                localHeap_->TryTriggerConcurrentMarking();
            }
        }
        object = allocator_.Allocate(size);
    } else if (localHeap_->IsMarking() || !localHeap_->IsEmptyIdleTask()) {
        // Temporarily adjust the semi space capacity
        if (localHeap_->IsConcurrentFullMark()) {
            overShootSize_ = localHeap_->CalculateLinearSpaceOverShoot();
        } else {
            size_t stepOverShootSize = localHeap_->GetEcmaParamConfiguration().GetSemiSpaceStepOvershootSize();
            size_t maxOverShootSize = std::max(initialCapacity_ / 2, stepOverShootSize); // 2: half
            if (overShootSizeForConcurrentMark_ < maxOverShootSize) {
                overShootSize_ += stepOverShootSize;
                overShootSizeForConcurrentMark_ += stepOverShootSize;
            }
        }

        if (Expand(isPromoted)) {
            object = allocator_.Allocate(size);
        }
    }
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
    if (object != 0 && !isPromoted) {
        InvokeAllocationInspector(object, size, size);
    }
#endif
    return object;
}

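// Adds one more region to the space if the committed size still fits within the allowed capacity
// (initial capacity plus overshoot). Before switching the allocator to the new region, the bytes
// allocated in the current region are accounted either as post-GC allocation or, for promotions,
// as survival object size, and the region's high water mark is updated.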
bool LinearSpace::Expand(bool isPromoted)
{
    if (committedSize_ >= initialCapacity_ + overShootSize_ + outOfMemoryOvershootSize_ &&
        (isPromoted || !localHeap_->NeedStopCollection())) {
        return false;
    }

    uintptr_t top = allocator_.GetTop();
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        if (!isPromoted) {
            if (currentRegion->HasAgeMark()) {
                allocateAfterLastGC_ +=
                    currentRegion->GetAllocatedBytes(top) - currentRegion->GetAllocatedBytes(waterLine_);
            } else {
                allocateAfterLastGC_ += currentRegion->GetAllocatedBytes(top);
            }
        } else {
            // For GC
            survivalObjectSize_ += currentRegion->GetAllocatedBytes(top);
        }
        currentRegion->SetHighWaterMark(top);
    }
    JSThread *thread = localHeap_->GetJSThread();
    Region *region = heapRegionAllocator_->AllocateAlignedRegion(this, DEFAULT_REGION_SIZE, thread, localHeap_,
                                                                 thread_->IsConcurrentMarkingOrFinished());
    allocator_.Reset(region->GetBegin(), region->GetEnd());
    AddRegion(region);
    return true;
}

void LinearSpace::Stop()
{
    if (GetCurrentRegion() != nullptr) {
        GetCurrentRegion()->SetHighWaterMark(allocator_.GetTop());
    }
}

void LinearSpace::ResetAllocator()
{
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        allocator_.Reset(currentRegion->GetBegin(), currentRegion->GetEnd(), currentRegion->GetHighWaterMark());
    }
}

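// Visits every live object in the space. Free objects are skipped but still consulted for their
// size; the current region is only walked up to the allocator top.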
void LinearSpace::IterateOverObjects(const std::function<void(TaggedObject *object)> &visitor) const
{
    auto current = GetCurrentRegion();
    EnumerateRegions([&](Region *region) {
        auto curPtr = region->GetBegin();
        uintptr_t endPtr = 0;
        if (region == current) {
            auto top = allocator_.GetTop();
            endPtr = curPtr + region->GetAllocatedBytes(top);
        } else {
            endPtr = curPtr + region->GetAllocatedBytes();
        }

        size_t objSize;
        while (curPtr < endPtr) {
            auto freeObject = FreeObject::Cast(curPtr);
            // If curPtr points to a free object, it must be unpoisoned before it can be inspected.
            ASAN_UNPOISON_MEMORY_REGION(freeObject, TaggedObject::TaggedObjectSize());
            if (!freeObject->IsFreeObject()) {
                auto obj = reinterpret_cast<TaggedObject *>(curPtr);
                visitor(obj);
                objSize = obj->GetClass()->SizeFromJSHClass(obj);
            } else {
                freeObject->AsanUnPoisonFreeObject();
                objSize = freeObject->Available();
                freeObject->AsanPoisonFreeObject();
            }
            curPtr += objSize;
            CHECK_OBJECT_SIZE(objSize);
        }
        CHECK_REGION_END(curPtr, endPtr);
    });
}

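// Heap sampling support: once the bytes allocated since the last sample reach the counter's next
// step, the allocation inspector is invoked; the counter is then advanced by the aligned size.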
void LinearSpace::InvokeAllocationInspector(Address object, size_t size, size_t alignedSize)
{
    ASSERT(size <= alignedSize);
    if (LIKELY(!allocationCounter_.IsActive())) {
        return;
    }
    if (alignedSize >= allocationCounter_.NextBytes()) {
        allocationCounter_.InvokeAllocationInspector(object, size, alignedSize);
    }
    allocationCounter_.AdvanceAllocationInspector(alignedSize);
}

SemiSpace::SemiSpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity)
    : LinearSpace(heap, MemSpaceType::SEMI_SPACE, initialCapacity, maximumCapacity),
      minimumCapacity_(initialCapacity) {}

void SemiSpace::Initialize()
{
    JSThread *thread = localHeap_->GetJSThread();
    Region *region = heapRegionAllocator_->AllocateAlignedRegion(this, DEFAULT_REGION_SIZE, thread, localHeap_);
    AddRegion(region);
    allocator_.Reset(region->GetBegin(), region->GetEnd());
}

void SemiSpace::Restart(size_t overShootSize)
{
    overShootSize_ = overShootSize;
    overShootSizeForConcurrentMark_ = 0;
    survivalObjectSize_ = 0;
    allocateAfterLastGC_ = 0;
    Initialize();
}

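// Extra overshoot allowed when the committed size already exceeds the maximum capacity,
// proportional to the excess and aligned up to a whole region.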
size_t SemiSpace::CalculateNewOverShootSize()
{
    return committedSize_ <= maximumCapacity_ ?
           0 : AlignUp(static_cast<size_t>((committedSize_ - maximumCapacity_) * HPPGC_NEWSPACE_SIZE_RATIO),
                       DEFAULT_REGION_SIZE);
}

bool SemiSpace::CommittedSizeIsLarge()
{
    return committedSize_ >= maximumCapacity_ * 2; // 2 means double.
}

uintptr_t SemiSpace::AllocateSync(size_t size)
{
    LockHolder lock(lock_);
    return Allocate(size, true);
}

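// Moves a surviving region from the from-space into this space (the new-to-new set), provided the
// committed size stays within the maximum capacity plus overshoot; counters follow the region.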
bool SemiSpace::SwapRegion(Region *region, SemiSpace *fromSpace)
{
    if (committedSize_ + region->GetCapacity() > maximumCapacity_ + overShootSize_) {
        return false;
    }
    fromSpace->RemoveRegion(region);

    region->SetGCFlag(RegionGCFlags::IN_NEW_TO_NEW_SET);

    if (UNLIKELY(heap_->ShouldVerifyHeap())) { // LCOV_EXCL_BR_LINE
        region->ResetInactiveSemiSpace();
    }

    regionList_.AddNodeToFront(region);
    IncreaseCommitted(region->GetCapacity());
    IncreaseObjectSize(region->GetSize());
    survivalObjectSize_ += region->GetAllocatedBytes();
    return true;
}

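// Records the current allocator top as the water line (age mark). Bytes below the mark count as
// survivors of this GC; regions other than the current one are flagged as fully below the mark.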
void SemiSpace::SetWaterLine()
{
    waterLine_ = allocator_.GetTop();
    allocateAfterLastGC_ = 0;
    Region *last = GetCurrentRegion();
    if (last != nullptr) {
        last->SetGCFlag(RegionGCFlags::HAS_AGE_MARK);

        EnumerateRegions([&last](Region *current) {
            if (current != last) {
                current->SetGCFlag(RegionGCFlags::BELOW_AGE_MARK);
            }
        });
        survivalObjectSize_ += last->GetAllocatedBytes(waterLine_);
    } else {
        LOG_GC(INFO) << "SetWaterLine: No region survival in current gc, current region available size: "
                     << allocator_.Available();
    }
}

size_t SemiSpace::GetHeapObjectSize() const
{
    return survivalObjectSize_ + allocateAfterLastGC_;
}

size_t SemiSpace::GetSurvivalObjectSize() const
{
    return survivalObjectSize_;
}

void SemiSpace::SetOverShootSize(size_t size)
{
    overShootSize_ = size;
}

void SemiSpace::AddOverShootSize(size_t size)
{
    overShootSize_ += size;
}

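// Grows or shrinks the semi space capacity after GC based on the object survival rate, the
// committed size and the recent allocation speed. Returns true if the capacity was adjusted.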
bool SemiSpace::AdjustCapacity(size_t allocatedSizeSinceGC, JSThread *thread)
{
    if (allocatedSizeSinceGC <= initialCapacity_ * GROW_OBJECT_SURVIVAL_RATE / GROWING_FACTOR) {
        return false;
    }
    size_t committedSize = GetCommittedSize();
    double curObjectSurvivalRate = static_cast<double>(survivalObjectSize_) / allocatedSizeSinceGC;
    double committedSurvivalRate = static_cast<double>(committedSize) / initialCapacity_;
    SetOverShootSize(0);
    double allocSpeed = localHeap_->GetMemController()->GetNewSpaceAllocationThroughputPerMS();
    if (curObjectSurvivalRate > GROW_OBJECT_SURVIVAL_RATE || committedSurvivalRate > GROW_OBJECT_SURVIVAL_RATE) {
        size_t newCapacity = initialCapacity_ * GROWING_FACTOR;
        while (committedSize >= newCapacity && newCapacity < maximumCapacity_) {
            newCapacity = newCapacity * GROWING_FACTOR;
        }
        SetInitialCapacity(std::min(newCapacity, maximumCapacity_));
        if (committedSize >= initialCapacity_ * GROW_OBJECT_SURVIVAL_RATE) {
            // Use a large overshoot to keep heapObjectSize from getting too close to the committed size.
            SetOverShootSize(committedSize);
        }
        if (newCapacity == maximumCapacity_) {
            localHeap_->GetJSObjectResizingStrategy()->UpdateGrowStep(
                thread,
                JSObjectResizingStrategy::PROPERTIES_GROW_SIZE * 2); // 2: double
        }
        return true;
    } else if (initialCapacity_ < (MIN_GC_INTERVAL_MS * allocSpeed) &&
               initialCapacity_ < maximumCapacity_) {
        size_t newCapacity = initialCapacity_ * GROWING_FACTOR;
        SetInitialCapacity(std::min(newCapacity, maximumCapacity_));
        if (newCapacity == maximumCapacity_) {
            localHeap_->GetJSObjectResizingStrategy()->UpdateGrowStep(
                thread,
                JSObjectResizingStrategy::PROPERTIES_GROW_SIZE * 2); // 2: double
        }
        return true;
    } else if (curObjectSurvivalRate < SHRINK_OBJECT_SURVIVAL_RATE) {
        if (initialCapacity_ <= minimumCapacity_) {
            return false;
        }
        if (allocSpeed > LOW_ALLOCATION_SPEED_PER_MS) {
            return false;
        }
        size_t newCapacity = initialCapacity_ / GROWING_FACTOR;
        SetInitialCapacity(std::max(newCapacity, minimumCapacity_));
        localHeap_->GetJSObjectResizingStrategy()->UpdateGrowStep(thread);
        return true;
    }
    return false;
}

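// Bytes allocated since the last GC: allocations recorded when earlier regions filled up plus the
// part of the current region above the age mark.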
size_t SemiSpace::GetAllocatedSizeSinceGC(uintptr_t top) const
{
    size_t currentRegionSize = 0;
    auto currentRegion = GetCurrentRegion();
    if (currentRegion != nullptr) {
        currentRegionSize = currentRegion->GetAllocatedBytes(top);
        if (currentRegion->HasAgeMark()) {
            currentRegionSize -= currentRegion->GetAllocatedBytes(waterLine_);
        }
    }
    return allocateAfterLastGC_ + currentRegionSize;
}

SnapshotSpace::SnapshotSpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity)
    : LinearSpace(heap, MemSpaceType::SNAPSHOT_SPACE, initialCapacity, maximumCapacity) {}

ReadOnlySpace::ReadOnlySpace(Heap *heap, size_t initialCapacity, size_t maximumCapacity, MemSpaceType type)
    : LinearSpace(heap, type, initialCapacity, maximumCapacity) {}
} // namespace panda::ecmascript