/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_PARALLEL_MARKER_INL_H
#define ECMASCRIPT_MEM_PARALLEL_MARKER_INL_H

#include "ecmascript/mem/parallel_marker.h"

#include "ecmascript/js_hclass-inl.h"
#include "ecmascript/mem/gc_bitset.h"
#include "ecmascript/mem/heap.h"
#include "ecmascript/mem/region-inl.h"
#include "ecmascript/mem/tlab_allocator-inl.h"

namespace panda::ecmascript {
constexpr size_t HEAD_SIZE = TaggedObject::TaggedObjectSize();

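// Visits the in-object property slots of |root|, invoking |callback| only on slots whose layout
// attribute marks them as tagged references. Visiting stops at the layout's properties capacity,
// even if the caller passed a larger slot range.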
template <typename Callback>
ARK_INLINE bool NonMovableMarker::VisitBodyInObj(TaggedObject *root, ObjectSlot start, ObjectSlot end,
                                                 bool needBarrier, Callback callback)
{
    auto hclass = root->SynchronizedGetClass();
    Region *rootRegion = Region::ObjectAddressToRange(root);
    int index = 0;
    auto layout = LayoutInfo::UncheckCast(hclass->GetLayout().GetTaggedObject());
    ObjectSlot realEnd = start;
    realEnd += layout->GetPropertiesCapacity();
    end = end > realEnd ? realEnd : end;
    for (ObjectSlot slot = start; slot < end; slot++) {
        auto attr = layout->GetAttr(index++);
        if (attr.IsTaggedRep()) {
            callback(slot, rootRegion, needBarrier);
        }
    }
    return true;
}

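// Marks the heap object referenced by |slot|. Weak referents are recorded instead of being
// marked; when a barrier is requested and the referent lies in the collect set, the slot is
// added to the holder region's cross-region remembered set.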
inline void NonMovableMarker::MarkValue(uint32_t threadId, ObjectSlot &slot, Region *rootRegion, bool needBarrier)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        TaggedObject *obj = nullptr;
        if (!value.IsWeakForHeapObject()) {
            obj = value.GetTaggedObject();
            MarkObject(threadId, obj);
        } else {
            RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(slot.SlotAddress()), rootRegion);
            obj = value.GetWeakReferentUnChecked();
        }
        if (needBarrier) {
            Region *valueRegion = Region::ObjectAddressToRange(obj);
            if (valueRegion->InCollectSet()) {
                rootRegion->AtomicInsertCrossRegionRSet(slot.SlotAddress());
            }
        }
    }
}

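// Sets the mark bit for |object| in its region's bitset and pushes it onto this thread's work
// queue if it was not already marked. During a young (non-full) concurrent mark, objects outside
// the young space are skipped.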
inline void NonMovableMarker::MarkObject(uint32_t threadId, TaggedObject *object)
{
    Region *objectRegion = Region::ObjectAddressToRange(object);

    if (!heap_->IsConcurrentFullMark() && !objectRegion->InYoungSpace()) {
        return;
    }

    if (objectRegion->AtomicMark(object)) {
        workManager_->Push(threadId, object, objectRegion);
    }
}

inline void NonMovableMarker::HandleRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        MarkObject(threadId, value.GetTaggedObject());
    }
}

inline void NonMovableMarker::HandleRangeRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot start,
                                               ObjectSlot end)
{
    for (ObjectSlot slot = start; slot < end; slot++) {
        JSTaggedValue value(slot.GetTaggedType());
        if (value.IsHeapObject()) {
            if (value.IsWeakForHeapObject()) {
                LOG_ECMA_MEM(FATAL) << "Weak Reference in NonMovableMarker roots";
            }
            MarkObject(threadId, value.GetTaggedObject());
        }
    }
}

inline void NonMovableMarker::HandleDerivedRoots([[maybe_unused]] Root type, [[maybe_unused]] ObjectSlot base,
                                                 [[maybe_unused]] ObjectSlot derived,
                                                 [[maybe_unused]] uintptr_t baseOldObject)
{
    // Derived roots only need updating when objects move; partial-GC marking leaves the slot untouched.
}

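// Iterates the old-to-new remembered set of |region|: strong referents are marked, weak referents
// are recorded for post-mark processing.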
inline void NonMovableMarker::HandleOldToNewRSet(uint32_t threadId, Region *region)
{
    region->IterateAllOldToNewBits([this, threadId, &region](void *mem) -> bool {
        ObjectSlot slot(ToUintPtr(mem));
        JSTaggedValue value(slot.GetTaggedType());
        if (value.IsHeapObject()) {
            if (value.IsWeakForHeapObject()) {
                RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(mem), region);
            } else {
                MarkObject(threadId, value.GetTaggedObject());
            }
        }
        return true;
    });
}

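// Queues a weak reference for post-mark processing, but only when neither the holder region nor
// the referent region is in the young space or the collect set.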
inline void NonMovableMarker::RecordWeakReference(uint32_t threadId, JSTaggedType *ref, Region *objectRegion)
{
    auto value = JSTaggedValue(*ref);
    Region *valueRegion = Region::ObjectAddressToRange(value.GetTaggedWeakRef());
    if (!objectRegion->InYoungSpaceOrCSet() && !valueRegion->InYoungSpaceOrCSet()) {
        workManager_->PushWeakReference(threadId, ref);
    }
}

template <typename Callback>
ARK_INLINE bool MovableMarker::VisitBodyInObj(TaggedObject *root, ObjectSlot start, ObjectSlot end, Callback callback)
{
    auto hclass = root->GetClass();
    int index = 0;
    TaggedObject *dst = hclass->GetLayout().GetTaggedObject();
    auto layout = LayoutInfo::UncheckCast(dst);
    ObjectSlot realEnd = start;
    realEnd += layout->GetPropertiesCapacity();
    end = end > realEnd ? realEnd : end;
    for (ObjectSlot slot = start; slot < end; slot++) {
        auto attr = layout->GetAttr(index++);
        if (attr.IsTaggedRep()) {
            callback(slot, root);
        }
    }
    return true;
}

inline void MovableMarker::HandleRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        MarkObject(threadId, value.GetTaggedObject(), slot);
    }
}

inline void MovableMarker::HandleRangeRoots(uint32_t threadId, [[maybe_unused]] Root type, ObjectSlot start,
                                            ObjectSlot end)
{
    for (ObjectSlot slot = start; slot < end; slot++) {
        JSTaggedValue value(slot.GetTaggedType());
        if (value.IsHeapObject()) {
            if (value.IsWeakForHeapObject()) {
                Region *objectRegion = Region::ObjectAddressToRange(start.SlotAddress());
                RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(slot.SlotAddress()), objectRegion);
            } else {
                MarkObject(threadId, value.GetTaggedObject(), slot);
            }
        }
    }
}

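// Rebases a derived (interior) pointer: the derived slot keeps its original offset from the base
// object after the base has been forwarded to a new address.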
inline void MovableMarker::HandleDerivedRoots([[maybe_unused]] Root type, ObjectSlot base,
                                              ObjectSlot derived, uintptr_t baseOldObject)
{
    if (JSTaggedValue(base.GetTaggedType()).IsHeapObject()) {
        derived.Update(base.GetTaggedType() + derived.GetTaggedType() - baseOldObject);
    }
}

inline void MovableMarker::HandleOldToNewRSet(uint32_t threadId, Region *region)
{
    region->IterateAllOldToNewBits([this, threadId, &region](void *mem) -> bool {
        ObjectSlot slot(ToUintPtr(mem));
        JSTaggedValue value(slot.GetTaggedType());
        if (value.IsHeapObject()) {
            if (value.IsWeakForHeapObject()) {
                RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(mem), region);
                return true;
            }
            auto slotStatus = MarkObject(threadId, value.GetTaggedObject(), slot);
            if (slotStatus == SlotStatus::CLEAR_SLOT) {
                return false;
            }
        }
        return true;
    });
}

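// Allocates the destination for an evacuated object: promoted objects are placed in the compress
// (old) space, others in the semi space, falling back to promotion when the semi-space TLAB
// allocation fails. Failure on the fallback path is fatal.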
inline uintptr_t MovableMarker::AllocateDstSpace(uint32_t threadId, size_t size, bool &shouldPromote)
{
    uintptr_t forwardAddress = 0;
    if (shouldPromote) {
        forwardAddress = workManager_->GetTlabAllocator(threadId)->Allocate(size, COMPRESS_SPACE);
        if (UNLIKELY(forwardAddress == 0)) {
            LOG_ECMA_MEM(FATAL) << "EvacuateObject alloc failed: "
                                << " size: " << size;
            UNREACHABLE();
        }
    } else {
        forwardAddress = workManager_->GetTlabAllocator(threadId)->Allocate(size, SEMI_SPACE);
        if (UNLIKELY(forwardAddress == 0)) {
            forwardAddress = workManager_->GetTlabAllocator(threadId)->Allocate(size, COMPRESS_SPACE);
            if (UNLIKELY(forwardAddress == 0)) {
                LOG_ECMA_MEM(FATAL) << "EvacuateObject alloc failed: "
                                    << " size: " << size;
                UNREACHABLE();
            }
            shouldPromote = true;
        }
    }
    return forwardAddress;
}

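// Called by the thread that won the forwarding race: copies the object body to |toAddress|,
// restores the original mark word on the copy, reports the move, pushes the copy for scanning if
// its class has reference fields, and updates the slot to the new address.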
inline void MovableMarker::UpdateForwardAddressIfSuccess(uint32_t threadId, TaggedObject *object, JSHClass *klass,
    uintptr_t toAddress, size_t size, const MarkWord &markWord, ObjectSlot slot, bool isPromoted)
{
    if (memcpy_s(ToVoidPtr(toAddress + HEAD_SIZE), size - HEAD_SIZE, ToVoidPtr(ToUintPtr(object) + HEAD_SIZE),
                 size - HEAD_SIZE) != EOK) {
        LOG_FULL(FATAL) << "memcpy_s failed";
    }
    workManager_->IncreaseAliveSize(threadId, size);
    if (isPromoted) {
        workManager_->IncreasePromotedSize(threadId, size);
    }

    *reinterpret_cast<MarkWordType *>(toAddress) = markWord.GetValue();
    heap_->OnMoveEvent(reinterpret_cast<intptr_t>(object), reinterpret_cast<TaggedObject *>(toAddress), size);
    if (klass->HasReferenceField()) {
        workManager_->Push(threadId, reinterpret_cast<TaggedObject *>(toAddress));
    }
    slot.Update(reinterpret_cast<TaggedObject *>(toAddress));
}

inline bool MovableMarker::UpdateForwardAddressIfFailed(TaggedObject *object, uintptr_t toAddress, size_t size,
                                                        ObjectSlot slot)
{
    FreeObject::FillFreeObject(heap_->GetEcmaVM(), toAddress, size);
    TaggedObject *dst = MarkWord(object).ToForwardingAddress();
    slot.Update(dst);
    return Region::ObjectAddressToRange(dst)->InYoungSpace();
}

inline void SemiGCMarker::MarkValue(uint32_t threadId, TaggedObject *root, ObjectSlot slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        Region *rootRegion = Region::ObjectAddressToRange(root);
        if (value.IsWeakForHeapObject()) {
            RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(slot.SlotAddress()), rootRegion);
            return;
        }
        auto slotStatus = MarkObject(threadId, value.GetTaggedObject(), slot);
        if (!rootRegion->InYoungSpace() && slotStatus == SlotStatus::KEEP_SLOT) {
            SlotNeedUpdate waitUpdate(reinterpret_cast<TaggedObject *>(root), slot);
            workManager_->PushSlotNeedUpdate(threadId, waitUpdate);
        }
    }
}

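// Young-generation marking: objects outside the young space stay put (CLEAR_SLOT). If another
// thread has already installed a forwarding address the slot is updated to it; otherwise this
// thread evacuates the object.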
inline SlotStatus SemiGCMarker::MarkObject(uint32_t threadId, TaggedObject *object, ObjectSlot slot)
{
    Region *objectRegion = Region::ObjectAddressToRange(object);
    if (!objectRegion->InYoungSpace()) {
        return SlotStatus::CLEAR_SLOT;
    }

    MarkWord markWord(object);
    if (markWord.IsForwardingAddress()) {
        TaggedObject *dst = markWord.ToForwardingAddress();
        slot.Update(dst);
        Region *valueRegion = Region::ObjectAddressToRange(dst);
        return valueRegion->InYoungSpace() ? SlotStatus::KEEP_SLOT : SlotStatus::CLEAR_SLOT;
    }
    return EvacuateObject(threadId, object, markWord, slot);
}

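// Allocates a destination for |object| and races to install the forwarding address with a CAS on
// the mark word. The winner copies the object; the loser turns its speculative allocation into a
// free object and reuses the winner's forwarding address.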
inline SlotStatus SemiGCMarker::EvacuateObject(uint32_t threadId, TaggedObject *object, const MarkWord &markWord,
                                               ObjectSlot slot)
{
    JSHClass *klass = markWord.GetJSHClass();
    size_t size = klass->SizeFromJSHClass(object);
    bool isPromoted = ShouldBePromoted(object);

    uintptr_t forwardAddress = AllocateDstSpace(threadId, size, isPromoted);
    auto oldValue = markWord.GetValue();
    auto result = Barriers::AtomicSetPrimitive(object, 0, oldValue,
                                               MarkWord::FromForwardingAddress(forwardAddress));
    if (result == oldValue) {
        UpdateForwardAddressIfSuccess(threadId, object, klass, forwardAddress, size, markWord, slot, isPromoted);
        return isPromoted ? SlotStatus::CLEAR_SLOT : SlotStatus::KEEP_SLOT;
    }
    bool keepSlot = UpdateForwardAddressIfFailed(object, forwardAddress, size, slot);
    return keepSlot ? SlotStatus::KEEP_SLOT : SlotStatus::CLEAR_SLOT;
}

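// An object should be promoted to the old generation if its region is below the age mark, or if
// the region carries the age mark and the object's address is below the water line.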
inline bool SemiGCMarker::ShouldBePromoted(TaggedObject *object)
{
    Region *region = Region::ObjectAddressToRange(object);
    return (region->BelowAgeMark() || (region->HasAgeMark() && ToUintPtr(object) < waterLine_));
}

inline void SemiGCMarker::RecordWeakReference(uint32_t threadId, JSTaggedType *ref,
                                              [[maybe_unused]] Region *objectRegion)
{
    auto value = JSTaggedValue(*ref);
    Region *valueRegion = Region::ObjectAddressToRange(value.GetTaggedWeakRef());
    if (valueRegion->InYoungSpace()) {
        workManager_->PushWeakReference(threadId, ref);
    }
}

inline void CompressGCMarker::MarkValue(uint32_t threadId, ObjectSlot slot)
{
    JSTaggedValue value(slot.GetTaggedType());
    if (value.IsHeapObject()) {
        if (value.IsWeakForHeapObject()) {
            // CompressGCMarker does not need the holder region, so none is passed here.
            RecordWeakReference(threadId, reinterpret_cast<JSTaggedType *>(slot.SlotAddress()));
            return;
        }
        MarkObject(threadId, value.GetTaggedObject(), slot);
    }
}

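// Full-compress marking: objects in regions that are not evacuated are only mark-bitted and
// pushed for scanning; objects in evacuated regions are forwarded, reusing an existing forwarding
// address when another thread got there first.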
inline SlotStatus CompressGCMarker::MarkObject(uint32_t threadId, TaggedObject *object, ObjectSlot slot)
{
    Region *objectRegion = Region::ObjectAddressToRange(object);
    if (!NeedEvacuate(objectRegion)) {
        if (objectRegion->AtomicMark(object)) {
            workManager_->Push(threadId, object);
        }
        return SlotStatus::CLEAR_SLOT;
    }

    MarkWord markWord(object);
    if (markWord.IsForwardingAddress()) {
        TaggedObject *dst = markWord.ToForwardingAddress();
        slot.Update(dst);
        return SlotStatus::CLEAR_SLOT;
    }
    return EvacuateObject(threadId, object, markWord, slot);
}

inline uintptr_t CompressGCMarker::AllocateReadOnlySpace(size_t size)
{
    LockHolder lock(mutex_);
    uintptr_t forwardAddress = heap_->GetReadOnlySpace()->Allocate(size);
    if (UNLIKELY(forwardAddress == 0)) {
        LOG_ECMA_MEM(FATAL) << "Evacuate Read only Object: alloc failed: "
                            << " size: " << size;
        UNREACHABLE();
    }
    return forwardAddress;
}

inline uintptr_t CompressGCMarker::AllocateAppSpawnSpace(size_t size)
{
    LockHolder lock(mutex_);
    uintptr_t forwardAddress = heap_->GetAppSpawnSpace()->Allocate(size);
    if (UNLIKELY(forwardAddress == 0)) {
        LOG_ECMA_MEM(FATAL) << "Evacuate AppSpawn Object: alloc failed: "
                            << " size: " << size;
        UNREACHABLE();
    }
    return forwardAddress;
}

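// CAS-based evacuation for the compress GC. The slot never needs to be revisited, so both the
// success and failure paths return CLEAR_SLOT.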
inline SlotStatus CompressGCMarker::EvacuateObject(uint32_t threadId, TaggedObject *object, const MarkWord &markWord,
                                                   ObjectSlot slot)
{
    JSHClass *klass = markWord.GetJSHClass();
    size_t size = klass->SizeFromJSHClass(object);
    uintptr_t forwardAddress = AllocateForwardAddress(threadId, size, klass, object);
    auto oldValue = markWord.GetValue();
    auto result = Barriers::AtomicSetPrimitive(object, 0, oldValue,
                                               MarkWord::FromForwardingAddress(forwardAddress));
    if (result == oldValue) {
        UpdateForwardAddressIfSuccess(threadId, object, klass, forwardAddress, size, markWord, slot);
        if (isAppSpawn_ && klass->IsString()) {
            // Calculate and set the hash code of the read-only EcmaString in advance.
            EcmaStringAccessor(reinterpret_cast<TaggedObject *>(forwardAddress)).GetHashcode();
        }
        return SlotStatus::CLEAR_SLOT;
    }
    UpdateForwardAddressIfFailed(object, forwardAddress, size, slot);
    return SlotStatus::CLEAR_SLOT;
}

inline void CompressGCMarker::RecordWeakReference(uint32_t threadId, JSTaggedType *ref,
                                                  [[maybe_unused]] Region *objectRegion)
{
    workManager_->PushWeakReference(threadId, ref);
}

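// In app-spawn mode every region except huge-object, read-only and non-movable regions is
// evacuated; otherwise only young- and old-space regions are.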
inline bool CompressGCMarker::NeedEvacuate(Region *region)
{
    if (isAppSpawn_) {
        return !region->InHugeObjectSpace() && !region->InReadOnlySpace() && !region->InNonMovableSpace();
    }
    return region->InYoungOrOldSpace();
}
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_MEM_PARALLEL_MARKER_INL_H