/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ecmascript/serializer/base_deserializer.h"

#include "common_interfaces/heap/heap_allocator.h"
#include "ecmascript/free_object.h"
#include "ecmascript/global_env.h"
#include "ecmascript/js_arraybuffer.h"
#include "ecmascript/js_function.h"
#include "ecmascript/js_regexp.h"
#include "ecmascript/checkpoint/thread_state_transition.h"
#include "ecmascript/napi/jsnapi_helper.h"
#include "mem/mem.h"

namespace panda::ecmascript {

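// The constructor resolves the shared-object chunk registered at serialization time: a non-zero
// data index is looked up in the runtime's serialize root map, and a missing entry is fatal.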
BaseDeserializer::BaseDeserializer(JSThread *thread, SerializeData *data, void *hint)
    : thread_(thread), data_(data), engine_(hint), heap_(const_cast<Heap *>(thread->GetEcmaVM()->GetHeap()))
{
    sheap_ = SharedHeap::GetInstance();
    uint32_t index = data_->GetDataIndex();
    if (index != 0) {
        sharedObjChunk_ = Runtime::GetInstance()->GetSerializeRootMapValue(thread_, index);
        if (sharedObjChunk_ == nullptr) {
            LOG_ECMA(FATAL) << "Unknown serializer root index: " << index;
            UNREACHABLE();
        }
    }
}

JSHandle<JSTaggedValue> BaseDeserializer::ReadValue()
{
    ECMA_BYTRACE_NAME(HITRACE_LEVEL_COMMERCIAL, HITRACE_TAG_ARK,
        ("Deserialize dataSize: " + std::to_string(data_->Size())).c_str(), "");
    if (data_->IsIncompleteData()) {
        LOG_ECMA(ERROR) << "The serialization data is incomplete";
        return JSHandle<JSTaggedValue>();
    }
    JSHandle<JSTaggedValue> res = DeserializeJSTaggedValue();
    return res;
}

JSHandle<JSTaggedValue> BaseDeserializer::DeserializeJSTaggedValue()
{
    // Use a heap address to store the result object so the process is uniform and no special
    // `IsRoot` check is needed.
    JSHandle<TaggedArray> resHolderHandle = heap_->GetEcmaVM()->GetFactory()->NewTaggedArray(1);
    AllocateToDifferentSpaces();

    // stop gc during deserialization
    heap_->SetOnSerializeEvent(true);

    uint8_t encodeFlag = data_->ReadUint8(position_);
    uintptr_t resHolderAddr = static_cast<uintptr_t>(resHolderHandle.GetTaggedType());
    while (ReadSingleEncodeData(encodeFlag, resHolderAddr, TaggedArray::DATA_OFFSET) == 0) {
        encodeFlag = data_->ReadUint8(position_);
    }

    // initialize concurrent functions here
    for (auto func : concurrentFunctions_) {
        JSFunction::InitializeForConcurrentFunction(thread_, func);
    }
    concurrentFunctions_.clear();

    // create native binding objects here
    for (auto &nativeBindingInfo : nativeBindingAttachInfos_) {
        DeserializeNativeBindingObject(&nativeBindingInfo);
    }
    nativeBindingAttachInfos_.clear();

    // create js errors here
    for (auto &jsErrorInfo : jsErrorInfos_) {
        DeserializeJSError(&jsErrorInfo);
    }
    jsErrorInfos_.clear();

    // Some other special recorded objects may need processing at the end
    DeserializeSpecialRecordedObjects();

    // resume gc after deserialization
    heap_->SetOnSerializeEvent(false);

    return JSHandle<JSTaggedValue>(thread_, resHolderHandle->Get(thread_, 0));
}

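// Reads the object's size, reserves its address in the space pre-allocated for it, records the
// address so later REFERENCE flags can resolve to it, and then deserializes its fields in place.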
uintptr_t BaseDeserializer::DeserializeTaggedObject(SerializedObjectSpace space)
{
    size_t objSize = data_->ReadUint32(position_);
    uintptr_t res = RelocateObjectAddr(space, objSize);
    objectVector_.push_back(static_cast<JSTaggedType>(res));
    DeserializeObjectField(res, res + objSize);
    return res;
}

void BaseDeserializer::DeserializeObjectField(uintptr_t start, uintptr_t end)
{
    size_t offset = 0; // 0: initial offset
    while (start + offset < end) {
        uint8_t encodeFlag = data_->ReadUint8(position_);
        offset += ReadSingleEncodeData(encodeFlag, start, offset);
    }
}

void BaseDeserializer::DeserializeNativeBindingObject(NativeBindingAttachInfo *info)
{
    [[maybe_unused]] EcmaHandleScope scope(thread_);
    AttachFunc af = info->af_;
    void *bufferPointer = info->bufferPointer_;
    void *hint = info->hint_;
    void *attachData = info->attachData_;
    Local<JSValueRef> attachVal;
    {
        ThreadNativeScope nativeScope(thread_);
        attachVal = af(engine_, bufferPointer, hint, attachData);
    }
    if (attachVal.IsEmpty()) {
        LOG_ECMA(ERROR) << "NativeBindingObject is empty";
        attachVal = JSValueRef::Undefined(thread_->GetEcmaVM());
    }
    JSTaggedType res = JSNApiHelper::ToJSHandle(attachVal).GetTaggedType();
    ObjectSlot slot = info->GetSlot();
    slot.Update(res);
    if (!JSTaggedValue(res).IsInvalidValue()) {
        WriteBarrier(thread_, reinterpret_cast<void *>(info->GetObjAddr()), info->GetFieldOffset(), res);
    }
}

void BaseDeserializer::DeserializeJSError(JSErrorInfo *info)
{
    [[maybe_unused]] EcmaHandleScope scope(thread_);
    uint8_t type = info->errorType_;
    base::ErrorType errorType = base::ErrorType(type - static_cast<uint8_t>(JSType::JS_ERROR_FIRST));
    JSHandle<JSTaggedValue> errorMsg = info->errorMsg_;
    ObjectFactory *factory = thread_->GetEcmaVM()->GetFactory();
    JSHandle<JSObject> errorTag = factory->NewJSError(errorType, JSHandle<EcmaString>(errorMsg), StackCheck::NO);
    ObjectSlot slot = info->GetSlot();
    slot.Update(errorTag.GetTaggedType());
    if (!errorTag.GetTaggedValue().IsInvalidValue()) {
        WriteBarrier(thread_, reinterpret_cast<void *>(info->GetObjAddr()), info->GetFieldOffset(),
                     errorTag.GetTaggedType());
    }
}

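// Consumes the prefix state set by earlier encode flags (weak, transfer/shared array buffer, native
// buffer pointer, module names, lazy-import array), deserializes the new object, and then applies
// type-specific fix-ups for native pointers, concurrent/shared functions and source text modules.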
void BaseDeserializer::HandleNewObjectEncodeFlag(SerializedObjectSpace space, uintptr_t objAddr, size_t fieldOffset)
{
    // deserialize object prologue
    bool isWeak = GetAndResetWeak();
    bool isTransferBuffer = GetAndResetTransferBuffer();
    bool isSharedArrayBuffer = GetAndResetSharedArrayBuffer();
    void *bufferPointer = GetAndResetBufferPointer();
    // save lazyArray, moduleFileName and moduleRecordName here, since they change in DeserializeTaggedObject
    bool* lazyArray = GetLazyArray();
    CString moduleFileName = moduleFileName_;
    moduleFileName_.clear();
    CString moduleRecordName = moduleRecordName_;
    moduleRecordName_.clear();
    // deserialize object here
    uintptr_t addr = DeserializeTaggedObject(space);

    // deserialize object epilogue
    if (isTransferBuffer) {
        TransferArrayBufferAttach(addr);
    } else if (isSharedArrayBuffer) {
        IncreaseSharedArrayBufferReference(addr);
    } else if (bufferPointer != nullptr) {
        ResetNativePointerBuffer(addr, bufferPointer);
    }
    TaggedObject *object = reinterpret_cast<TaggedObject *>(addr);
    if (object->GetClass()->IsJSNativePointer()) {
        JSNativePointer *nativePointer = reinterpret_cast<JSNativePointer *>(object);
        if (nativePointer->GetDeleter() != nullptr) {
            if (!object->GetClass()->IsJSShared()) {
                thread_->GetEcmaVM()->PushToNativePointerList(nativePointer);
            }
        }
    } else if (object->GetClass()->IsJSFunction()) {
        JSFunction* func = reinterpret_cast<JSFunction *>(object);
        FunctionKind funcKind = func->GetFunctionKind(thread_);
        if (funcKind == FunctionKind::CONCURRENT_FUNCTION || object->GetClass()->IsJSSharedFunction()) {
            // defer initializing the concurrent function
            JSHandle<JSFunction> funcHandle(thread_, func);
            concurrentFunctions_.push_back(funcHandle);
        }
        func->SetRawProfileTypeInfo<SKIP_BARRIER>(thread_, thread_->GlobalConstants()->GetEmptyProfileTypeInfoCell());
        func->SetWorkNodePointer(reinterpret_cast<uintptr_t>(nullptr));
    } else if (object->GetClass()->IsSourceTextModule()) {
        SourceTextModule* module = reinterpret_cast<SourceTextModule *>(object);
        module->SetEcmaModuleFilenameStringForDeserialize(moduleFileName);
        module->SetEcmaModuleRecordNameStringForDeserialize(moduleRecordName);
        module->SetLazyImportArrayForDeserialize(lazyArray);
        if (module->GetStatus() > ModuleStatus::INSTANTIATED) {
            module->SetStatus(ModuleStatus::INSTANTIATED);
        }
        module->SetException(thread_, thread_->GlobalConstants()->GetHole());
        module->SetCycleRoot(thread_, JSTaggedValue(module));
    }
    UpdateMaybeWeak(ObjectSlot(objAddr + fieldOffset), addr, isWeak);
    WriteBarrier<WriteBarrierType::DESERIALIZE>(thread_, reinterpret_cast<void *>(objAddr), fieldOffset,
                                                static_cast<JSTaggedType>(addr));
}

void BaseDeserializer::TransferArrayBufferAttach(uintptr_t objAddr)
{
    ASSERT(JSTaggedValue(static_cast<JSTaggedType>(objAddr)).IsArrayBuffer());
    JSArrayBuffer *arrayBuffer = reinterpret_cast<JSArrayBuffer *>(objAddr);
    size_t arrayLength = arrayBuffer->GetArrayBufferByteLength();
    bool withNativeAreaAllocator = arrayBuffer->GetWithNativeAreaAllocator();
    JSNativePointer *np =
        reinterpret_cast<JSNativePointer *>(arrayBuffer->GetArrayBufferData(thread_).GetTaggedObject());
    arrayBuffer->Attach(thread_, arrayLength, JSTaggedValue(np), withNativeAreaAllocator);
}

void BaseDeserializer::IncreaseSharedArrayBufferReference(uintptr_t objAddr)
{
    ASSERT(JSTaggedValue(static_cast<JSTaggedType>(objAddr)).IsSharedArrayBuffer());
    JSArrayBuffer *arrayBuffer = reinterpret_cast<JSArrayBuffer *>(objAddr);
    size_t arrayLength = arrayBuffer->GetArrayBufferByteLength();
    JSNativePointer *np =
        reinterpret_cast<JSNativePointer *>(arrayBuffer->GetArrayBufferData(thread_).GetTaggedObject());
    void *buffer = np->GetExternalPointer();
    if (JSSharedMemoryManager::GetInstance()->CreateOrLoad(&buffer, arrayLength)) {
        LOG_ECMA(FATAL) << "BaseDeserializer::IncreaseSharedArrayBufferReference failed";
    }
}

void BaseDeserializer::ResetNativePointerBuffer(uintptr_t objAddr, void *bufferPointer)
{
    JSTaggedValue obj = JSTaggedValue(static_cast<JSTaggedType>(objAddr));
    ASSERT(obj.IsArrayBuffer() || obj.IsJSRegExp());
    auto nativeAreaAllocator = thread_->GetEcmaVM()->GetNativeAreaAllocator();
    JSNativePointer *np = nullptr;
    if (obj.IsArrayBuffer()) {
        JSArrayBuffer *arrayBuffer = reinterpret_cast<JSArrayBuffer *>(objAddr);
        arrayBuffer->SetWithNativeAreaAllocator(true);
        np = reinterpret_cast<JSNativePointer *>(arrayBuffer->GetArrayBufferData(thread_).GetTaggedObject());
        nativeAreaAllocator->IncreaseNativeSizeStats(arrayBuffer->GetArrayBufferByteLength(), NativeFlag::ARRAY_BUFFER);
    } else {
        JSRegExp *jsRegExp = reinterpret_cast<JSRegExp *>(objAddr);
        np = reinterpret_cast<JSNativePointer *>(jsRegExp->GetByteCodeBuffer(thread_).GetTaggedObject());
        nativeAreaAllocator->IncreaseNativeSizeStats(jsRegExp->GetLength(), NativeFlag::REGEXP_BTYECODE);
    }

    np->SetExternalPointer(bufferPointer);
    np->SetDeleter(NativeAreaAllocator::FreeBufferFunc);
    np->SetData(thread_->GetEcmaVM()->GetNativeAreaAllocator());
}

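// Handles one encoded entry and returns how many bytes of the current object's fields it filled:
// sizeof(JSTaggedType) for a normal slot, the raw size for MULTI_RAW_DATA, and 0 for prefix flags
// that only set deserializer state for the value that follows.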
size_t BaseDeserializer::ReadSingleEncodeData(uint8_t encodeFlag, uintptr_t objAddr, size_t fieldOffset)
{
    size_t handledFieldSize = sizeof(JSTaggedType);
    ObjectSlot slot(objAddr + fieldOffset);
    switch (encodeFlag) {
        case (uint8_t)SerializedObjectSpace::REGULAR_SPACE:
        case (uint8_t)SerializedObjectSpace::PIN_SPACE:
        case (uint8_t)SerializedObjectSpace::LARGE_SPACE:
        case (uint8_t)SerializedObjectSpace::OLD_SPACE:
        case (uint8_t)SerializedObjectSpace::NON_MOVABLE_SPACE:
        case (uint8_t)SerializedObjectSpace::MACHINE_CODE_SPACE:
        case (uint8_t)SerializedObjectSpace::HUGE_SPACE:
        case (uint8_t)SerializedObjectSpace::SHARED_OLD_SPACE:
        case (uint8_t)SerializedObjectSpace::SHARED_NON_MOVABLE_SPACE:
        case (uint8_t)SerializedObjectSpace::SHARED_HUGE_SPACE: {
            SerializedObjectSpace space = SerializeData::DecodeSpace(encodeFlag);
            HandleNewObjectEncodeFlag(space, objAddr, fieldOffset);
            break;
        }
        case (uint8_t)EncodeFlag::REFERENCE: {
            uint32_t valueIndex = data_->ReadUint32(position_);
            JSTaggedType valueAddr = objectVector_.at(valueIndex);
            UpdateMaybeWeak(slot, valueAddr, GetAndResetWeak());
            WriteBarrier<WriteBarrierType::DESERIALIZE>(thread_, reinterpret_cast<void *>(objAddr), fieldOffset,
                                                        valueAddr);
            break;
        }
        case (uint8_t)EncodeFlag::WEAK: {
            ASSERT(!isWeak_);
            isWeak_ = true;
            handledFieldSize = 0;
            break;
        }
        case (uint8_t)EncodeFlag::PRIMITIVE: {
            JSTaggedType value = data_->ReadJSTaggedType(position_);
            slot.Update(value);
            break;
        }
        case (uint8_t)EncodeFlag::MULTI_RAW_DATA: {
            uint32_t size = data_->ReadUint32(position_);
            data_->ReadRawData(objAddr + fieldOffset, size, position_);
            handledFieldSize = size;
            break;
        }
        case (uint8_t)EncodeFlag::ROOT_OBJECT: {
            uint32_t index = data_->ReadUint32(position_);
            uintptr_t valueAddr = thread_->GetEcmaVM()->GetSnapshotEnv()->RelocateRootObjectAddr(index);
            if (valueAddr > JSTaggedValue::INVALID_VALUE_LIMIT) {
                WriteBarrier(thread_, reinterpret_cast<void *>(objAddr), fieldOffset,
                             static_cast<JSTaggedType>(valueAddr));
            }
            UpdateMaybeWeak(slot, valueAddr, GetAndResetWeak());
            break;
        }
        case (uint8_t)EncodeFlag::OBJECT_PROTO: {
            uint8_t type = data_->ReadUint8(position_);
            uintptr_t protoAddr = RelocateObjectProtoAddr(type);
            if (protoAddr > JSTaggedValue::INVALID_VALUE_LIMIT) {
                WriteBarrier(thread_, reinterpret_cast<void *>(objAddr), fieldOffset,
                             static_cast<JSTaggedType>(protoAddr));
            }
            UpdateMaybeWeak(slot, protoAddr, GetAndResetWeak());
            break;
        }
        case (uint8_t)EncodeFlag::TRANSFER_ARRAY_BUFFER: {
            isTransferArrayBuffer_ = true;
            handledFieldSize = 0;
            break;
        }
        case (uint8_t)EncodeFlag::SHARED_ARRAY_BUFFER: {
            isSharedArrayBuffer_ = true;
            handledFieldSize = 0;
            break;
        }
        case (uint8_t)EncodeFlag::ARRAY_BUFFER:
        case (uint8_t)EncodeFlag::SENDABLE_ARRAY_BUFFER:
        case (uint8_t)EncodeFlag::JS_REG_EXP: {
            size_t bufferLength = data_->ReadUint32(position_);
            auto nativeAreaAllocator = thread_->GetEcmaVM()->GetNativeAreaAllocator();
            bufferPointer_ = nativeAreaAllocator->AllocateBuffer(bufferLength);
            heap_->IncNativeSizeAfterLastGC(bufferLength);
            data_->ReadRawData(ToUintPtr(bufferPointer_), bufferLength, position_);
            heap_->IncreaseNativeBindingSize(bufferLength);
            handledFieldSize = 0;
            break;
        }
        case (uint8_t)EncodeFlag::NATIVE_BINDING_OBJECT: {
            slot.Update(JSTaggedValue::Undefined().GetRawData());
            AttachFunc af = reinterpret_cast<AttachFunc>(data_->ReadJSTaggedType(position_));
            void *bufferPointer = reinterpret_cast<void *>(data_->ReadJSTaggedType(position_));
            void *hint = reinterpret_cast<void *>(data_->ReadJSTaggedType(position_));
            void *attachData = reinterpret_cast<void *>(data_->ReadJSTaggedType(position_));
            JSHandle<JSTaggedValue> obj(thread_, JSTaggedValue(static_cast<JSTaggedType>(objAddr)));
            // defer creating the native binding object until deserialization finishes
            nativeBindingAttachInfos_.emplace_back(af, bufferPointer, hint, attachData, obj, fieldOffset);
            break;
        }
        case (uint8_t)EncodeFlag::JS_ERROR: {
            slot.Update(JSTaggedValue::Undefined().GetRawData());
            uint8_t type = data_->ReadUint8(position_);
            ASSERT(type >= static_cast<uint8_t>(JSType::JS_ERROR_FIRST)
                && type <= static_cast<uint8_t>(JSType::JS_ERROR_LAST));
            JSHandle<JSTaggedValue> obj(thread_, JSTaggedValue(static_cast<JSTaggedType>(objAddr)));
            jsErrorInfos_.emplace_back(type, JSHandle<JSTaggedValue>(thread_, JSTaggedValue::Undefined()), obj,
                                       fieldOffset);
            uint8_t flag = data_->ReadUint8(position_);
            if (flag == 1) { // error msg is string
                isErrorMsg_ = true;
                handledFieldSize = 0;
            }
            break;
        }
        case (uint8_t)EncodeFlag::SHARED_OBJECT: {
            uint32_t index = data_->ReadUint32(position_);
            if (UNLIKELY(index >= sharedObjChunk_->Size())) {
                LOG_ECMA(FATAL) << "Shared object index invalid, index: " << index << " chunkSize: "
                    << sharedObjChunk_->Size();
                UNREACHABLE();
            }
            JSTaggedType value = sharedObjChunk_->Get(index);
            objectVector_.push_back(value);
            bool isErrorMsg = GetAndResetIsErrorMsg();
            if (isErrorMsg) {
                // defer creating the js error
                jsErrorInfos_.back().errorMsg_ = JSHandle<JSTaggedValue>(thread_, JSTaggedValue(value));
                break;
            }
            WriteBarrier(thread_, reinterpret_cast<void *>(objAddr), fieldOffset, value);
            UpdateMaybeWeak(slot, value, GetAndResetWeak());
            break;
        }
        case (uint8_t)EncodeFlag::GLOBAL_ENV: {
            slot.Update(thread_->GetGlobalEnv().GetTaggedValue().GetTaggedObject());
            break;
        }
        case (uint8_t)EncodeFlag::MODULE_FILE_NAME: {
            uint32_t len = data_->ReadUint32(position_);
            if (len > 0) {
                moduleFileName_.resize(len);
                data_->ReadRawData(ToUintPtr(moduleFileName_.data()), len, position_);
            } else {
                LOG_ECMA(FATAL) << "ReadSingleEncodeData MODULE_FILE_NAME is empty.";
            }
            handledFieldSize = 0;
            break;
        }
        case (uint8_t)EncodeFlag::MODULE_RECORD_NAME: {
            uint32_t len = data_->ReadUint32(position_);
            if (len > 0) {
                moduleRecordName_.resize(len);
                data_->ReadRawData(ToUintPtr(moduleRecordName_.data()), len, position_);
            } else {
                moduleRecordName_ = "";
            }
            handledFieldSize = 0;
            break;
        }
        case (uint8_t)EncodeFlag::MODULE_LAZY_ARRAY: {
            uint32_t len = data_->ReadUint32(position_);
            if (len > 0) {
                // moduleLazyArray_ will be deleted in the module release process.
                moduleLazyArray_ = new bool[len / sizeof(bool)];
                data_->ReadRawData(ToUintPtr(moduleLazyArray_), len, position_);
            } else {
                moduleLazyArray_ = nullptr;
            }
            handledFieldSize = 0;
            break;
        }
        default:
            // This flag may be supported by a subclass.
            return DerivedExtraReadSingleEncodeData(encodeFlag, objAddr, fieldOffset);
    }
    return handledFieldSize;
}

size_t BaseDeserializer::DerivedExtraReadSingleEncodeData(uint8_t encodeFlag, uintptr_t objAddr, size_t fieldOffset)
{
    LOG_ECMA(FATAL) << "this branch is unreachable " << static_cast<int>(encodeFlag);
    UNREACHABLE();
}

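// Bump-allocates objSize bytes inside the memory reserved by AllocateToDifferentSpaces for the given
// space, switching to the next pre-allocated region from regionVector_ when the current one is full;
// large/huge objects get their own allocation instead.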
uintptr_t BaseDeserializer::RelocateObjectAddr(SerializedObjectSpace space, size_t objSize)
{
    uintptr_t res = 0U;
    switch (space) {
        case SerializedObjectSpace::REGULAR_SPACE: {
            if (currentRegularObjectAddr_ + objSize >
                    currentRegularRegionBeginAddr_ + common::Heap::GetNormalRegionAvailableSize()) {
                ASSERT(regularRegionIndex_ < regionVector_.size());
                currentRegularObjectAddr_ = regionVector_[regularRegionIndex_++];
                currentRegularRegionBeginAddr_ = currentRegularObjectAddr_;
            }
            res = currentRegularObjectAddr_;
            currentRegularObjectAddr_ += objSize;
            break;
        }
        case SerializedObjectSpace::PIN_SPACE: {
            if (currentPinObjectAddr_ + objSize >
                    currentPinRegionBeginAddr_ + common::Heap::GetNormalRegionAvailableSize()) {
                ASSERT(pinRegionIndex_ < regionVector_.size());
                currentPinObjectAddr_ = regionVector_[pinRegionIndex_++];
                currentPinRegionBeginAddr_ = currentPinObjectAddr_;
            }
            res = currentPinObjectAddr_;
            currentPinObjectAddr_ += objSize;
            break;
        }
        case SerializedObjectSpace::LARGE_SPACE: {
            // no gc during this allocation
            res = common::HeapAllocator::AllocateLargeRegion(objSize);
            if (res == 0) {
                DeserializeFatalOutOfMemory(objSize, false, false);
            }
            break;
        }
        case SerializedObjectSpace::OLD_SPACE: {
            if (oldSpaceBeginAddr_ + objSize > AlignUp(oldSpaceBeginAddr_, DEFAULT_REGION_SIZE)) {
                ASSERT(oldRegionIndex_ < regionVector_.size());
                oldSpaceBeginAddr_ = regionVector_[oldRegionIndex_++];
            }
            res = oldSpaceBeginAddr_;
            oldSpaceBeginAddr_ += objSize;
            break;
        }
        case SerializedObjectSpace::NON_MOVABLE_SPACE: {
            if (nonMovableSpaceBeginAddr_ + objSize > AlignUp(nonMovableSpaceBeginAddr_, DEFAULT_REGION_SIZE)) {
                ASSERT(nonMovableRegionIndex_ < regionVector_.size());
                nonMovableSpaceBeginAddr_ = regionVector_[nonMovableRegionIndex_++];
            }
            res = nonMovableSpaceBeginAddr_;
            nonMovableSpaceBeginAddr_ += objSize;
            break;
        }
        case SerializedObjectSpace::MACHINE_CODE_SPACE: {
            if (machineCodeSpaceBeginAddr_ + objSize > AlignUp(machineCodeSpaceBeginAddr_, DEFAULT_REGION_SIZE)) {
                ASSERT(machineCodeRegionIndex_ < regionVector_.size());
                machineCodeSpaceBeginAddr_ = regionVector_[machineCodeRegionIndex_++];
            }
            res = machineCodeSpaceBeginAddr_;
            machineCodeSpaceBeginAddr_ += objSize;
            break;
        }
        case SerializedObjectSpace::HUGE_SPACE: {
            // no gc during this allocation
            res = heap_->GetHugeObjectSpace()->Allocate(objSize, thread_, AllocateEventType::DESERIALIZE);
            if (res == 0) {
                DeserializeFatalOutOfMemory(objSize, false, false);
            }
            break;
        }
        case SerializedObjectSpace::SHARED_OLD_SPACE: {
            if (sOldSpaceBeginAddr_ + objSize > AlignUp(sOldSpaceBeginAddr_, DEFAULT_REGION_SIZE)) {
                ASSERT(sOldRegionIndex_ < regionVector_.size());
                sOldSpaceBeginAddr_ = regionVector_[sOldRegionIndex_++];
            }
            res = sOldSpaceBeginAddr_;
            sOldSpaceBeginAddr_ += objSize;
            break;
        }
        case SerializedObjectSpace::SHARED_NON_MOVABLE_SPACE: {
            if (sNonMovableSpaceBeginAddr_ + objSize > AlignUp(sNonMovableSpaceBeginAddr_, DEFAULT_REGION_SIZE)) {
                ASSERT(sNonMovableRegionIndex_ < regionVector_.size());
                sNonMovableSpaceBeginAddr_ = regionVector_[sNonMovableRegionIndex_++];
            }
            res = sNonMovableSpaceBeginAddr_;
            sNonMovableSpaceBeginAddr_ += objSize;
            break;
        }
        case SerializedObjectSpace::SHARED_HUGE_SPACE: {
            // no gc during this allocation
            res = sheap_->GetHugeObjectSpace()->Allocate(thread_, objSize, AllocateEventType::DESERIALIZE);
            if (res == 0) {
                DeserializeFatalOutOfMemory(objSize, false, true);
            }
            break;
        }
        default:
            LOG_ECMA(FATAL) << "this branch is unreachable";
            UNREACHABLE();
    }
    return res;
}

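// Maps a serialized object-type tag back to the corresponding prototype object in the current VM's
// global environment.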
JSTaggedType BaseDeserializer::RelocateObjectProtoAddr(uint8_t objectType)
{
    auto env = thread_->GetEcmaVM()->GetGlobalEnv();
    switch (objectType) {
        case (uint8_t)JSType::JS_OBJECT:
            return env->GetObjectFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_ERROR:
            return JSHandle<JSFunction>(env->GetErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_EVAL_ERROR:
            return JSHandle<JSFunction>(env->GetEvalErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_RANGE_ERROR:
            return JSHandle<JSFunction>(env->GetRangeErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_REFERENCE_ERROR:
            return JSHandle<JSFunction>(env->GetReferenceErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_TYPE_ERROR:
            return JSHandle<JSFunction>(env->GetTypeErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_AGGREGATE_ERROR:
            return JSHandle<JSFunction>(env->GetAggregateErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_URI_ERROR:
            return JSHandle<JSFunction>(env->GetURIErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_SYNTAX_ERROR:
            return JSHandle<JSFunction>(env->GetSyntaxErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_OOM_ERROR:
            return JSHandle<JSFunction>(env->GetOOMErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_TERMINATION_ERROR:
            return JSHandle<JSFunction>(env->GetTerminationErrorFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_DATE:
            return env->GetDatePrototype().GetTaggedType();
        case (uint8_t)JSType::JS_ARRAY:
            return env->GetArrayPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_ARRAY:
            return env->GetSharedArrayPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_API_BITVECTOR:
            return env->GetBitVectorPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_MAP:
            return env->GetMapPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_MAP:
            return env->GetSharedMapPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SET:
            return env->GetSetPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_SET:
            return env->GetSharedSetPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SENDABLE_ARRAY_BUFFER:
            return env->GetSendableArrayBufferPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_REG_EXP:
            return env->GetRegExpPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_INT8_ARRAY:
            return env->GetInt8ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_UINT8_ARRAY:
            return env->GetUint8ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_UINT8_CLAMPED_ARRAY:
            return env->GetUint8ClampedArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_INT16_ARRAY:
            return env->GetInt16ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_UINT16_ARRAY:
            return env->GetUint16ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_INT32_ARRAY:
            return env->GetInt32ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_UINT32_ARRAY:
            return env->GetUint32ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_FLOAT32_ARRAY:
            return env->GetFloat32ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_FLOAT64_ARRAY:
            return env->GetFloat64ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_BIGINT64_ARRAY:
            return env->GetBigInt64ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_BIGUINT64_ARRAY:
            return env->GetBigUint64ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_INT8_ARRAY:
            return env->GetSharedInt8ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_UINT8_ARRAY:
            return env->GetSharedUint8ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_UINT8_CLAMPED_ARRAY:
            return env->GetSharedUint8ClampedArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_INT16_ARRAY:
            return env->GetSharedInt16ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_UINT16_ARRAY:
            return env->GetSharedUint16ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_INT32_ARRAY:
            return env->GetSharedInt32ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_UINT32_ARRAY:
            return env->GetSharedUint32ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_FLOAT32_ARRAY:
            return env->GetSharedFloat32ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_FLOAT64_ARRAY:
            return env->GetSharedFloat64ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_BIGINT64_ARRAY:
            return env->GetSharedBigInt64ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_BIGUINT64_ARRAY:
            return env->GetSharedBigUint64ArrayFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_ARRAY_BUFFER:
            return JSHandle<JSFunction>(env->GetArrayBufferFunction())->GetFunctionPrototype(thread_).GetRawData();
        case (uint8_t)JSType::JS_SHARED_ARRAY_BUFFER:
            return JSHandle<JSFunction>(env->GetSharedArrayBufferFunction())
                ->GetFunctionPrototype(thread_)
                .GetRawData();
        case (uint8_t)JSType::JS_ASYNC_FUNCTION:
            return env->GetAsyncFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::JS_SHARED_ASYNC_FUNCTION:
            return env->GetSAsyncFunctionPrototype().GetTaggedType();
        case (uint8_t)JSType::BIGINT:
            return JSHandle<JSFunction>(env->GetBigIntFunction())->GetFunctionPrototype(thread_).GetRawData();
        default:
            LOG_ECMA(FATAL) << "Relocate unsupported JSType: " << JSHClass::DumpJSType(static_cast<JSType>(objectType));
            UNREACHABLE();
            break;
    }
}

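// Pre-reserves memory for every space the serialized data reports a non-zero size for, so that
// RelocateObjectAddr can later hand out addresses without triggering GC: CMC regular/pin spaces when
// CMC GC is enabled, otherwise the old, non-movable, machine-code and shared spaces.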
void BaseDeserializer::AllocateToDifferentSpaces()
{
    if (g_isEnableCMCGC) {
        size_t regularSpaceSize = data_->GetRegularSpaceSize();
        if (regularSpaceSize > 0) {
            // statistic object size
            AllocateToRegularSpace(regularSpaceSize);
        }
        size_t pinSpaceSize = data_->GetPinSpaceSize();
        if (pinSpaceSize > 0) {
            // statistic object size
            AllocateToPinSpace(pinSpaceSize);
        }
    } else {
        size_t oldSpaceSize = data_->GetOldSpaceSize();
        if (oldSpaceSize > 0) {
            heap_->GetOldSpace()->IncreaseLiveObjectSize(oldSpaceSize);
            AllocateToOldSpace(oldSpaceSize);
        }
        size_t nonMovableSpaceSize = data_->GetNonMovableSpaceSize();
        if (nonMovableSpaceSize > 0) {
            heap_->GetNonMovableSpace()->IncreaseLiveObjectSize(nonMovableSpaceSize);
            AllocateToNonMovableSpace(nonMovableSpaceSize);
        }
        size_t machineCodeSpaceSize = data_->GetMachineCodeSpaceSize();
        if (machineCodeSpaceSize > 0) {
            heap_->GetMachineCodeSpace()->IncreaseLiveObjectSize(machineCodeSpaceSize);
            AllocateToMachineCodeSpace(machineCodeSpaceSize);
        }
        size_t sOldSpaceSize = data_->GetSharedOldSpaceSize();
        if (sOldSpaceSize > 0) {
            sheap_->GetOldSpace()->IncreaseLiveObjectSize(sOldSpaceSize);
            AllocateToSharedOldSpace(sOldSpaceSize);
        }
        size_t sNonMovableSpaceSize = data_->GetSharedNonMovableSpaceSize();
        if (sNonMovableSpaceSize > 0) {
            sheap_->GetNonMovableSpace()->IncreaseLiveObjectSize(sNonMovableSpaceSize);
            AllocateToSharedNonMovableSpace(sNonMovableSpaceSize);
        }
    }
}

void BaseDeserializer::AllocateToRegularSpace(size_t regularSpaceSize)
{
    if (regularSpaceSize <= common::Heap::GetNormalRegionAvailableSize()) {
        currentRegularObjectAddr_ = common::HeapAllocator::AllocateNoGC(regularSpaceSize);
    } else {
        currentRegularObjectAddr_ = AllocateMultiCMCRegion(regularSpaceSize, regularRegionIndex_,
                                                           RegionType::RegularRegion);
    }
    currentRegularRegionBeginAddr_ = currentRegularObjectAddr_;
    if (currentRegularObjectAddr_ == 0U) {
        LOG_ECMA(FATAL) << "Deserialize oom error";
    }
}

void BaseDeserializer::AllocateToPinSpace(size_t pinSpaceSize)
{
    if (pinSpaceSize <= common::Heap::GetNormalRegionAvailableSize()) {
        currentPinObjectAddr_ = common::HeapAllocator::AllocatePinNoGC(pinSpaceSize);
    } else {
        currentPinObjectAddr_ = AllocateMultiCMCRegion(pinSpaceSize, pinRegionIndex_, RegionType::PinRegion);
    }
    currentPinRegionBeginAddr_ = currentPinObjectAddr_;
    if (currentPinObjectAddr_ == 0U) {
        LOG_ECMA(FATAL) << "Deserialize oom error";
    }
}

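// Called when the serialized space is larger than a single region: allocates enough CMC regions to
// cover spaceObjSize, records every region after the first in regionVector_, and fills each region's
// unused tail with a free object so the heap stays iterable.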
uintptr_t BaseDeserializer::AllocateMultiCMCRegion(size_t spaceObjSize, size_t &regionIndex, RegionType regionType)
{
    constexpr size_t REGION_SIZE = common::Heap::GetNormalRegionAvailableSize();
    ASSERT(REGION_SIZE != 0);
    ASSERT(spaceObjSize > REGION_SIZE);
    regionIndex = regionVector_.size();
    size_t regionAlignedSize = SerializeData::AlignUpRegionAvailableSize(spaceObjSize);
    ASSERT(regionAlignedSize % REGION_SIZE == 0);
    size_t regionNum = regionAlignedSize / REGION_SIZE;
    uintptr_t firstRegionAddr = 0U;
    std::vector<size_t> regionRemainSizeVector;
    size_t regionRemainSizeIndex = 0;
    if (regionType == RegionType::RegularRegion) {
        regionRemainSizeVector = data_->GetRegularRemainSizeVector();
    } else {
        regionRemainSizeVector = data_->GetPinRemainSizeVector();
    }
    while (regionNum > 0) {
        uintptr_t regionAddr = 0U;
        if (regionType == RegionType::RegularRegion) {
            regionAddr = common::HeapAllocator::AllocateOldRegion();
        } else {
            regionAddr = common::HeapAllocator::AllocatePinnedRegion();
        }
        if (regionAddr == 0U) {
            LOG_ECMA(FATAL) << "Deserialize allocate multi cmc region fail";
        }
        if (firstRegionAddr == 0U) {
            firstRegionAddr = regionAddr;
        } else {
            regionVector_.push_back(regionAddr);
        }
        // fill the unused part of the region
        if (regionNum == 1) {  // last region
            size_t lastRegionRemainSize = regionAlignedSize - spaceObjSize;
            FreeObject::FillFreeObject(heap_, regionAddr + REGION_SIZE - lastRegionRemainSize,
                                       lastRegionRemainSize);
        } else {
            auto regionAliveObjSize = REGION_SIZE - regionRemainSizeVector[regionRemainSizeIndex++];
            FreeObject::FillFreeObject(heap_, regionAddr + regionAliveObjSize, REGION_SIZE - regionAliveObjSize);
        }
        regionNum--;
    }
    return firstRegionAddr;
}

void BaseDeserializer::AllocateMultiRegion(SparseSpace *space, size_t spaceObjSize, size_t &regionIndex,
                                           SerializedObjectSpace spaceType)
{
    ASSERT(spaceType != SerializedObjectSpace::NON_MOVABLE_SPACE);
    regionIndex = regionVector_.size();
    size_t regionAlignedSize = SerializeData::AlignUpRegionAvailableSize(spaceObjSize);
    size_t regionNum = regionAlignedSize / Region::GetRegionAvailableSize();
    size_t index = 0;
    while (regionNum > 1) { // 1: one region was allocated before
        auto regionRemainSizeVector = data_->GetRegionRemainSizeVectors().at(static_cast<uint8_t>(spaceType));
        auto regionAliveObjSize = Region::GetRegionAvailableSize() - regionRemainSizeVector[index++];
        space->GetCurrentRegion()->IncreaseAliveObject(regionAliveObjSize);
        space->ResetTopPointer(space->GetCurrentRegion()->GetBegin() + regionAliveObjSize);
        if (!space->Expand()) {
            DeserializeFatalOutOfMemory(spaceObjSize);
        }
        Region *currentRegion = space->GetCurrentRegion();
        FreeObject::FillFreeObject(heap_, currentRegion->GetBegin(), currentRegion->GetSize());
        regionVector_.push_back(currentRegion->GetBegin());
        regionNum--;
    }
    size_t lastRegionRemainSize = regionAlignedSize - spaceObjSize;
    space->GetCurrentRegion()->IncreaseAliveObject(Region::GetRegionAvailableSize() - lastRegionRemainSize);
    space->ResetTopPointer(space->GetCurrentRegion()->GetEnd() - lastRegionRemainSize);
}

void BaseDeserializer::AllocateMultiNonmovableRegion(SparseSpace *space, size_t spaceObjSize, size_t &regionIndex,
                                                     [[maybe_unused]] SerializedObjectSpace spaceType)
{
    ASSERT(spaceType == SerializedObjectSpace::NON_MOVABLE_SPACE);
    regionIndex = regionVector_.size();
    size_t allocatedSize = 0;
    while (allocatedSize < spaceObjSize) {
        size_t leftSize = spaceObjSize - allocatedSize;
        size_t size = std::min(leftSize, Region::GetRegionAvailableSize());
        uintptr_t obj = space->Allocate(size, false);
        if (obj > 0) {
            if (allocatedSize == 0) {
                // The first region
                nonMovableSpaceBeginAddr_ = obj;
            } else {
                regionVector_.push_back(obj);
            }
        } else {
            DeserializeFatalOutOfMemory(spaceObjSize);
        }
        allocatedSize += size;
    }
}

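// Shared-heap variant: allocates dedicated deserialize regions, restores each region's high water
// mark from the remain-size vector recorded at serialization time, and merges the regions back into
// the space.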
void BaseDeserializer::AllocateMultiSharedRegion(SharedSparseSpace *space, size_t spaceObjSize, size_t &regionIndex,
                                                 SerializedObjectSpace spaceType)
{
    regionIndex = regionVector_.size();
    size_t regionAlignedSize = SerializeData::AlignUpRegionAvailableSize(spaceObjSize);
    size_t regionNum = regionAlignedSize / Region::GetRegionAvailableSize();
    auto regionRemainSizeVector = data_->GetRegionRemainSizeVectors().at(static_cast<uint8_t>(spaceType));
    std::vector<Region *> allocateRegions;
    size_t index = 0;
    while (regionNum > 0) {
        if (space->CommittedSizeExceed()) {
            DeserializeFatalOutOfMemory(spaceObjSize, true, true);
        }
        Region *region = space->AllocateDeserializeRegion(thread_);
        FreeObject::FillFreeObject(sheap_, region->GetBegin(), region->GetSize());
        if (regionNum == 1) { // 1: Last allocate region
            size_t lastRegionRemainSize = regionAlignedSize - spaceObjSize;
            region->SetHighWaterMark(region->GetEnd() - lastRegionRemainSize);
        } else {
            region->SetHighWaterMark(region->GetEnd() - regionRemainSizeVector[index++]);
        }
        region->IncreaseAliveObject(region->GetAllocatedBytes());
        regionVector_.push_back(region->GetBegin());
        allocateRegions.push_back(region);
        regionNum--;
    }
    space->MergeDeserializeAllocateRegions(allocateRegions);
}

void BaseDeserializer::AllocateToOldSpace(size_t oldSpaceSize)
{
    OldSpace *space = heap_->GetOldSpace();
    uintptr_t object = space->AllocateSlow(oldSpaceSize, true);
    if (UNLIKELY(object == 0U)) {
        if (space->CommittedSizeExceed()) {
            DeserializeFatalOutOfMemory(oldSpaceSize);
        }
        oldSpaceBeginAddr_ = space->GetCurrentRegion()->GetBegin();
        FreeObject::FillFreeObject(heap_, oldSpaceBeginAddr_, space->GetCurrentRegion()->GetSize());
        AllocateMultiRegion(space, oldSpaceSize, oldRegionIndex_, SerializedObjectSpace::OLD_SPACE);
    } else {
        FreeObject::FillFreeObject(heap_, object, oldSpaceSize);
        oldSpaceBeginAddr_ = object;
    }
}

void BaseDeserializer::AllocateToNonMovableSpace(size_t nonMovableSpaceSize)
{
    SparseSpace *space = heap_->GetNonMovableSpace();
    uintptr_t object = space->Allocate(nonMovableSpaceSize, false);
    if (UNLIKELY(object == 0U)) {
        AllocateMultiNonmovableRegion(space, nonMovableSpaceSize, nonMovableRegionIndex_,
                                      SerializedObjectSpace::NON_MOVABLE_SPACE);
    } else {
        FreeObject::FillFreeObject(heap_, object, nonMovableSpaceSize);
        nonMovableSpaceBeginAddr_ = object;
    }
}

void BaseDeserializer::AllocateToMachineCodeSpace(size_t machineCodeSpaceSize)
{
    SparseSpace *space = heap_->GetMachineCodeSpace();
    uintptr_t object = space->Allocate(machineCodeSpaceSize, false);
    if (UNLIKELY(object == 0U)) {
        if (space->CommittedSizeExceed()) {
            DeserializeFatalOutOfMemory(machineCodeSpaceSize);
        }
        machineCodeSpaceBeginAddr_ = space->GetCurrentRegion()->GetBegin();
        FreeObject::FillFreeObject(heap_, machineCodeSpaceBeginAddr_, space->GetCurrentRegion()->GetSize());
        AllocateMultiRegion(space, machineCodeSpaceSize, machineCodeRegionIndex_,
                            SerializedObjectSpace::MACHINE_CODE_SPACE);
    } else {
        FreeObject::FillFreeObject(heap_, object, machineCodeSpaceSize);
        machineCodeSpaceBeginAddr_ = object;
    }
}

void BaseDeserializer::AllocateToSharedOldSpace(size_t sOldSpaceSize)
{
    SharedSparseSpace *space = sheap_->GetOldSpace();
    uintptr_t object = space->AllocateNoGCAndExpand(thread_, sOldSpaceSize);
    if (UNLIKELY(object == 0U)) {
        AllocateMultiSharedRegion(space, sOldSpaceSize, sOldRegionIndex_, SerializedObjectSpace::SHARED_OLD_SPACE);
        sOldSpaceBeginAddr_ = regionVector_[sOldRegionIndex_++];
    } else {
        if (thread_->IsSharedConcurrentMarkingOrFinished()) {
            Region *region = Region::ObjectAddressToRange(object);
            region->IncreaseAliveObject(sOldSpaceSize);
        }
        FreeObject::FillFreeObject(sheap_, object, sOldSpaceSize);
        sOldSpaceBeginAddr_ = object;
    }
}

void BaseDeserializer::AllocateToSharedNonMovableSpace(size_t sNonMovableSpaceSize)
{
    SharedNonMovableSpace *space = sheap_->GetNonMovableSpace();
    uintptr_t object = space->AllocateNoGCAndExpand(thread_, sNonMovableSpaceSize);
    if (UNLIKELY(object == 0U)) {
        AllocateMultiSharedRegion(space, sNonMovableSpaceSize, sNonMovableRegionIndex_,
                                  SerializedObjectSpace::SHARED_NON_MOVABLE_SPACE);
        sNonMovableSpaceBeginAddr_ = regionVector_[sNonMovableRegionIndex_++];
    } else {
        if (thread_->IsSharedConcurrentMarkingOrFinished()) {
            Region *region = Region::ObjectAddressToRange(object);
            region->IncreaseAliveObject(sNonMovableSpaceSize);
        }
        FreeObject::FillFreeObject(sheap_, object, sNonMovableSpaceSize);
        sNonMovableSpaceBeginAddr_ = object;
    }
}

}  // namespace panda::ecmascript