// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/accessors.h"
#include "src/codegen/compilation-cache.h"
#include "src/execution/isolate.h"
#include "src/execution/protectors.h"
#include "src/heap/factory.h"
#include "src/heap/heap-inl.h"
#include "src/ic/handler-configuration.h"
#include "src/init/heap-symbols.h"
#include "src/init/setup-isolate.h"
#include "src/interpreter/interpreter.h"
#include "src/objects/arguments.h"
#include "src/objects/call-site-info.h"
#include "src/objects/cell-inl.h"
#include "src/objects/contexts.h"
#include "src/objects/data-handler.h"
#include "src/objects/debug-objects.h"
#include "src/objects/descriptor-array.h"
#include "src/objects/dictionary.h"
#include "src/objects/foreign.h"
#include "src/objects/heap-number.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/js-generator.h"
#include "src/objects/js-weak-refs.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/lookup-cache.h"
#include "src/objects/map.h"
#include "src/objects/microtask.h"
#include "src/objects/objects-inl.h"
#include "src/objects/oddball-inl.h"
#include "src/objects/ordered-hash-table.h"
#include "src/objects/promise.h"
#include "src/objects/property-descriptor-object.h"
#include "src/objects/script.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/smi.h"
#include "src/objects/source-text-module.h"
#include "src/objects/string.h"
#include "src/objects/synthetic-module.h"
#include "src/objects/template-objects-inl.h"
#include "src/objects/torque-defined-classes-inl.h"
#include "src/objects/turbofan-types.h"
#include "src/regexp/regexp.h"

#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-objects.h"
#endif  // V8_ENABLE_WEBASSEMBLY

namespace v8 {
namespace internal {

namespace {

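// Helper for the SharedFunctionInfo roots created in CreateInitialObjects()
// below: wraps the given builtin in a SharedFunctionInfo with an empty name
// and the requested formal parameter count, so that closures (e.g. promise
// reaction handlers) can later be created from it without compiling a script.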
Handle<SharedFunctionInfo> CreateSharedFunctionInfo(
    Isolate* isolate, Builtin builtin, int len,
    FunctionKind kind = FunctionKind::kNormalFunction) {
  Handle<SharedFunctionInfo> shared =
      isolate->factory()->NewSharedFunctionInfoForBuiltin(
          isolate->factory()->empty_string(), builtin, kind);
  shared->set_internal_formal_parameter_count(JSParameterCount(len));
  shared->set_length(len);
  return shared;
}

}  // namespace

bool SetupIsolateDelegate::SetupHeapInternal(Heap* heap) {
  return heap->CreateHeapObjects();
}

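// Creates every heap object the isolate needs before it can run any
// JavaScript: the initial maps, the API support objects, the initial (mostly
// read-only) objects and the internal AccessorInfo objects. The native-context
// and allocation-site lists are reset to undefined because no contexts exist
// yet.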
CreateHeapObjects()74 bool Heap::CreateHeapObjects() {
75 // Create initial maps.
76 if (!CreateInitialMaps()) return false;
77 CreateApiObjects();
78
79 // Create initial objects
80 CreateInitialObjects();
81 CreateInternalAccessorInfoObjects();
82 CHECK_EQ(0u, gc_count_);
83
84 set_native_contexts_list(ReadOnlyRoots(this).undefined_value());
85 set_allocation_sites_list(ReadOnlyRoots(this).undefined_value());
86 set_dirty_js_finalization_registries_list(
87 ReadOnlyRoots(this).undefined_value());
88 set_dirty_js_finalization_registries_list_tail(
89 ReadOnlyRoots(this).undefined_value());
90
91 return true;
92 }
93
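// Tables driving the bulk allocation loops in CreateInitialMaps() and
// CreateInitialObjects() below: each entry pairs an instance type or string
// literal with the RootIndex slot that will hold the resulting map or
// internalized string.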
const Heap::StringTypeTable Heap::string_type_table[] = {
#define STRING_TYPE_ELEMENT(type, size, name, CamelName) \
  {type, size, RootIndex::k##CamelName##Map},
    STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
#undef STRING_TYPE_ELEMENT
};

const Heap::ConstantStringTable Heap::constant_string_table[] = {
    {"", RootIndex::kempty_string},
#define CONSTANT_STRING_ELEMENT(_, name, contents) \
  {contents, RootIndex::k##name},
    INTERNALIZED_STRING_LIST_GENERATOR(CONSTANT_STRING_ELEMENT, /* not used */)
#undef CONSTANT_STRING_ELEMENT
};

const Heap::StructTable Heap::struct_table[] = {
#define STRUCT_TABLE_ELEMENT(TYPE, Name, name) \
  {TYPE, Name::kSize, RootIndex::k##Name##Map},
    STRUCT_LIST(STRUCT_TABLE_ELEMENT)
#undef STRUCT_TABLE_ELEMENT

#define ALLOCATION_SITE_ELEMENT(_, TYPE, Name, Size, name) \
  {TYPE, Name::kSize##Size, RootIndex::k##Name##Size##Map},
    ALLOCATION_SITE_LIST(ALLOCATION_SITE_ELEMENT, /* not used */)
#undef ALLOCATION_SITE_ELEMENT

#define DATA_HANDLER_ELEMENT(_, TYPE, Name, Size, name) \
  {TYPE, Name::kSizeWithData##Size, RootIndex::k##Name##Size##Map},
    DATA_HANDLER_LIST(DATA_HANDLER_ELEMENT, /* not used */)
#undef DATA_HANDLER_ELEMENT
};

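// Allocates and fully initializes a Map for the given instance type. Maps
// that never need mutation after setup go into the read-only space; JSObject
// and Wasm object maps have mutable fields and therefore go into the regular
// map space (see the comment on is_mutable below).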
AllocationResult Heap::AllocateMap(InstanceType instance_type,
                                   int instance_size,
                                   ElementsKind elements_kind,
                                   int inobject_properties) {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  bool is_js_object = InstanceTypeChecker::IsJSObject(instance_type);
  bool is_wasm_object = false;
#if V8_ENABLE_WEBASSEMBLY
  is_wasm_object =
      instance_type == WASM_STRUCT_TYPE || instance_type == WASM_ARRAY_TYPE;
#endif  // V8_ENABLE_WEBASSEMBLY
  DCHECK_IMPLIES(is_js_object &&
                     !Map::CanHaveFastTransitionableElementsKind(instance_type),
                 IsDictionaryElementsKind(elements_kind) ||
                     IsTerminalElementsKind(elements_kind));
  HeapObject result;
  // JSObjects have maps with a mutable prototype_validity_cell, so they cannot
  // go in RO_SPACE. Maps for managed Wasm objects have mutable subtype lists.
  bool is_mutable = is_js_object || is_wasm_object;
  AllocationResult allocation =
      AllocateRaw(Map::kSize, is_mutable ? AllocationType::kMap
                                         : AllocationType::kReadOnly);
  if (!allocation.To(&result)) return allocation;

  result.set_map_after_allocation(ReadOnlyRoots(this).meta_map(),
                                  SKIP_WRITE_BARRIER);
  Map map = isolate()->factory()->InitializeMap(
      Map::cast(result), instance_type, instance_size, elements_kind,
      inobject_properties, this);

  return AllocationResult::FromObject(map);
}

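// Allocates a "partial" map during bootstrapping: at this point the empty
// descriptor array, the null value and the empty DependentCode do not exist
// yet, so only the fields that do not depend on other heap objects are
// initialized here. FinalizePartialMap() fills in the rest once those objects
// have been created.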
AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
                                          int instance_size) {
  Object result;
  AllocationResult allocation =
      AllocateRaw(Map::kSize, AllocationType::kReadOnly);
  if (!allocation.To(&result)) return allocation;
  // Map::cast cannot be used due to uninitialized map field.
  Map map = Map::unchecked_cast(result);
  map.set_map_after_allocation(
      Map::unchecked_cast(isolate()->root(RootIndex::kMetaMap)),
      SKIP_WRITE_BARRIER);
  map.set_instance_type(instance_type);
  map.set_instance_size(instance_size);
  map.set_visitor_id(Map::GetVisitorId(map));
  map.set_inobject_properties_start_or_constructor_function_index(0);
  DCHECK(!map.IsJSObjectMap());
  map.set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid));
  map.SetInObjectUnusedPropertyFields(0);
  map.set_bit_field(0);
  map.set_bit_field2(0);
  int bit_field3 =
      Map::Bits3::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
      Map::Bits3::OwnsDescriptorsBit::encode(true) |
      Map::Bits3::ConstructionCounterBits::encode(Map::kNoSlackTracking);
  map.set_bit_field3(bit_field3);
  DCHECK(!map.is_in_retained_map_list());
  map.clear_padding();
  map.set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
  return AllocationResult::FromObject(map);
}

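// Completes a map created by AllocatePartialMap() above: installs the (by now
// existing) empty DependentCode, empty descriptor array and null
// prototype/constructor that could not be set during the partial allocation.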
void Heap::FinalizePartialMap(Map map) {
  ReadOnlyRoots roots(this);
  map.set_dependent_code(DependentCode::empty_dependent_code(roots));
  map.set_raw_transitions(MaybeObject::FromSmi(Smi::zero()));
  map.SetInstanceDescriptors(isolate(), roots.empty_descriptor_array(), 0);
  map.set_prototype(roots.null_value());
  map.set_constructor_or_back_pointer(roots.null_value());
}

AllocationResult Heap::Allocate(Handle<Map> map,
                                AllocationType allocation_type) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  HeapObject result;
  AllocationResult allocation = AllocateRaw(size, allocation_type);
  if (!allocation.To(&result)) return allocation;
  // New space objects are allocated white.
  WriteBarrierMode write_barrier_mode =
      allocation_type == AllocationType::kYoung ? SKIP_WRITE_BARRIER
                                                : UPDATE_WRITE_BARRIER;
  result.set_map_after_allocation(*map, write_barrier_mode);
  return AllocationResult::FromObject(result);
}

bool Heap::CreateInitialMaps() {
  HeapObject obj;
  {
    AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize);
    if (!allocation.To(&obj)) return false;
  }
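  // The meta map is the map of all maps, including itself, so it is installed
  // as its own map right after allocation.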
  // Map::cast cannot be used due to uninitialized map field.
  Map new_meta_map = Map::unchecked_cast(obj);
  set_meta_map(new_meta_map);
  new_meta_map.set_map_after_allocation(new_meta_map);

  ReadOnlyRoots roots(this);
  {  // Partial map allocation
#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name)                \
  {                                                                          \
    Map map;                                                                 \
    if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \
    set_##field_name##_map(map);                                             \
  }

    ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array);
    ALLOCATE_PARTIAL_MAP(WEAK_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
                         weak_fixed_array);
    ALLOCATE_PARTIAL_MAP(WEAK_ARRAY_LIST_TYPE, kVariableSizeSentinel,
                         weak_array_list);
    ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel,
                         fixed_cow_array)
    DCHECK_NE(roots.fixed_array_map(), roots.fixed_cow_array_map());

    ALLOCATE_PARTIAL_MAP(DESCRIPTOR_ARRAY_TYPE, kVariableSizeSentinel,
                         descriptor_array)

    ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined);
    ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null);
    ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole);

#undef ALLOCATE_PARTIAL_MAP
  }

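  // Allocate the canonical empty (weak) fixed arrays and weak array list.
  // They are needed before the partial maps can be finalized, because the
  // empty enum cache and empty descriptor array created further below refer
  // to them.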
  {
    AllocationResult alloc =
        AllocateRaw(FixedArray::SizeFor(0), AllocationType::kReadOnly);
    if (!alloc.To(&obj)) return false;
    obj.set_map_after_allocation(roots.fixed_array_map(), SKIP_WRITE_BARRIER);
    FixedArray::cast(obj).set_length(0);
  }
  set_empty_fixed_array(FixedArray::cast(obj));

  {
    AllocationResult alloc =
        AllocateRaw(WeakFixedArray::SizeFor(0), AllocationType::kReadOnly);
    if (!alloc.To(&obj)) return false;
    obj.set_map_after_allocation(roots.weak_fixed_array_map(),
                                 SKIP_WRITE_BARRIER);
    WeakFixedArray::cast(obj).set_length(0);
  }
  set_empty_weak_fixed_array(WeakFixedArray::cast(obj));

  {
    AllocationResult allocation = AllocateRaw(WeakArrayList::SizeForCapacity(0),
                                              AllocationType::kReadOnly);
    if (!allocation.To(&obj)) return false;
    obj.set_map_after_allocation(roots.weak_array_list_map(),
                                 SKIP_WRITE_BARRIER);
    WeakArrayList::cast(obj).set_capacity(0);
    WeakArrayList::cast(obj).set_length(0);
  }
  set_empty_weak_array_list(WeakArrayList::cast(obj));

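  // Allocate null, undefined and the hole now so that other roots can refer
  // to them; their string/number payloads are filled in later by
  // Oddball::Initialize() in CreateInitialObjects(), once strings and heap
  // numbers can be created.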
  {
    AllocationResult allocation =
        Allocate(roots.null_map_handle(), AllocationType::kReadOnly);
    if (!allocation.To(&obj)) return false;
  }
  set_null_value(Oddball::cast(obj));
  Oddball::cast(obj).set_kind(Oddball::kNull);

  {
    AllocationResult allocation =
        Allocate(roots.undefined_map_handle(), AllocationType::kReadOnly);
    if (!allocation.To(&obj)) return false;
  }
  set_undefined_value(Oddball::cast(obj));
  Oddball::cast(obj).set_kind(Oddball::kUndefined);
  DCHECK(!InYoungGeneration(roots.undefined_value()));
  {
    AllocationResult allocation =
        Allocate(roots.the_hole_map_handle(), AllocationType::kReadOnly);
    if (!allocation.To(&obj)) return false;
  }
  set_the_hole_value(Oddball::cast(obj));
  Oddball::cast(obj).set_kind(Oddball::kTheHole);

  // Set preliminary exception sentinel value before actually initializing it.
  set_exception(roots.null_value());

  // Setup the struct maps first (needed for the EnumCache).
  for (unsigned i = 0; i < arraysize(struct_table); i++) {
    const StructTable& entry = struct_table[i];
    Map map;
    if (!AllocatePartialMap(entry.type, entry.size).To(&map)) return false;
    roots_table()[entry.index] = map.ptr();
  }

  // Allocate the empty enum cache.
  {
    AllocationResult allocation =
        Allocate(roots.enum_cache_map_handle(), AllocationType::kReadOnly);
    if (!allocation.To(&obj)) return false;
  }
  set_empty_enum_cache(EnumCache::cast(obj));
  EnumCache::cast(obj).set_keys(roots.empty_fixed_array());
  EnumCache::cast(obj).set_indices(roots.empty_fixed_array());

  // Allocate the empty descriptor array.
  {
    int size = DescriptorArray::SizeFor(0);
    if (!AllocateRaw(size, AllocationType::kReadOnly).To(&obj)) return false;
    obj.set_map_after_allocation(roots.descriptor_array_map(),
                                 SKIP_WRITE_BARRIER);
    DescriptorArray array = DescriptorArray::cast(obj);
    array.Initialize(roots.empty_enum_cache(), roots.undefined_value(), 0, 0);
  }
  set_empty_descriptor_array(DescriptorArray::cast(obj));

  // Fix the instance_descriptors for the existing maps.
  FinalizePartialMap(roots.meta_map());
  FinalizePartialMap(roots.fixed_array_map());
  FinalizePartialMap(roots.weak_fixed_array_map());
  FinalizePartialMap(roots.weak_array_list_map());
  FinalizePartialMap(roots.fixed_cow_array_map());
  FinalizePartialMap(roots.descriptor_array_map());
  FinalizePartialMap(roots.undefined_map());
  roots.undefined_map().set_is_undetectable(true);
  FinalizePartialMap(roots.null_map());
  roots.null_map().set_is_undetectable(true);
  FinalizePartialMap(roots.the_hole_map());
  for (unsigned i = 0; i < arraysize(struct_table); ++i) {
    const StructTable& entry = struct_table[i];
    FinalizePartialMap(Map::cast(Object(roots_table()[entry.index])));
  }

  {  // Map allocation
#define ALLOCATE_MAP(instance_type, size, field_name)               \
  {                                                                 \
    Map map;                                                        \
    if (!AllocateMap((instance_type), size).To(&map)) return false; \
    set_##field_name##_map(map);                                    \
  }

#define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \
  ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)

#define ALLOCATE_PRIMITIVE_MAP(instance_type, size, field_name, \
                               constructor_function_index)      \
  {                                                             \
    ALLOCATE_MAP((instance_type), (size), field_name);          \
    roots.field_name##_map().SetConstructorFunctionIndex(       \
        (constructor_function_index));                          \
  }

    ALLOCATE_VARSIZE_MAP(SCOPE_INFO_TYPE, scope_info)
    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_info)
    ALLOCATE_VARSIZE_MAP(CLOSURE_FEEDBACK_CELL_ARRAY_TYPE,
                         closure_feedback_cell_array)
    ALLOCATE_VARSIZE_MAP(FEEDBACK_VECTOR_TYPE, feedback_vector)
    ALLOCATE_PRIMITIVE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number,
                           Context::NUMBER_FUNCTION_INDEX)
    ALLOCATE_PRIMITIVE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol,
                           Context::SYMBOL_FUNCTION_INDEX)
    ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign)
    ALLOCATE_MAP(MEGA_DOM_HANDLER_TYPE, MegaDomHandler::kSize, mega_dom_handler)

    ALLOCATE_PRIMITIVE_MAP(ODDBALL_TYPE, Oddball::kSize, boolean,
                           Context::BOOLEAN_FUNCTION_INDEX);
    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, uninitialized);
    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, arguments_marker);
    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, exception);
    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, termination_exception);
    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, optimized_out);
    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, stale_register);
    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, self_reference_marker);
    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, basic_block_counters_marker);
    ALLOCATE_VARSIZE_MAP(BIGINT_TYPE, bigint);

    for (unsigned i = 0; i < arraysize(string_type_table); i++) {
      const StringTypeTable& entry = string_type_table[i];
      Map map;
      if (!AllocateMap(entry.type, entry.size).To(&map)) return false;
      map.SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
      // Mark cons string maps as unstable, because their objects can change
      // maps during GC.
      if (StringShape(entry.type).IsCons()) map.mark_unstable();
      roots_table()[entry.index] = map.ptr();
    }
    ALLOCATE_VARSIZE_MAP(SHARED_STRING_TYPE, seq_string_migration_sentinel);
    ALLOCATE_VARSIZE_MAP(SHARED_ONE_BYTE_STRING_TYPE,
                         one_byte_seq_string_migration_sentinel);

    ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array)
    roots.fixed_double_array_map().set_elements_kind(HOLEY_DOUBLE_ELEMENTS);
    ALLOCATE_VARSIZE_MAP(FEEDBACK_METADATA_TYPE, feedback_metadata)
    ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array)
    ALLOCATE_VARSIZE_MAP(BYTECODE_ARRAY_TYPE, bytecode_array)
    ALLOCATE_VARSIZE_MAP(FREE_SPACE_TYPE, free_space)
    ALLOCATE_VARSIZE_MAP(PROPERTY_ARRAY_TYPE, property_array)
    ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_HASH_MAP_TYPE, small_ordered_hash_map)
    ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_HASH_SET_TYPE, small_ordered_hash_set)
    ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_NAME_DICTIONARY_TYPE,
                         small_ordered_name_dictionary)

#define TORQUE_ALLOCATE_MAP(NAME, Name, name) \
  ALLOCATE_MAP(NAME, Name::SizeFor(), name)
    TORQUE_DEFINED_FIXED_INSTANCE_TYPE_LIST(TORQUE_ALLOCATE_MAP);
#undef TORQUE_ALLOCATE_MAP

#define TORQUE_ALLOCATE_VARSIZE_MAP(NAME, Name, name)                   \
  /* The DescriptorArray map is pre-allocated and initialized above. */ \
  if (NAME != DESCRIPTOR_ARRAY_TYPE) {                                  \
    ALLOCATE_VARSIZE_MAP(NAME, name)                                    \
  }
    TORQUE_DEFINED_VARSIZE_INSTANCE_TYPE_LIST(TORQUE_ALLOCATE_VARSIZE_MAP);
#undef TORQUE_ALLOCATE_VARSIZE_MAP

    ALLOCATE_VARSIZE_MAP(CODE_TYPE, code)

    ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell);
    {
      // The invalid_prototype_validity_cell is needed for JSObject maps.
      Smi value = Smi::FromInt(Map::kPrototypeChainInvalid);
      AllocationResult alloc = AllocateRaw(Cell::kSize, AllocationType::kOld);
      if (!alloc.To(&obj)) return false;
      obj.set_map_after_allocation(roots.cell_map(), SKIP_WRITE_BARRIER);
      Cell::cast(obj).set_value(value);
      set_invalid_prototype_validity_cell(Cell::cast(obj));
    }

    ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
    ALLOCATE_MAP(FILLER_TYPE, kTaggedSize, one_pointer_filler)
    ALLOCATE_MAP(FILLER_TYPE, 2 * kTaggedSize, two_pointer_filler)

    // The "no closures" and "one closure" FeedbackCell maps need
    // to be marked unstable because their objects can change maps.
    ALLOCATE_MAP(FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize,
                 no_closures_cell)
    roots.no_closures_cell_map().mark_unstable();
    ALLOCATE_MAP(FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize,
                 one_closure_cell)
    roots.one_closure_cell_map().mark_unstable();
    ALLOCATE_MAP(FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize,
                 many_closures_cell)

    ALLOCATE_VARSIZE_MAP(TRANSITION_ARRAY_TYPE, transition_array)

    ALLOCATE_VARSIZE_MAP(HASH_TABLE_TYPE, hash_table)
    ALLOCATE_VARSIZE_MAP(ORDERED_HASH_MAP_TYPE, ordered_hash_map)
    ALLOCATE_VARSIZE_MAP(ORDERED_HASH_SET_TYPE, ordered_hash_set)
    ALLOCATE_VARSIZE_MAP(ORDERED_NAME_DICTIONARY_TYPE, ordered_name_dictionary)
    ALLOCATE_VARSIZE_MAP(NAME_DICTIONARY_TYPE, name_dictionary)
    ALLOCATE_VARSIZE_MAP(SWISS_NAME_DICTIONARY_TYPE, swiss_name_dictionary)
    ALLOCATE_VARSIZE_MAP(GLOBAL_DICTIONARY_TYPE, global_dictionary)
    ALLOCATE_VARSIZE_MAP(NUMBER_DICTIONARY_TYPE, number_dictionary)
    ALLOCATE_VARSIZE_MAP(SIMPLE_NUMBER_DICTIONARY_TYPE,
                         simple_number_dictionary)
    ALLOCATE_VARSIZE_MAP(NAME_TO_INDEX_HASH_TABLE_TYPE,
                         name_to_index_hash_table)
    ALLOCATE_VARSIZE_MAP(REGISTERED_SYMBOL_TABLE_TYPE, registered_symbol_table)

    ALLOCATE_VARSIZE_MAP(EMBEDDER_DATA_ARRAY_TYPE, embedder_data_array)
    ALLOCATE_VARSIZE_MAP(EPHEMERON_HASH_TABLE_TYPE, ephemeron_hash_table)

    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, array_list)

    ALLOCATE_VARSIZE_MAP(SCRIPT_CONTEXT_TABLE_TYPE, script_context_table)

    ALLOCATE_VARSIZE_MAP(OBJECT_BOILERPLATE_DESCRIPTION_TYPE,
                         object_boilerplate_description)

    ALLOCATE_VARSIZE_MAP(COVERAGE_INFO_TYPE, coverage_info);

    ALLOCATE_MAP(CALL_HANDLER_INFO_TYPE, CallHandlerInfo::kSize,
                 side_effect_call_handler_info)
    ALLOCATE_MAP(CALL_HANDLER_INFO_TYPE, CallHandlerInfo::kSize,
                 side_effect_free_call_handler_info)
    ALLOCATE_MAP(CALL_HANDLER_INFO_TYPE, CallHandlerInfo::kSize,
                 next_call_side_effect_free_call_handler_info)

    ALLOCATE_VARSIZE_MAP(PREPARSE_DATA_TYPE, preparse_data)
    ALLOCATE_MAP(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kAlignedSize,
                 shared_function_info)
    ALLOCATE_MAP(SOURCE_TEXT_MODULE_TYPE, SourceTextModule::kSize,
                 source_text_module)
    ALLOCATE_MAP(SYNTHETIC_MODULE_TYPE, SyntheticModule::kSize,
                 synthetic_module)
    ALLOCATE_MAP(CODE_DATA_CONTAINER_TYPE, CodeDataContainer::kSize,
                 code_data_container)

    IF_WASM(ALLOCATE_MAP, WASM_API_FUNCTION_REF_TYPE, WasmApiFunctionRef::kSize,
            wasm_api_function_ref)
    IF_WASM(ALLOCATE_MAP, WASM_CAPI_FUNCTION_DATA_TYPE,
            WasmCapiFunctionData::kSize, wasm_capi_function_data)
    IF_WASM(ALLOCATE_MAP, WASM_EXPORTED_FUNCTION_DATA_TYPE,
            WasmExportedFunctionData::kSize, wasm_exported_function_data)
    IF_WASM(ALLOCATE_MAP, WASM_INTERNAL_FUNCTION_TYPE,
            WasmInternalFunction::kSize, wasm_internal_function)
    IF_WASM(ALLOCATE_MAP, WASM_JS_FUNCTION_DATA_TYPE, WasmJSFunctionData::kSize,
            wasm_js_function_data)
    IF_WASM(ALLOCATE_MAP, WASM_ON_FULFILLED_DATA_TYPE,
            WasmOnFulfilledData::kSize, wasm_onfulfilled_data)
    IF_WASM(ALLOCATE_MAP, WASM_TYPE_INFO_TYPE, WasmTypeInfo::kSize,
            wasm_type_info)

    ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)

    ALLOCATE_MAP(JS_MESSAGE_OBJECT_TYPE, JSMessageObject::kHeaderSize,
                 message_object)
    ALLOCATE_MAP(JS_EXTERNAL_OBJECT_TYPE, JSExternalObject::kHeaderSize,
                 external)
    external_map().set_is_extensible(false);
#undef ALLOCATE_PRIMITIVE_MAP
#undef ALLOCATE_VARSIZE_MAP
#undef ALLOCATE_MAP
  }
  {
    AllocationResult alloc = AllocateRaw(
        ArrayList::SizeFor(ArrayList::kFirstIndex), AllocationType::kReadOnly);
    if (!alloc.To(&obj)) return false;
    obj.set_map_after_allocation(roots.array_list_map(), SKIP_WRITE_BARRIER);
    ArrayList::cast(obj).set_length(ArrayList::kFirstIndex);
    ArrayList::cast(obj).SetLength(0);
  }
  set_empty_array_list(ArrayList::cast(obj));

  {
    AllocationResult alloc =
        AllocateRaw(ScopeInfo::SizeFor(ScopeInfo::kVariablePartIndex),
                    AllocationType::kReadOnly);
    if (!alloc.To(&obj)) return false;
    obj.set_map_after_allocation(roots.scope_info_map(), SKIP_WRITE_BARRIER);
    int flags = ScopeInfo::IsEmptyBit::encode(true);
    DCHECK_EQ(ScopeInfo::LanguageModeBit::decode(flags), LanguageMode::kSloppy);
    DCHECK_EQ(ScopeInfo::ReceiverVariableBits::decode(flags),
              VariableAllocationInfo::NONE);
    DCHECK_EQ(ScopeInfo::FunctionVariableBits::decode(flags),
              VariableAllocationInfo::NONE);
    ScopeInfo::cast(obj).set_flags(flags);
    ScopeInfo::cast(obj).set_context_local_count(0);
    ScopeInfo::cast(obj).set_parameter_count(0);
  }
  set_empty_scope_info(ScopeInfo::cast(obj));

  {
    // Empty boilerplate needs a field for literal_flags
    AllocationResult alloc =
        AllocateRaw(FixedArray::SizeFor(1), AllocationType::kReadOnly);
    if (!alloc.To(&obj)) return false;
    obj.set_map_after_allocation(roots.object_boilerplate_description_map(),
                                 SKIP_WRITE_BARRIER);

    FixedArray::cast(obj).set_length(1);
    FixedArray::cast(obj).set(ObjectBoilerplateDescription::kLiteralTypeOffset,
                              Smi::zero());
  }
  set_empty_object_boilerplate_description(
      ObjectBoilerplateDescription::cast(obj));

  {
    // Empty array boilerplate description
    AllocationResult alloc =
        Allocate(roots.array_boilerplate_description_map_handle(),
                 AllocationType::kReadOnly);
    if (!alloc.To(&obj)) return false;

    ArrayBoilerplateDescription::cast(obj).set_constant_elements(
        roots.empty_fixed_array());
    ArrayBoilerplateDescription::cast(obj).set_elements_kind(
        ElementsKind::PACKED_SMI_ELEMENTS);
  }
  set_empty_array_boilerplate_description(
      ArrayBoilerplateDescription::cast(obj));

  {
    AllocationResult allocation =
        Allocate(roots.boolean_map_handle(), AllocationType::kReadOnly);
    if (!allocation.To(&obj)) return false;
  }
  set_true_value(Oddball::cast(obj));
  Oddball::cast(obj).set_kind(Oddball::kTrue);

  {
    AllocationResult allocation =
        Allocate(roots.boolean_map_handle(), AllocationType::kReadOnly);
    if (!allocation.To(&obj)) return false;
  }
  set_false_value(Oddball::cast(obj));
  Oddball::cast(obj).set_kind(Oddball::kFalse);

  // Empty arrays.
  {
    if (!AllocateRaw(ByteArray::SizeFor(0), AllocationType::kReadOnly).To(&obj))
      return false;
    obj.set_map_after_allocation(roots.byte_array_map(), SKIP_WRITE_BARRIER);
    ByteArray::cast(obj).set_length(0);
    set_empty_byte_array(ByteArray::cast(obj));
  }

  {
    if (!AllocateRaw(FixedArray::SizeFor(0), AllocationType::kReadOnly)
             .To(&obj)) {
      return false;
    }
    obj.set_map_after_allocation(roots.property_array_map(),
                                 SKIP_WRITE_BARRIER);
    PropertyArray::cast(obj).initialize_length(0);
    set_empty_property_array(PropertyArray::cast(obj));
  }

  {
    if (!AllocateRaw(FixedArray::SizeFor(0), AllocationType::kReadOnly)
             .To(&obj)) {
      return false;
    }
    obj.set_map_after_allocation(roots.closure_feedback_cell_array_map(),
                                 SKIP_WRITE_BARRIER);
    FixedArray::cast(obj).set_length(0);
    set_empty_closure_feedback_cell_array(ClosureFeedbackCellArray::cast(obj));
  }

  DCHECK(!InYoungGeneration(roots.empty_fixed_array()));

  roots.bigint_map().SetConstructorFunctionIndex(
      Context::BIGINT_FUNCTION_INDEX);

  return true;
}

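// Creates the objects needed by the V8 API layer: the list of message
// listeners and the canonical no-op InterceptorInfo.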
void Heap::CreateApiObjects() {
  Isolate* isolate = this->isolate();
  HandleScope scope(isolate);

  set_message_listeners(*TemplateList::New(isolate, 2));

  Handle<InterceptorInfo> info =
      Handle<InterceptorInfo>::cast(isolate->factory()->NewStruct(
          INTERCEPTOR_INFO_TYPE, AllocationType::kReadOnly));
  info->set_flags(0);
  set_noop_interceptor_info(*info);
}

void Heap::CreateInitialObjects() {
  HandleScope initial_objects_handle_scope(isolate());
  Factory* factory = isolate()->factory();
  ReadOnlyRoots roots(this);

  // The -0 value must be set before NewNumber works.
  set_minus_zero_value(
      *factory->NewHeapNumber<AllocationType::kReadOnly>(-0.0));
  DCHECK(std::signbit(roots.minus_zero_value().Number()));

  set_nan_value(*factory->NewHeapNumber<AllocationType::kReadOnly>(
      std::numeric_limits<double>::quiet_NaN()));
  set_hole_nan_value(*factory->NewHeapNumberFromBits<AllocationType::kReadOnly>(
      kHoleNanInt64));
  set_infinity_value(
      *factory->NewHeapNumber<AllocationType::kReadOnly>(V8_INFINITY));
  set_minus_infinity_value(
      *factory->NewHeapNumber<AllocationType::kReadOnly>(-V8_INFINITY));

  set_hash_seed(*factory->NewByteArray(kInt64Size, AllocationType::kReadOnly));
  InitializeHashSeed();

  // There's no "current microtask" in the beginning.
  set_current_microtask(roots.undefined_value());

  set_weak_refs_keep_during_job(roots.undefined_value());

  // Allocate cache for single character one byte strings.
  set_single_character_string_cache(*factory->NewFixedArray(
      String::kMaxOneByteCharCode + 1, AllocationType::kOld));

  for (unsigned i = 0; i < arraysize(constant_string_table); i++) {
    Handle<String> str =
        factory->InternalizeUtf8String(constant_string_table[i].contents);
    roots_table()[constant_string_table[i].index] = str->ptr();
  }

  // Finish initializing oddballs after creating the string table.
  Oddball::Initialize(isolate(), factory->undefined_value(), "undefined",
                      factory->nan_value(), "undefined", Oddball::kUndefined);

  // Initialize the null_value.
  Oddball::Initialize(isolate(), factory->null_value(), "null",
                      handle(Smi::zero(), isolate()), "object", Oddball::kNull);

  // Initialize the_hole_value.
  Oddball::Initialize(isolate(), factory->the_hole_value(), "hole",
                      factory->hole_nan_value(), "undefined",
                      Oddball::kTheHole);

  // Initialize the true_value.
  Oddball::Initialize(isolate(), factory->true_value(), "true",
                      handle(Smi::FromInt(1), isolate()), "boolean",
                      Oddball::kTrue);

  // Initialize the false_value.
  Oddball::Initialize(isolate(), factory->false_value(), "false",
                      handle(Smi::zero(), isolate()), "boolean",
                      Oddball::kFalse);

  set_uninitialized_value(
      *factory->NewOddball(factory->uninitialized_map(), "uninitialized",
                           handle(Smi::FromInt(-1), isolate()), "undefined",
                           Oddball::kUninitialized));

  set_arguments_marker(
      *factory->NewOddball(factory->arguments_marker_map(), "arguments_marker",
                           handle(Smi::FromInt(-4), isolate()), "undefined",
                           Oddball::kArgumentsMarker));

  set_termination_exception(*factory->NewOddball(
      factory->termination_exception_map(), "termination_exception",
      handle(Smi::FromInt(-3), isolate()), "undefined", Oddball::kOther));

  set_exception(*factory->NewOddball(factory->exception_map(), "exception",
                                     handle(Smi::FromInt(-5), isolate()),
                                     "undefined", Oddball::kException));

  set_optimized_out(*factory->NewOddball(factory->optimized_out_map(),
                                         "optimized_out",
                                         handle(Smi::FromInt(-6), isolate()),
                                         "undefined", Oddball::kOptimizedOut));

  set_stale_register(
      *factory->NewOddball(factory->stale_register_map(), "stale_register",
                           handle(Smi::FromInt(-7), isolate()), "undefined",
                           Oddball::kStaleRegister));

  // Initialize marker objects used during compilation.
  set_self_reference_marker(*factory->NewSelfReferenceMarker());
  set_basic_block_counters_marker(*factory->NewBasicBlockCountersMarker());

  set_interpreter_entry_trampoline_for_profiling(roots.undefined_value());

  {
    HandleScope handle_scope(isolate());
#define SYMBOL_INIT(_, name)                                                \
  {                                                                         \
    Handle<Symbol> symbol(                                                  \
        isolate()->factory()->NewPrivateSymbol(AllocationType::kReadOnly)); \
    roots_table()[RootIndex::k##name] = symbol->ptr();                      \
  }
    PRIVATE_SYMBOL_LIST_GENERATOR(SYMBOL_INIT, /* not used */)
#undef SYMBOL_INIT
  }

  {
    HandleScope handle_scope(isolate());
#define SYMBOL_INIT(_, name, description)                                \
  Handle<Symbol> name = factory->NewSymbol(AllocationType::kReadOnly);   \
  Handle<String> name##d = factory->InternalizeUtf8String(#description); \
  name->set_description(*name##d);                                       \
  roots_table()[RootIndex::k##name] = name->ptr();
    PUBLIC_SYMBOL_LIST_GENERATOR(SYMBOL_INIT, /* not used */)
#undef SYMBOL_INIT

#define SYMBOL_INIT(_, name, description)                                \
  Handle<Symbol> name = factory->NewSymbol(AllocationType::kReadOnly);   \
  Handle<String> name##d = factory->InternalizeUtf8String(#description); \
  name->set_is_well_known_symbol(true);                                  \
  name->set_description(*name##d);                                       \
  roots_table()[RootIndex::k##name] = name->ptr();
    WELL_KNOWN_SYMBOL_LIST_GENERATOR(SYMBOL_INIT, /* not used */)
#undef SYMBOL_INIT

    // Mark "Interesting Symbols" appropriately.
    to_string_tag_symbol->set_is_interesting_symbol(true);
  }

  Handle<NameDictionary> empty_property_dictionary = NameDictionary::New(
      isolate(), 1, AllocationType::kReadOnly, USE_CUSTOM_MINIMUM_CAPACITY);
  DCHECK(!empty_property_dictionary->HasSufficientCapacityToAdd(1));

  set_empty_property_dictionary(*empty_property_dictionary);

  Handle<RegisteredSymbolTable> empty_symbol_table = RegisteredSymbolTable::New(
      isolate(), 1, AllocationType::kReadOnly, USE_CUSTOM_MINIMUM_CAPACITY);
  DCHECK(!empty_symbol_table->HasSufficientCapacityToAdd(1));
  set_public_symbol_table(*empty_symbol_table);
  set_api_symbol_table(*empty_symbol_table);
  set_api_private_symbol_table(*empty_symbol_table);

  set_number_string_cache(*factory->NewFixedArray(
      kInitialNumberStringCacheSize * 2, AllocationType::kOld));

  set_basic_block_profiling_data(roots.empty_array_list());

  // Allocate cache for string split and regexp-multiple.
  set_string_split_cache(*factory->NewFixedArray(
      RegExpResultsCache::kRegExpResultsCacheSize, AllocationType::kOld));
  set_regexp_multiple_cache(*factory->NewFixedArray(
      RegExpResultsCache::kRegExpResultsCacheSize, AllocationType::kOld));

  // Allocate FeedbackCell for builtins.
  Handle<FeedbackCell> many_closures_cell =
      factory->NewManyClosuresCell(factory->undefined_value());
  set_many_closures_cell(*many_closures_cell);

  set_detached_contexts(roots.empty_weak_array_list());
  set_retaining_path_targets(roots.empty_weak_array_list());

  set_feedback_vectors_for_profiling_tools(roots.undefined_value());
  set_pending_optimize_for_test_bytecode(roots.undefined_value());
  set_shared_wasm_memories(roots.empty_weak_array_list());
#ifdef V8_ENABLE_WEBASSEMBLY
  set_active_continuation(roots.undefined_value());
  set_active_suspender(roots.undefined_value());
  set_wasm_canonical_rtts(roots.empty_weak_array_list());
#endif  // V8_ENABLE_WEBASSEMBLY

  set_script_list(roots.empty_weak_array_list());

  Handle<NumberDictionary> slow_element_dictionary = NumberDictionary::New(
      isolate(), 1, AllocationType::kReadOnly, USE_CUSTOM_MINIMUM_CAPACITY);
  DCHECK(!slow_element_dictionary->HasSufficientCapacityToAdd(1));
  set_empty_slow_element_dictionary(*slow_element_dictionary);

  set_materialized_objects(*factory->NewFixedArray(0, AllocationType::kOld));

  // Handling of script id generation is in Heap::NextScriptId().
  set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId));
  set_last_debugging_id(Smi::FromInt(DebugInfo::kNoDebuggingId));
  set_next_template_serial_number(Smi::zero());

  // Allocate the empty OrderedHashMap.
  Handle<OrderedHashMap> empty_ordered_hash_map =
      OrderedHashMap::AllocateEmpty(isolate(), AllocationType::kReadOnly)
          .ToHandleChecked();
  set_empty_ordered_hash_map(*empty_ordered_hash_map);

  // Allocate the empty OrderedHashSet.
  Handle<OrderedHashSet> empty_ordered_hash_set =
      OrderedHashSet::AllocateEmpty(isolate(), AllocationType::kReadOnly)
          .ToHandleChecked();
  set_empty_ordered_hash_set(*empty_ordered_hash_set);

  // Allocate the empty OrderedNameDictionary.
  Handle<OrderedNameDictionary> empty_ordered_property_dictionary =
      OrderedNameDictionary::AllocateEmpty(isolate(), AllocationType::kReadOnly)
          .ToHandleChecked();
  set_empty_ordered_property_dictionary(*empty_ordered_property_dictionary);

  // Allocate the empty SwissNameDictionary.
  Handle<SwissNameDictionary> empty_swiss_property_dictionary =
      factory->CreateCanonicalEmptySwissNameDictionary();
  set_empty_swiss_property_dictionary(*empty_swiss_property_dictionary);

  // Allocate the empty FeedbackMetadata.
  Handle<FeedbackMetadata> empty_feedback_metadata =
      factory->NewFeedbackMetadata(0, 0, AllocationType::kReadOnly);
  set_empty_feedback_metadata(*empty_feedback_metadata);

  // Canonical scope arrays.
  Handle<ScopeInfo> global_this_binding =
      ScopeInfo::CreateGlobalThisBinding(isolate());
  set_global_this_binding_scope_info(*global_this_binding);

  Handle<ScopeInfo> empty_function =
      ScopeInfo::CreateForEmptyFunction(isolate());
  set_empty_function_scope_info(*empty_function);

  Handle<ScopeInfo> native_scope_info =
      ScopeInfo::CreateForNativeContext(isolate());
  set_native_scope_info(*native_scope_info);

  // Allocate the empty script.
  Handle<Script> script = factory->NewScript(factory->empty_string());
  script->set_type(Script::TYPE_NATIVE);
  // This is used for exceptions thrown with no stack frames. Such exceptions
  // can be shared everywhere.
  script->set_origin_options(ScriptOriginOptions(true, false));
  set_empty_script(*script);

  // Protectors
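  // Each protector is a cell holding a Smi that starts out "intact" (see
  // src/execution/protectors.h). Invalidating a protector later makes
  // builtins and optimized code stop taking the fast paths it guards.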
  set_array_buffer_detaching_protector(*factory->NewProtector());
  set_array_constructor_protector(*factory->NewProtector());
  set_array_iterator_protector(*factory->NewProtector());
  set_array_species_protector(*factory->NewProtector());
  set_is_concat_spreadable_protector(*factory->NewProtector());
  set_map_iterator_protector(*factory->NewProtector());
  set_no_elements_protector(*factory->NewProtector());
  set_mega_dom_protector(*factory->NewProtector());
  set_promise_hook_protector(*factory->NewProtector());
  set_promise_resolve_protector(*factory->NewProtector());
  set_promise_species_protector(*factory->NewProtector());
  set_promise_then_protector(*factory->NewProtector());
  set_regexp_species_protector(*factory->NewProtector());
  set_set_iterator_protector(*factory->NewProtector());
  set_string_iterator_protector(*factory->NewProtector());
  set_string_length_protector(*factory->NewProtector());
  set_typed_array_species_protector(*factory->NewProtector());

  set_serialized_objects(roots.empty_fixed_array());
  set_serialized_global_proxy_sizes(roots.empty_fixed_array());

  /* Canonical off-heap trampoline data */
  set_off_heap_trampoline_relocation_info(
      *Builtins::GenerateOffHeapTrampolineRelocInfo(isolate_));

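  // With the external code space enabled, every Code object has its own
  // CodeDataContainer, so the canonical trampoline containers below are never
  // used and are simply set to undefined.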
  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
    // These roots will not be used.
    HeapObject no_container = *isolate()->factory()->undefined_value();
    set_trampoline_trivial_code_data_container(no_container);
    set_trampoline_promise_rejection_code_data_container(no_container);

  } else {
    set_trampoline_trivial_code_data_container(
        *isolate()->factory()->NewCodeDataContainer(0,
                                                    AllocationType::kReadOnly));

    set_trampoline_promise_rejection_code_data_container(
        *isolate()->factory()->NewCodeDataContainer(
            Code::IsPromiseRejectionField::encode(true),
            AllocationType::kReadOnly));
  }

  // Evaluate the hash values which will then be cached in the strings.
  isolate()->factory()->zero_string()->EnsureHash();
  isolate()->factory()->one_string()->EnsureHash();

  // Initialize builtins constants table.
  set_builtins_constants_table(roots.empty_fixed_array());

  // Initialize descriptor cache.
  isolate_->descriptor_lookup_cache()->Clear();

  // Initialize compilation cache.
  isolate_->compilation_cache()->Clear();

  // Create internal SharedFunctionInfos.
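  // These SharedFunctionInfos live on the roots list and are shared by all
  // native contexts; they back the closures that the promise, async-function
  // and async-generator machinery creates at runtime (see
  // CreateSharedFunctionInfo() above).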

  // Async functions:
  {
    Handle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
        isolate(), Builtin::kAsyncFunctionAwaitRejectClosure, 1);
    set_async_function_await_reject_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate(), Builtin::kAsyncFunctionAwaitResolveClosure, 1);
    set_async_function_await_resolve_shared_fun(*info);
  }

  // Async generators:
  {
    Handle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
        isolate(), Builtin::kAsyncGeneratorAwaitResolveClosure, 1);
    set_async_generator_await_resolve_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate(), Builtin::kAsyncGeneratorAwaitRejectClosure, 1);
    set_async_generator_await_reject_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate(), Builtin::kAsyncGeneratorYieldResolveClosure, 1);
    set_async_generator_yield_resolve_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate(), Builtin::kAsyncGeneratorReturnResolveClosure, 1);
    set_async_generator_return_resolve_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate(), Builtin::kAsyncGeneratorReturnClosedResolveClosure, 1);
    set_async_generator_return_closed_resolve_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate(), Builtin::kAsyncGeneratorReturnClosedRejectClosure, 1);
    set_async_generator_return_closed_reject_shared_fun(*info);
  }

  // AsyncIterator:
  {
    Handle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
        isolate_, Builtin::kAsyncIteratorValueUnwrap, 1);
    set_async_iterator_value_unwrap_shared_fun(*info);
  }

  // Promises:
  {
    Handle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
        isolate_, Builtin::kPromiseCapabilityDefaultResolve, 1,
        FunctionKind::kConciseMethod);
    info->set_native(true);
    info->set_function_map_index(
        Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX);
    set_promise_capability_default_resolve_shared_fun(*info);

    info = CreateSharedFunctionInfo(isolate_,
                                    Builtin::kPromiseCapabilityDefaultReject, 1,
                                    FunctionKind::kConciseMethod);
    info->set_native(true);
    info->set_function_map_index(
        Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX);
    set_promise_capability_default_reject_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate_, Builtin::kPromiseGetCapabilitiesExecutor, 2);
    set_promise_get_capabilities_executor_shared_fun(*info);
  }

  // Promises / finally:
  {
    Handle<SharedFunctionInfo> info =
        CreateSharedFunctionInfo(isolate(), Builtin::kPromiseThenFinally, 1);
    info->set_native(true);
    set_promise_then_finally_shared_fun(*info);

    info =
        CreateSharedFunctionInfo(isolate(), Builtin::kPromiseCatchFinally, 1);
    info->set_native(true);
    set_promise_catch_finally_shared_fun(*info);

    info = CreateSharedFunctionInfo(isolate(),
                                    Builtin::kPromiseValueThunkFinally, 0);
    set_promise_value_thunk_finally_shared_fun(*info);

    info =
        CreateSharedFunctionInfo(isolate(), Builtin::kPromiseThrowerFinally, 0);
    set_promise_thrower_finally_shared_fun(*info);
  }

  // Promise combinators:
  {
    Handle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
        isolate_, Builtin::kPromiseAllResolveElementClosure, 1);
    set_promise_all_resolve_element_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate_, Builtin::kPromiseAllSettledResolveElementClosure, 1);
    set_promise_all_settled_resolve_element_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate_, Builtin::kPromiseAllSettledRejectElementClosure, 1);
    set_promise_all_settled_reject_element_shared_fun(*info);

    info = CreateSharedFunctionInfo(
        isolate_, Builtin::kPromiseAnyRejectElementClosure, 1);
    set_promise_any_reject_element_shared_fun(*info);
  }

  // ProxyRevoke:
  {
    Handle<SharedFunctionInfo> info =
        CreateSharedFunctionInfo(isolate_, Builtin::kProxyRevoke, 0);
    set_proxy_revoke_shared_fun(*info);
  }
}

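// Creates the AccessorInfo objects for V8's internal accessors (declared via
// ACCESSOR_INFO_LIST_GENERATOR) and records each accessor's getter/setter
// side-effect type, which debug evaluation uses to decide what may be called
// without side effects.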
void Heap::CreateInternalAccessorInfoObjects() {
  Isolate* isolate = this->isolate();
  HandleScope scope(isolate);
  Handle<AccessorInfo> accessor_info;

#define INIT_ACCESSOR_INFO(_, accessor_name, AccessorName, ...) \
  accessor_info = Accessors::Make##AccessorName##Info(isolate); \
  roots_table()[RootIndex::k##AccessorName##Accessor] = accessor_info->ptr();
  ACCESSOR_INFO_LIST_GENERATOR(INIT_ACCESSOR_INFO, /* not used */)
#undef INIT_ACCESSOR_INFO

#define INIT_SIDE_EFFECT_FLAG(_, accessor_name, AccessorName, GetterType, \
                              SetterType)                                  \
  AccessorInfo::cast(                                                      \
      Object(roots_table()[RootIndex::k##AccessorName##Accessor]))         \
      .set_getter_side_effect_type(SideEffectType::GetterType);            \
  AccessorInfo::cast(                                                      \
      Object(roots_table()[RootIndex::k##AccessorName##Accessor]))         \
      .set_setter_side_effect_type(SideEffectType::SetterType);
  ACCESSOR_INFO_LIST_GENERATOR(INIT_SIDE_EFFECT_FLAG, /* not used */)
#undef INIT_SIDE_EFFECT_FLAG
}

}  // namespace internal
}  // namespace v8