• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/heap/factory.h"
6 
7 #include <algorithm>  // For copy
8 #include <memory>     // For shared_ptr<>
9 #include <string>
10 #include <utility>  // For move
11 
12 #include "src/ast/ast-source-ranges.h"
13 #include "src/base/bits.h"
14 #include "src/builtins/accessors.h"
15 #include "src/builtins/constants-table-builder.h"
16 #include "src/codegen/compilation-cache.h"
17 #include "src/codegen/compiler.h"
18 #include "src/common/assert-scope.h"
19 #include "src/common/globals.h"
20 #include "src/diagnostics/basic-block-profiler.h"
21 #include "src/execution/isolate-inl.h"
22 #include "src/execution/protectors-inl.h"
23 #include "src/heap/basic-memory-chunk.h"
24 #include "src/heap/heap-allocator-inl.h"
25 #include "src/heap/heap-inl.h"
26 #include "src/heap/incremental-marking.h"
27 #include "src/heap/mark-compact-inl.h"
28 #include "src/heap/memory-chunk.h"
29 #include "src/heap/read-only-heap.h"
30 #include "src/ic/handler-configuration-inl.h"
31 #include "src/init/bootstrapper.h"
32 #include "src/interpreter/interpreter.h"
33 #include "src/logging/counters.h"
34 #include "src/logging/log.h"
35 #include "src/numbers/conversions.h"
36 #include "src/numbers/hash-seed-inl.h"
37 #include "src/objects/allocation-site-inl.h"
38 #include "src/objects/allocation-site-scopes.h"
39 #include "src/objects/api-callbacks.h"
40 #include "src/objects/arguments-inl.h"
41 #include "src/objects/bigint.h"
42 #include "src/objects/call-site-info-inl.h"
43 #include "src/objects/cell-inl.h"
44 #include "src/objects/debug-objects-inl.h"
45 #include "src/objects/embedder-data-array-inl.h"
46 #include "src/objects/feedback-cell-inl.h"
47 #include "src/objects/fixed-array-inl.h"
48 #include "src/objects/foreign-inl.h"
49 #include "src/objects/instance-type-inl.h"
50 #include "src/objects/js-array-buffer-inl.h"
51 #include "src/objects/js-array-inl.h"
52 #include "src/objects/js-collection-inl.h"
53 #include "src/objects/js-generator-inl.h"
54 #include "src/objects/js-objects.h"
55 #include "src/objects/js-regexp-inl.h"
56 #include "src/objects/js-weak-refs-inl.h"
57 #include "src/objects/literal-objects-inl.h"
58 #include "src/objects/megadom-handler-inl.h"
59 #include "src/objects/microtask-inl.h"
60 #include "src/objects/module-inl.h"
61 #include "src/objects/promise-inl.h"
62 #include "src/objects/property-descriptor-object-inl.h"
63 #include "src/objects/scope-info.h"
64 #include "src/objects/string-set-inl.h"
65 #include "src/objects/struct-inl.h"
66 #include "src/objects/synthetic-module-inl.h"
67 #include "src/objects/template-objects-inl.h"
68 #include "src/objects/transitions-inl.h"
69 #include "src/roots/roots.h"
70 #include "src/strings/unicode-inl.h"
71 #if V8_ENABLE_WEBASSEMBLY
72 #include "src/wasm/wasm-value.h"
73 #endif
74 
75 #include "src/heap/local-factory-inl.h"
76 #include "src/heap/local-heap-inl.h"
77 
78 namespace v8 {
79 namespace internal {
80 
// Main-thread CodeBuilder: binds to the isolate's main-thread LocalIsolate.
// The position table starts out as the canonical empty byte array until a
// real table is supplied by the caller.
Factory::CodeBuilder::CodeBuilder(Isolate* isolate, const CodeDesc& desc,
                                  CodeKind kind)
    : isolate_(isolate),
      local_isolate_(isolate_->main_thread_local_isolate()),
      code_desc_(desc),
      kind_(kind),
      position_table_(isolate_->factory()->empty_byte_array()) {}
// Background-thread CodeBuilder (e.g. concurrent baseline compilation).
// NOTE(review): GetMainThreadIsolateUnsafe() hands out the main Isolate from
// a background thread; isolate_ here is only used in ways that are safe
// off-thread — presumably enforced by callers. TODO confirm.
Factory::CodeBuilder::CodeBuilder(LocalIsolate* local_isolate,
                                  const CodeDesc& desc, CodeKind kind)
    : isolate_(local_isolate->GetMainThreadIsolateUnsafe()),
      local_isolate_(local_isolate),
      code_desc_(desc),
      kind_(kind),
      position_table_(isolate_->factory()->empty_byte_array()) {}
// Assembles a Code object from the CodeDesc captured at construction time:
// allocates the relocation-info ByteArray, a CodeDataContainer (canonical
// read-only one when possible), optional on-heap profiler data, and the Code
// object itself; then, inside a DisallowGarbageCollection scope, initializes
// all Code fields, patches self references, copies the generated
// instructions, and flushes the instruction cache.
// Returns an empty MaybeHandle on allocation failure when
// |retry_allocation_or_fail| is false.
MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
    bool retry_allocation_or_fail) {
  const auto factory = isolate_->factory();
  // Allocate objects needed for code initialization.
  // Concurrent-baseline builds must allocate through the local isolate's
  // factory; everything else goes through the main factory.
  Handle<ByteArray> reloc_info =
      CompiledWithConcurrentBaseline()
          ? local_isolate_->factory()->NewByteArray(code_desc_.reloc_size,
                                                    AllocationType::kOld)
          : factory->NewByteArray(code_desc_.reloc_size, AllocationType::kOld);
  Handle<CodeDataContainer> data_container;

  // Use a canonical off-heap trampoline CodeDataContainer if possible.
  // Only two flag configurations have canonical containers: no flags, or
  // exactly the promise-rejection flag.
  const int32_t promise_rejection_flag =
      Code::IsPromiseRejectionField::encode(true);
  if (read_only_data_container_ &&
      (kind_specific_flags_ == 0 ||
       kind_specific_flags_ == promise_rejection_flag)) {
    const ReadOnlyRoots roots(isolate_);
    const auto canonical_code_data_container = Handle<CodeDataContainer>::cast(
        kind_specific_flags_ == 0
            ? roots.trampoline_trivial_code_data_container_handle()
            : roots.trampoline_promise_rejection_code_data_container_handle());
    DCHECK_EQ(canonical_code_data_container->kind_specific_flags(kRelaxedLoad),
              kind_specific_flags_);
    data_container = canonical_code_data_container;
  } else {
    // No canonical container available: allocate a fresh one.
    if (CompiledWithConcurrentBaseline()) {
      data_container = local_isolate_->factory()->NewCodeDataContainer(
          0, AllocationType::kOld);
    } else {
      data_container = factory->NewCodeDataContainer(
          0, read_only_data_container_ ? AllocationType::kReadOnly
                                       : AllocationType::kOld);
    }
    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
      data_container->initialize_flags(kind_, builtin_);
    }
    data_container->set_kind_specific_flags(kind_specific_flags_,
                                            kRelaxedStore);
  }

  // Basic block profiling data for builtins is stored in the JS heap rather
  // than in separately-allocated C++ objects. Allocate that data now if
  // appropriate.
  Handle<OnHeapBasicBlockProfilerData> on_heap_profiler_data;
  if (profiler_data_ && isolate_->IsGeneratingEmbeddedBuiltins()) {
    on_heap_profiler_data = profiler_data_->CopyToJSHeap(isolate_);

    // Add the on-heap data to a global list, which keeps it alive and allows
    // iteration.
    Handle<ArrayList> list(isolate_->heap()->basic_block_profiling_data(),
                           isolate_);
    Handle<ArrayList> new_list =
        ArrayList::Add(isolate_, list, on_heap_profiler_data);
    isolate_->heap()->SetBasicBlockProfilingData(new_list);
  }

  STATIC_ASSERT(Code::kOnHeapBodyIsContiguous);
  Heap* heap = isolate_->heap();
  // Makes code pages writable for the duration of initialization below.
  CodePageCollectionMemoryModificationScope code_allocation(heap);

  Handle<Code> code;
  if (CompiledWithConcurrentBaseline()) {
    if (!AllocateConcurrentSparkplugCode(retry_allocation_or_fail)
             .ToHandle(&code)) {
      return MaybeHandle<Code>();
    }
  } else if (!AllocateCode(retry_allocation_or_fail).ToHandle(&code)) {
    return MaybeHandle<Code>();
  }

  {
    // Initialize the raw Code object. No allocation may happen in this scope;
    // the object is not in a traversable state until initialization is done.
    Code raw_code = *code;
    constexpr bool kIsNotOffHeapTrampoline = false;
    DisallowGarbageCollection no_gc;

    raw_code.set_raw_instruction_size(code_desc_.instruction_size());
    raw_code.set_raw_metadata_size(code_desc_.metadata_size());
    raw_code.set_relocation_info(*reloc_info);
    raw_code.initialize_flags(kind_, is_turbofanned_, stack_slots_,
                              kIsNotOffHeapTrampoline);
    raw_code.set_builtin_id(builtin_);
    // This might impact direct concurrent reads from TF if we are resetting
    // this field. We currently assume it's immutable thus a relaxed read (after
    // passing IsPendingAllocation).
    raw_code.set_inlined_bytecode_size(inlined_bytecode_size_);
    raw_code.set_code_data_container(*data_container, kReleaseStore);
    if (kind_ == CodeKind::BASELINE) {
      // Baseline code reuses position_table_ as a bytecode offset table.
      raw_code.set_bytecode_or_interpreter_data(*interpreter_data_);
      raw_code.set_bytecode_offset_table(*position_table_);
    } else {
      raw_code.set_deoptimization_data(*deoptimization_data_);
      raw_code.set_source_position_table(*position_table_);
    }
    raw_code.set_handler_table_offset(
        code_desc_.handler_table_offset_relative());
    raw_code.set_constant_pool_offset(
        code_desc_.constant_pool_offset_relative());
    raw_code.set_code_comments_offset(
        code_desc_.code_comments_offset_relative());
    raw_code.set_unwinding_info_offset(
        code_desc_.unwinding_info_offset_relative());

    // Allow self references to created code object by patching the handle to
    // point to the newly allocated Code object.
    Handle<Object> self_reference;
    if (self_reference_.ToHandle(&self_reference)) {
      DCHECK(self_reference->IsOddball());
      DCHECK_EQ(Oddball::cast(*self_reference).kind(),
                Oddball::kSelfReferenceMarker);
      DCHECK_NE(kind_, CodeKind::BASELINE);
      if (isolate_->IsGeneratingEmbeddedBuiltins()) {
        isolate_->builtins_constants_table_builder()->PatchSelfReference(
            self_reference, code);
      }
      self_reference.PatchValue(*code);
    }

    // Likewise, any references to the basic block counters marker need to be
    // updated to point to the newly-allocated counters array.
    if (!on_heap_profiler_data.is_null()) {
      isolate_->builtins_constants_table_builder()
          ->PatchBasicBlockCountersReference(
              handle(on_heap_profiler_data->counts(), isolate_));
    }

    // Migrate generated code.
    // The generated code can contain embedded objects (typically from
    // handles) in a pointer-to-tagged-value format (i.e. with indirection
    // like a handle) that are dereferenced during the copy to point directly
    // to the actual heap objects. These pointers can include references to
    // the code object itself, through the self_reference parameter.
    raw_code.CopyFromNoFlush(*reloc_info, heap, code_desc_);

    raw_code.clear_padding();

    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
      raw_code.set_main_cage_base(isolate_->cage_base(), kRelaxedStore);
      data_container->SetCodeAndEntryPoint(isolate_, raw_code);
    }
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) HeapObject::VerifyCodePointer(isolate_, raw_code);
#endif

    // Flush the instruction cache before changing the permissions.
    // Note: we do this before setting permissions to ReadExecute because on
    // some older ARM kernels there is a bug which causes an access error on
    // cache flush instructions to trigger access error on non-writable memory.
    // See https://bugs.chromium.org/p/v8/issues/detail?id=8157
    raw_code.FlushICache();
  }

  // Optionally record a disassembly of the code for verbose profiling.
  if (profiler_data_ && FLAG_turbo_profiling_verbose) {
#ifdef ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os, isolate_);
    if (!on_heap_profiler_data.is_null()) {
      Handle<String> disassembly =
          isolate_->factory()->NewStringFromAsciiChecked(os.str().c_str(),
                                                         AllocationType::kOld);
      on_heap_profiler_data->set_code(*disassembly);
    } else {
      profiler_data_->SetCode(os);
    }
#endif  // ENABLE_DISASSEMBLER
  }

  return code;
}
266 
267 // TODO(victorgomes): Unify the two AllocateCodes
// Allocates the raw Code heap object on the main heap and installs its map.
// When |retry_allocation_or_fail| is true the allocator retries (with GCs)
// and crashes on failure; otherwise an empty MaybeHandle is returned on
// failure. The returned object is NOT fully initialized — the caller must
// finish initialization without triggering any allocation.
MaybeHandle<Code> Factory::CodeBuilder::AllocateCode(
    bool retry_allocation_or_fail) {
  Heap* heap = isolate_->heap();
  HeapAllocator* allocator = heap->allocator();
  HeapObject result;
  // Non-executable code (and everything when the external code space is
  // enabled for the former case) still needs the code space; only
  // non-executable code without external code space goes read-only.
  AllocationType allocation_type = V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_
                                       ? AllocationType::kCode
                                       : AllocationType::kReadOnly;
  const int object_size = Code::SizeFor(code_desc_.body_size());
  if (retry_allocation_or_fail) {
    result = allocator->AllocateRawWith<HeapAllocator::kRetryOrFail>(
        object_size, allocation_type, AllocationOrigin::kRuntime);
  } else {
    result = allocator->AllocateRawWith<HeapAllocator::kLightRetry>(
        object_size, allocation_type, AllocationOrigin::kRuntime);
    // Return an empty handle if we cannot allocate the code object.
    if (result.is_null()) return MaybeHandle<Code>();
  }

  // The code object has not been fully initialized yet.  We rely on the
  // fact that no allocation will happen from this point on.
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*isolate_->factory()->code_map(),
                                  SKIP_WRITE_BARRIER);
  Handle<Code> code = handle(Code::cast(result), isolate_);
  if (is_executable_) {
    DCHECK(IsAligned(code->address(), kCodeAlignment));
    DCHECK_IMPLIES(
        !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
        heap->code_region().contains(code->address()));
  }
  return code;
}
301 
// Background-thread counterpart of AllocateCode: allocates the raw Code
// object through the LocalHeap of the compiling thread. Like AllocateCode,
// the result is only partially initialized and the caller must complete it
// without allocating.
// NOTE(review): |retry_allocation_or_fail| is accepted but unused here —
// LocalHeap::AllocateRaw's failure behavior applies unconditionally.
MaybeHandle<Code> Factory::CodeBuilder::AllocateConcurrentSparkplugCode(
    bool retry_allocation_or_fail) {
  LocalHeap* heap = local_isolate_->heap();
  AllocationType allocation_type = V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_
                                       ? AllocationType::kCode
                                       : AllocationType::kReadOnly;
  const int object_size = Code::SizeFor(code_desc_.body_size());
  HeapObject result;
  if (!heap->AllocateRaw(object_size, allocation_type).To(&result)) {
    return MaybeHandle<Code>();
  }
  CHECK(!result.is_null());

  // The code object has not been fully initialized yet.  We rely on the
  // fact that no allocation will happen from this point on.
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*local_isolate_->factory()->code_map(),
                                  SKIP_WRITE_BARRIER);
  Handle<Code> code = handle(Code::cast(result), local_isolate_);
  DCHECK_IMPLIES(is_executable_, IsAligned(code->address(), kCodeAlignment));
  return code;
}
324 
TryBuild()325 MaybeHandle<Code> Factory::CodeBuilder::TryBuild() {
326   return BuildInternal(false);
327 }
328 
Build()329 Handle<Code> Factory::CodeBuilder::Build() {
330   return BuildInternal(true).ToHandleChecked();
331 }
332 
// Allocates |size| bytes of raw heap storage, retrying (with GCs) and
// crashing on failure. The returned object has no map yet.
HeapObject Factory::AllocateRaw(int size, AllocationType allocation,
                                AllocationAlignment alignment) {
  return allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
      size, allocation, AllocationOrigin::kRuntime, alignment);
}
338 
// Allocates an object of |map|'s instance size and, when |allocation_site|
// is non-null, co-allocates an AllocationMemento placed directly behind the
// object (the allocation is enlarged by AllocationMemento::kSize and the
// memento is carved out of that tail).
HeapObject Factory::AllocateRawWithAllocationSite(
    Handle<Map> map, AllocationType allocation,
    Handle<AllocationSite> allocation_site) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  if (!allocation_site.is_null()) {
    DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
    size += AllocationMemento::kSize;
  }
  HeapObject result = allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
      size, allocation);
  // Young-generation objects need no write barrier for the map install.
  WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
                                            ? SKIP_WRITE_BARRIER
                                            : UPDATE_WRITE_BARRIER;
  result.set_map_after_allocation(*map, write_barrier_mode);
  if (!allocation_site.is_null()) {
    // The memento lives at the first word past the object proper.
    AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
        Object(result.ptr() + map->instance_size()));
    InitializeAllocationMemento(alloc_memento, *allocation_site);
  }
  return result;
}
361 
// Installs the map and back-pointer of a freshly carved-out
// AllocationMemento, and bumps the site's creation counter when
// pretenuring bookkeeping is enabled.
void Factory::InitializeAllocationMemento(AllocationMemento memento,
                                          AllocationSite allocation_site) {
  DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
  memento.set_map_after_allocation(*allocation_memento_map(),
                                   SKIP_WRITE_BARRIER);
  memento.set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
  if (FLAG_allocation_site_pretenuring) {
    allocation_site.IncrementMementoCreateCount();
  }
}
372 
// Allocates an uninitialized object of |map|'s instance size and installs
// the map. Fields beyond the map are left for the caller to initialize.
HeapObject Factory::New(Handle<Map> map, AllocationType allocation) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  HeapObject result = allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
      size, allocation);
  // New space objects are allocated white.
  WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
                                            ? SKIP_WRITE_BARRIER
                                            : UPDATE_WRITE_BARRIER;
  result.set_map_after_allocation(*map, write_barrier_mode);
  return result;
}
385 
// Allocates |size| bytes and immediately formats them as a filler object,
// so the heap stays iterable over the reserved region.
Handle<HeapObject> Factory::NewFillerObject(int size,
                                            AllocationAlignment alignment,
                                            AllocationType allocation,
                                            AllocationOrigin origin) {
  Heap* heap = isolate()->heap();
  HeapObject result = allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
      size, allocation, origin, alignment);
  heap->CreateFillerObjectAt(result.address(), size, ClearRecordedSlots::kNo);
  return Handle<HeapObject>(result, isolate());
}
396 
// Allocates a PrototypeInfo struct in old space with all fields in their
// "empty" state (no users, unregistered, no module namespace).
Handle<PrototypeInfo> Factory::NewPrototypeInfo() {
  auto result = NewStructInternal<PrototypeInfo>(PROTOTYPE_INFO_TYPE,
                                                 AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  result.set_prototype_users(Smi::zero());
  result.set_registry_slot(PrototypeInfo::UNREGISTERED);
  result.set_bit_field(0);
  result.set_module_namespace(*undefined_value(), SKIP_WRITE_BARRIER);
  return handle(result, isolate());
}
407 
// Allocates an EnumCache struct in old space holding the given key and
// index arrays.
Handle<EnumCache> Factory::NewEnumCache(Handle<FixedArray> keys,
                                        Handle<FixedArray> indices) {
  auto result =
      NewStructInternal<EnumCache>(ENUM_CACHE_TYPE, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  result.set_keys(*keys);
  result.set_indices(*indices);
  return handle(result, isolate());
}
417 
// Allocates a Tuple2 struct holding the two given values.
Handle<Tuple2> Factory::NewTuple2(Handle<Object> value1, Handle<Object> value2,
                                  AllocationType allocation) {
  auto result = NewStructInternal<Tuple2>(TUPLE2_TYPE, allocation);
  DisallowGarbageCollection no_gc;
  result.set_value1(*value1);
  result.set_value2(*value2);
  return handle(result, isolate());
}
426 
// Allocates an Oddball in the read-only space and initializes its
// string/number/typeof representations and kind tag.
Handle<Oddball> Factory::NewOddball(Handle<Map> map, const char* to_string,
                                    Handle<Object> to_number,
                                    const char* type_of, byte kind) {
  Handle<Oddball> oddball(Oddball::cast(New(map, AllocationType::kReadOnly)),
                          isolate());
  Oddball::Initialize(isolate(), oddball, to_string, to_number, type_of, kind);
  return oddball;
}
435 
NewSelfReferenceMarker()436 Handle<Oddball> Factory::NewSelfReferenceMarker() {
437   return NewOddball(self_reference_marker_map(), "self_reference_marker",
438                     handle(Smi::FromInt(-1), isolate()), "undefined",
439                     Oddball::kSelfReferenceMarker);
440 }
441 
NewBasicBlockCountersMarker()442 Handle<Oddball> Factory::NewBasicBlockCountersMarker() {
443   return NewOddball(basic_block_counters_marker_map(),
444                     "basic_block_counters_marker",
445                     handle(Smi::FromInt(-1), isolate()), "undefined",
446                     Oddball::kBasicBlockCountersMarker);
447 }
448 
// Allocates a PropertyArray of |length| slots, all filled with undefined.
// Zero-length requests share the canonical empty array.
Handle<PropertyArray> Factory::NewPropertyArray(int length,
                                                AllocationType allocation) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_property_array();
  HeapObject result = AllocateRawFixedArray(length, allocation);
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*property_array_map(), SKIP_WRITE_BARRIER);
  PropertyArray array = PropertyArray::cast(result);
  array.initialize_length(length);
  MemsetTagged(array.data_start(), read_only_roots().undefined_value(), length);
  return handle(array, isolate());
}
461 
// Like NewFixedArray, but returns an empty MaybeHandle instead of retrying
// when the raw allocation fails. Elements are initialized to undefined.
MaybeHandle<FixedArray> Factory::TryNewFixedArray(
    int length, AllocationType allocation_type) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_fixed_array();

  int size = FixedArray::SizeFor(length);
  Heap* heap = isolate()->heap();
  AllocationResult allocation = heap->AllocateRaw(size, allocation_type);
  HeapObject result;
  if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
  // Arrays too large for a regular page land on a LargePage; enable its
  // incremental-marking progress bar when that feature is on.
  if ((size > heap->MaxRegularHeapObjectSize(allocation_type)) &&
      FLAG_use_marking_progress_bar) {
    LargePage::FromHeapObject(result)->ProgressBar().Enable();
  }
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
  FixedArray array = FixedArray::cast(result);
  array.set_length(length);
  MemsetTagged(array.data_start(), *undefined_value(), length);
  return handle(array, isolate());
}
483 
NewClosureFeedbackCellArray(int length)484 Handle<ClosureFeedbackCellArray> Factory::NewClosureFeedbackCellArray(
485     int length) {
486   if (length == 0) return empty_closure_feedback_cell_array();
487 
488   Handle<ClosureFeedbackCellArray> feedback_cell_array =
489       Handle<ClosureFeedbackCellArray>::cast(NewFixedArrayWithMap(
490           read_only_roots().closure_feedback_cell_array_map_handle(), length,
491           AllocationType::kOld));
492 
493   return feedback_cell_array;
494 }
495 
// Allocates an old-space FeedbackVector sized from the shared function
// info's feedback metadata, with counters zeroed, no optimized code, and
// all feedback slots initialized to undefined.
Handle<FeedbackVector> Factory::NewFeedbackVector(
    Handle<SharedFunctionInfo> shared,
    Handle<ClosureFeedbackCellArray> closure_feedback_cell_array) {
  int length = shared->feedback_metadata().slot_count();
  DCHECK_LE(0, length);
  int size = FeedbackVector::SizeFor(length);

  FeedbackVector vector = FeedbackVector::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kOld, *feedback_vector_map()));
  DisallowGarbageCollection no_gc;
  vector.set_shared_function_info(*shared);
  // Start with a cleared weak reference: no optimized code attached yet.
  vector.set_maybe_optimized_code(HeapObjectReference::ClearedValue(isolate()),
                                  kReleaseStore);
  vector.set_length(length);
  vector.set_invocation_count(0);
  vector.set_profiler_ticks(0);
  vector.reset_flags();
  vector.set_closure_feedback_cell_array(*closure_feedback_cell_array);

  // TODO(leszeks): Initialize based on the feedback metadata.
  MemsetTagged(ObjectSlot(vector.slots_start()), *undefined_value(), length);
  return handle(vector, isolate());
}
519 
// Allocates a young-generation EmbedderDataArray of |length| slots, each
// initialized to undefined via EmbedderDataSlot (which handles the
// raw/tagged dual representation of embedder slots).
Handle<EmbedderDataArray> Factory::NewEmbedderDataArray(int length) {
  DCHECK_LE(0, length);
  int size = EmbedderDataArray::SizeFor(length);
  EmbedderDataArray array = EmbedderDataArray::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kYoung, *embedder_data_array_map()));
  DisallowGarbageCollection no_gc;
  array.set_length(length);

  if (length > 0) {
    for (int i = 0; i < length; i++) {
      // TODO(v8): consider initializing embedded data array with Smi::zero().
      EmbedderDataSlot(array, i).Initialize(*undefined_value());
    }
  }
  return handle(array, isolate());
}
536 
NewFixedDoubleArrayWithHoles(int length)537 Handle<FixedArrayBase> Factory::NewFixedDoubleArrayWithHoles(int length) {
538   DCHECK_LE(0, length);
539   Handle<FixedArrayBase> array = NewFixedDoubleArray(length);
540   if (length > 0) {
541     Handle<FixedDoubleArray>::cast(array)->FillWithHoles(0, length);
542   }
543   return array;
544 }
545 
// Allocates and initializes a small ordered hash table of type T (set, map,
// or name dictionary). The requested capacity is clamped to
// [T::kMinCapacity, T::kMaxCapacity] and rounded up to a power of two.
template <typename T>
Handle<T> Factory::AllocateSmallOrderedHashTable(Handle<Map> map, int capacity,
                                                 AllocationType allocation) {
  // Capacity must be a power of two, since we depend on being able
  // to divide and multiple by 2 (kLoadFactor) to derive capacity
  // from number of buckets. If we decide to change kLoadFactor
  // to something other than 2, capacity should be stored as another
  // field of this object.
  DCHECK_EQ(T::kLoadFactor, 2);
  capacity =
      base::bits::RoundUpToPowerOfTwo32(std::max({T::kMinCapacity, capacity}));
  capacity = std::min({capacity, T::kMaxCapacity});

  DCHECK_LT(0, capacity);
  DCHECK_EQ(0, capacity % T::kLoadFactor);

  int size = T::SizeFor(capacity);
  HeapObject result = AllocateRawWithImmortalMap(size, allocation, *map);
  Handle<T> table(T::cast(result), isolate());
  table->Initialize(isolate(), capacity);
  return table;
}
568 
NewSmallOrderedHashSet(int capacity,AllocationType allocation)569 Handle<SmallOrderedHashSet> Factory::NewSmallOrderedHashSet(
570     int capacity, AllocationType allocation) {
571   return AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
572       small_ordered_hash_set_map(), capacity, allocation);
573 }
574 
NewSmallOrderedHashMap(int capacity,AllocationType allocation)575 Handle<SmallOrderedHashMap> Factory::NewSmallOrderedHashMap(
576     int capacity, AllocationType allocation) {
577   return AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
578       small_ordered_hash_map_map(), capacity, allocation);
579 }
580 
NewSmallOrderedNameDictionary(int capacity,AllocationType allocation)581 Handle<SmallOrderedNameDictionary> Factory::NewSmallOrderedNameDictionary(
582     int capacity, AllocationType allocation) {
583   Handle<SmallOrderedNameDictionary> dict =
584       AllocateSmallOrderedHashTable<SmallOrderedNameDictionary>(
585           small_ordered_name_dictionary_map(), capacity, allocation);
586   dict->SetHash(PropertyArray::kNoHashSentinel);
587   return dict;
588 }
589 
NewOrderedHashSet()590 Handle<OrderedHashSet> Factory::NewOrderedHashSet() {
591   return OrderedHashSet::Allocate(isolate(), OrderedHashSet::kInitialCapacity,
592                                   AllocationType::kYoung)
593       .ToHandleChecked();
594 }
595 
NewOrderedHashMap()596 Handle<OrderedHashMap> Factory::NewOrderedHashMap() {
597   return OrderedHashMap::Allocate(isolate(), OrderedHashMap::kInitialCapacity,
598                                   AllocationType::kYoung)
599       .ToHandleChecked();
600 }
601 
NewOrderedNameDictionary(int capacity)602 Handle<OrderedNameDictionary> Factory::NewOrderedNameDictionary(int capacity) {
603   return OrderedNameDictionary::Allocate(isolate(), capacity,
604                                          AllocationType::kYoung)
605       .ToHandleChecked();
606 }
607 
// Allocates a NameDictionary with room for at least |at_least_space_for|
// entries; sizing is delegated to NameDictionary::New.
Handle<NameDictionary> Factory::NewNameDictionary(int at_least_space_for) {
  return NameDictionary::New(isolate(), at_least_space_for);
}
611 
// Allocates a young-generation PropertyDescriptorObject with no flags set
// and value/get/set all initialized to the hole.
Handle<PropertyDescriptorObject> Factory::NewPropertyDescriptorObject() {
  auto object = NewStructInternal<PropertyDescriptorObject>(
      PROPERTY_DESCRIPTOR_OBJECT_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  object.set_flags(0);
  Oddball the_hole = read_only_roots().the_hole_value();
  object.set_value(the_hole, SKIP_WRITE_BARRIER);
  object.set_get(the_hole, SKIP_WRITE_BARRIER);
  object.set_set(the_hole, SKIP_WRITE_BARRIER);
  return handle(object, isolate());
}
623 
// Bootstraps the canonical zero-capacity SwissNameDictionary in read-only
// space. Must run exactly once, before the corresponding read-only root is
// set (checked by the DCHECK below).
Handle<SwissNameDictionary> Factory::CreateCanonicalEmptySwissNameDictionary() {
  // This function is only supposed to be used to create the canonical empty
  // version and should not be used afterwards.
  DCHECK_EQ(kNullAddress, ReadOnlyRoots(isolate()).at(
                              RootIndex::kEmptySwissPropertyDictionary));

  ReadOnlyRoots roots(isolate());

  Handle<ByteArray> empty_meta_table =
      NewByteArray(SwissNameDictionary::kMetaTableEnumerationDataStartIndex,
                   AllocationType::kReadOnly);

  Map map = roots.swiss_name_dictionary_map();
  int size = SwissNameDictionary::SizeFor(0);
  HeapObject obj =
      AllocateRawWithImmortalMap(size, AllocationType::kReadOnly, map);
  SwissNameDictionary result = SwissNameDictionary::cast(obj);
  result.Initialize(isolate(), *empty_meta_table, 0);
  return handle(result, isolate());
}
644 
645 // Internalized strings are created in the old generation (data space).
// Internalizes a UTF-8 encoded string. Pure-ASCII input is internalized
// directly from the raw bytes; otherwise the input is first decoded into a
// temporary one-byte or two-byte buffer as its content requires.
Handle<String> Factory::InternalizeUtf8String(
    const base::Vector<const char>& string) {
  base::Vector<const uint8_t> utf8_data =
      base::Vector<const uint8_t>::cast(string);
  Utf8Decoder decoder(utf8_data);
  // ASCII: the UTF-8 bytes are already the one-byte representation.
  if (decoder.is_ascii()) return InternalizeString(utf8_data);
  if (decoder.is_one_byte()) {
    // Latin-1 range: decode into a one-byte buffer first.
    std::unique_ptr<uint8_t[]> buffer(new uint8_t[decoder.utf16_length()]);
    decoder.Decode(buffer.get(), utf8_data);
    return InternalizeString(
        base::Vector<const uint8_t>(buffer.get(), decoder.utf16_length()));
  }
  // General case: decode into a UTF-16 buffer.
  std::unique_ptr<uint16_t[]> buffer(new uint16_t[decoder.utf16_length()]);
  decoder.Decode(buffer.get(), utf8_data);
  return InternalizeString(
      base::Vector<const base::uc16>(buffer.get(), decoder.utf16_length()));
}
663 
// Internalizes the [from, from + length) substring of a sequential string,
// optionally converting two-byte content to one-byte when possible
// (|convert_encoding|). Instantiated below for one- and two-byte strings.
template <typename SeqString>
Handle<String> Factory::InternalizeString(Handle<SeqString> string, int from,
                                          int length, bool convert_encoding) {
  SeqSubStringKey<SeqString> key(isolate(), string, from, length,
                                 convert_encoding);
  return InternalizeStringWithKey(&key);
}

// Explicit instantiations for the two sequential string representations.
template Handle<String> Factory::InternalizeString(
    Handle<SeqOneByteString> string, int from, int length,
    bool convert_encoding);
template Handle<String> Factory::InternalizeString(
    Handle<SeqTwoByteString> string, int from, int length,
    bool convert_encoding);
678 
NewStringFromOneByte(const base::Vector<const uint8_t> & string,AllocationType allocation)679 MaybeHandle<String> Factory::NewStringFromOneByte(
680     const base::Vector<const uint8_t>& string, AllocationType allocation) {
681   DCHECK_NE(allocation, AllocationType::kReadOnly);
682   int length = string.length();
683   if (length == 0) return empty_string();
684   if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
685   Handle<SeqOneByteString> result;
686   ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
687                              NewRawOneByteString(string.length(), allocation),
688                              String);
689 
690   DisallowGarbageCollection no_gc;
691   // Copy the characters into the new object.
692   CopyChars(SeqOneByteString::cast(*result).GetChars(no_gc), string.begin(),
693             length);
694   return result;
695 }
696 
// Creates a new string from UTF-8 data. The result is a sequential one-byte
// string when the decoded content fits in one byte per character, otherwise
// a sequential two-byte string.
MaybeHandle<String> Factory::NewStringFromUtf8(
    const base::Vector<const char>& string, AllocationType allocation) {
  base::Vector<const uint8_t> utf8_data =
      base::Vector<const uint8_t>::cast(string);
  Utf8Decoder decoder(utf8_data);

  if (decoder.utf16_length() == 0) return empty_string();

  if (decoder.is_one_byte()) {
    // Allocate string.
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate(), result,
        NewRawOneByteString(decoder.utf16_length(), allocation), String);

    // No GC while the raw character pointer below is live.
    DisallowGarbageCollection no_gc;
    decoder.Decode(result->GetChars(no_gc), utf8_data);
    return result;
  }

  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(decoder.utf16_length(), allocation), String);

  // No GC while the raw character pointer below is live.
  DisallowGarbageCollection no_gc;
  decoder.Decode(result->GetChars(no_gc), utf8_data);
  return result;
}
727 
// Decodes the UTF-8 byte range [begin, begin + length) of |str| into a new
// string. Because the allocations below may trigger GC and move |str|, the
// raw character pointer into |str| is re-derived after each allocation.
MaybeHandle<String> Factory::NewStringFromUtf8SubString(
    Handle<SeqOneByteString> str, int begin, int length,
    AllocationType allocation) {
  base::Vector<const uint8_t> utf8_data;
  {
    // Scope the raw pointer: it becomes stale at the next allocation.
    DisallowGarbageCollection no_gc;
    utf8_data =
        base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
  }
  Utf8Decoder decoder(utf8_data);

  if (length == 1) {
    uint16_t t;
    // Decode even in the case of length 1 since it can be a bad character.
    decoder.Decode(&t, utf8_data);
    return LookupSingleCharacterStringFromCode(t);
  }

  if (decoder.is_ascii()) {
    // If the string is ASCII, we can just make a substring.
    // TODO(v8): the allocation flag is ignored in this case.
    return NewSubString(str, begin, begin + length);
  }

  DCHECK_GT(decoder.utf16_length(), 0);

  if (decoder.is_one_byte()) {
    // Allocate string.
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate(), result,
        NewRawOneByteString(decoder.utf16_length(), allocation), String);
    DisallowGarbageCollection no_gc;
    // Update pointer references, since the original string may have moved after
    // allocation.
    utf8_data =
        base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
    decoder.Decode(result->GetChars(no_gc), utf8_data);
    return result;
  }

  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(decoder.utf16_length(), allocation), String);

  DisallowGarbageCollection no_gc;
  // Update pointer references, since the original string may have moved after
  // allocation.
  utf8_data = base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
  decoder.Decode(result->GetChars(no_gc), utf8_data);
  return result;
}
782 
NewStringFromTwoByte(const base::uc16 * string,int length,AllocationType allocation)783 MaybeHandle<String> Factory::NewStringFromTwoByte(const base::uc16* string,
784                                                   int length,
785                                                   AllocationType allocation) {
786   DCHECK_NE(allocation, AllocationType::kReadOnly);
787   if (length == 0) return empty_string();
788   if (String::IsOneByte(string, length)) {
789     if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
790     Handle<SeqOneByteString> result;
791     ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
792                                NewRawOneByteString(length, allocation), String);
793     DisallowGarbageCollection no_gc;
794     CopyChars(result->GetChars(no_gc), string, length);
795     return result;
796   } else {
797     Handle<SeqTwoByteString> result;
798     ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
799                                NewRawTwoByteString(length, allocation), String);
800     DisallowGarbageCollection no_gc;
801     CopyChars(result->GetChars(no_gc), string, length);
802     return result;
803   }
804 }
805 
NewStringFromTwoByte(const base::Vector<const base::uc16> & string,AllocationType allocation)806 MaybeHandle<String> Factory::NewStringFromTwoByte(
807     const base::Vector<const base::uc16>& string, AllocationType allocation) {
808   return NewStringFromTwoByte(string.begin(), string.length(), allocation);
809 }
810 
NewStringFromTwoByte(const ZoneVector<base::uc16> * string,AllocationType allocation)811 MaybeHandle<String> Factory::NewStringFromTwoByte(
812     const ZoneVector<base::uc16>* string, AllocationType allocation) {
813   return NewStringFromTwoByte(string->data(), static_cast<int>(string->size()),
814                               allocation);
815 }
816 
namespace {

// Writes the full contents of |s| into |chars| as one-byte data.
// |len| must equal the string's length.
inline void WriteOneByteData(Handle<String> s, uint8_t* chars, int len) {
  DCHECK(s->length() == len);
  String::WriteToFlat(*s, chars, 0, len);
}

// Writes the full contents of |s| into |chars| as two-byte (UTF-16) data.
// |len| must equal the string's length.
inline void WriteTwoByteData(Handle<String> s, uint16_t* chars, int len) {
  DCHECK(s->length() == len);
  String::WriteToFlat(*s, chars, 0, len);
}

}  // namespace
830 
// Allocates a fresh sequential internalized string of |chars| characters and
// fills it from |t| (via WriteOneByteData/WriteTwoByteData). |is_one_byte|
// selects the representation; |hash_field| is the precomputed raw hash
// field. The string goes to read-only space when possible, otherwise to old
// space (possibly refined to a shared space by
// RefineAllocationTypeForInPlaceInternalizableString).
template <bool is_one_byte, typename T>
Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars,
                                                       uint32_t hash_field) {
  DCHECK_LE(0, chars);
  DCHECK_GE(String::kMaxLength, chars);

  // Compute map and object size.
  int size;
  Map map;
  if (is_one_byte) {
    map = *one_byte_internalized_string_map();
    size = SeqOneByteString::SizeFor(chars);
  } else {
    map = *internalized_string_map();
    size = SeqTwoByteString::SizeFor(chars);
  }

  String result = String::cast(AllocateRawWithImmortalMap(
      size,
      RefineAllocationTypeForInPlaceInternalizableString(
          CanAllocateInReadOnlySpace() ? AllocationType::kReadOnly
                                       : AllocationType::kOld,
          map),
      map));
  // |result| is a raw object; no GC may run until all fields are written.
  DisallowGarbageCollection no_gc;
  result.set_length(chars);
  result.set_raw_hash_field(hash_field);
  DCHECK_EQ(size, result.Size());

  if (is_one_byte) {
    WriteOneByteData(t, SeqOneByteString::cast(result).GetChars(no_gc), chars);
  } else {
    WriteTwoByteData(t, SeqTwoByteString::cast(result).GetChars(no_gc), chars);
  }
  return handle(result, isolate());
}
867 
NewInternalizedStringImpl(Handle<String> string,int chars,uint32_t hash_field)868 Handle<String> Factory::NewInternalizedStringImpl(Handle<String> string,
869                                                   int chars,
870                                                   uint32_t hash_field) {
871   if (string->IsOneByteRepresentation()) {
872     return AllocateInternalizedStringImpl<true>(string, chars, hash_field);
873   }
874   return AllocateInternalizedStringImpl<false>(string, chars, hash_field);
875 }
876 
// Decides how |string| should be internalized: converted in place (the
// matching internalized map is stored in |internalized_map|), copied into a
// new internalized string, or nothing to do because it already is one.
StringTransitionStrategy Factory::ComputeInternalizationStrategyForString(
    Handle<String> string, MaybeHandle<Map>* internalized_map) {
  // Do not internalize young strings in-place: This allows us to ignore both
  // string table and stub cache on scavenges.
  if (Heap::InYoungGeneration(*string)) {
    return StringTransitionStrategy::kCopy;
  }
  DCHECK_NOT_NULL(internalized_map);
  DisallowGarbageCollection no_gc;
  // This method may be called concurrently, so snapshot the map from the input
  // string instead of the calling IsType methods on HeapObject, which would
  // reload the map each time.
  Map map = string->map();
  *internalized_map = GetInPlaceInternalizedStringMap(map);
  if (!internalized_map->is_null()) {
    return StringTransitionStrategy::kInPlace;
  }
  if (InstanceTypeChecker::IsInternalizedString(map.instance_type())) {
    return StringTransitionStrategy::kAlreadyTransitioned;
  }
  return StringTransitionStrategy::kCopy;
}
899 
// Allocates a new internalized external string that mirrors |string|'s
// length and hash. The resource pointer is set to null here and the string
// is registered with the heap's external-string table.
// NOTE(review): SetResource(isolate(), nullptr) implies the caller installs
// the real resource afterwards -- confirm at call sites.
template <class StringClass>
Handle<StringClass> Factory::InternalizeExternalString(Handle<String> string) {
  // The map lookup must succeed: callers only pass strings whose map has an
  // in-place internalized counterpart.
  Handle<Map> map =
      GetInPlaceInternalizedStringMap(string->map()).ToHandleChecked();
  StringClass external_string =
      StringClass::cast(New(map, AllocationType::kOld));
  // Raw object in hand; no GC until all fields are initialized.
  DisallowGarbageCollection no_gc;
  external_string.AllocateExternalPointerEntries(isolate());
  StringClass cast_string = StringClass::cast(*string);
  external_string.set_length(cast_string.length());
  external_string.set_raw_hash_field(cast_string.raw_hash_field());
  external_string.SetResource(isolate(), nullptr);
  isolate()->heap()->RegisterExternalString(external_string);
  return handle(external_string, isolate());
}

// Explicit instantiations for the two external string representations.
template Handle<ExternalOneByteString>
    Factory::InternalizeExternalString<ExternalOneByteString>(Handle<String>);
template Handle<ExternalTwoByteString>
    Factory::InternalizeExternalString<ExternalTwoByteString>(Handle<String>);
920 
// Decides how |string| should become a shared string: transitioned in place
// (the shared map is stored in |shared_map|), copied, or nothing to do
// because it is already shared. Only used with --shared-string-table.
StringTransitionStrategy Factory::ComputeSharingStrategyForString(
    Handle<String> string, MaybeHandle<Map>* shared_map) {
  DCHECK(FLAG_shared_string_table);
  // Do not share young strings in-place: there is no shared young space.
  if (Heap::InYoungGeneration(*string)) {
    return StringTransitionStrategy::kCopy;
  }
  DCHECK_NOT_NULL(shared_map);
  DisallowGarbageCollection no_gc;
  InstanceType instance_type = string->map().instance_type();
  if (StringShape(instance_type).IsShared()) {
    return StringTransitionStrategy::kAlreadyTransitioned;
  }
  // Only plain sequential strings can be shared in place; everything else
  // (cons, sliced, external, ...) must be copied.
  switch (instance_type) {
    case STRING_TYPE:
      *shared_map = read_only_roots().shared_string_map_handle();
      return StringTransitionStrategy::kInPlace;
    case ONE_BYTE_STRING_TYPE:
      *shared_map = read_only_roots().shared_one_byte_string_map_handle();
      return StringTransitionStrategy::kInPlace;
    default:
      return StringTransitionStrategy::kCopy;
  }
}
945 
// Returns the canonical single-character string for |code|. Characters in
// the Latin-1 range are served from (and inserted into) the
// single_character_string_cache; other code units are internalized directly.
Handle<String> Factory::LookupSingleCharacterStringFromCode(uint16_t code) {
  if (code <= unibrow::Latin1::kMaxChar) {
    {
      // Scope the raw cache read: InternalizeString below may allocate.
      DisallowGarbageCollection no_gc;
      Object value = single_character_string_cache()->get(code);
      if (value != *undefined_value()) {
        return handle(String::cast(value), isolate());
      }
    }
    // Cache miss: internalize the one-byte character and cache the result.
    uint8_t buffer[] = {static_cast<uint8_t>(code)};
    Handle<String> result =
        InternalizeString(base::Vector<const uint8_t>(buffer, 1));
    single_character_string_cache()->set(code, *result);
    return result;
  }
  uint16_t buffer[] = {code};
  return InternalizeString(base::Vector<const uint16_t>(buffer, 1));
}
964 
NewSurrogatePairString(uint16_t lead,uint16_t trail)965 Handle<String> Factory::NewSurrogatePairString(uint16_t lead, uint16_t trail) {
966   DCHECK_GE(lead, 0xD800);
967   DCHECK_LE(lead, 0xDBFF);
968   DCHECK_GE(trail, 0xDC00);
969   DCHECK_LE(trail, 0xDFFF);
970 
971   Handle<SeqTwoByteString> str =
972       isolate()->factory()->NewRawTwoByteString(2).ToHandleChecked();
973   DisallowGarbageCollection no_gc;
974   base::uc16* dest = str->GetChars(no_gc);
975   dest[0] = lead;
976   dest[1] = trail;
977   return str;
978 }
979 
NewProperSubString(Handle<String> str,int begin,int end)980 Handle<String> Factory::NewProperSubString(Handle<String> str, int begin,
981                                            int end) {
982 #if VERIFY_HEAP
983   if (FLAG_verify_heap) str->StringVerify(isolate());
984 #endif
985   DCHECK(begin > 0 || end < str->length());
986 
987   str = String::Flatten(isolate(), str);
988 
989   int length = end - begin;
990   if (length <= 0) return empty_string();
991   if (length == 1) {
992     return LookupSingleCharacterStringFromCode(str->Get(begin));
993   }
994   if (length == 2) {
995     // Optimization for 2-byte strings often used as keys in a decompression
996     // dictionary.  Check whether we already have the string in the string
997     // table to prevent creation of many unnecessary strings.
998     uint16_t c1 = str->Get(begin);
999     uint16_t c2 = str->Get(begin + 1);
1000     return MakeOrFindTwoCharacterString(c1, c2);
1001   }
1002 
1003   if (!FLAG_string_slices || length < SlicedString::kMinLength) {
1004     if (str->IsOneByteRepresentation()) {
1005       Handle<SeqOneByteString> result =
1006           NewRawOneByteString(length).ToHandleChecked();
1007       DisallowGarbageCollection no_gc;
1008       uint8_t* dest = result->GetChars(no_gc);
1009       String::WriteToFlat(*str, dest, begin, length);
1010       return result;
1011     } else {
1012       Handle<SeqTwoByteString> result =
1013           NewRawTwoByteString(length).ToHandleChecked();
1014       DisallowGarbageCollection no_gc;
1015       base::uc16* dest = result->GetChars(no_gc);
1016       String::WriteToFlat(*str, dest, begin, length);
1017       return result;
1018     }
1019   }
1020 
1021   int offset = begin;
1022 
1023   if (str->IsSlicedString()) {
1024     Handle<SlicedString> slice = Handle<SlicedString>::cast(str);
1025     str = Handle<String>(slice->parent(), isolate());
1026     offset += slice->offset();
1027   }
1028   if (str->IsThinString()) {
1029     Handle<ThinString> thin = Handle<ThinString>::cast(str);
1030     str = handle(thin->actual(), isolate());
1031   }
1032 
1033   DCHECK(str->IsSeqString() || str->IsExternalString());
1034   Handle<Map> map = str->IsOneByteRepresentation()
1035                         ? sliced_one_byte_string_map()
1036                         : sliced_string_map();
1037   SlicedString slice = SlicedString::cast(New(map, AllocationType::kYoung));
1038   DisallowGarbageCollection no_gc;
1039   slice.set_raw_hash_field(String::kEmptyHashField);
1040   slice.set_length(length);
1041   slice.set_parent(*str);
1042   slice.set_offset(offset);
1043   return handle(slice, isolate());
1044 }
1045 
// Wraps an external one-byte resource in a new ExternalOneByteString.
// Throws RangeError if the resource exceeds String::kMaxLength. The string
// is registered with the heap so the resource is disposed with it.
MaybeHandle<String> Factory::NewExternalStringFromOneByte(
    const ExternalOneByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  // Uncacheable resources get a map without the cached-data slot.
  Handle<Map> map = resource->IsCacheable()
                        ? external_one_byte_string_map()
                        : uncached_external_one_byte_string_map();
  ExternalOneByteString external_string =
      ExternalOneByteString::cast(New(map, AllocationType::kOld));
  // Raw object in hand; no GC until all fields are initialized.
  DisallowGarbageCollection no_gc;
  external_string.AllocateExternalPointerEntries(isolate());
  external_string.set_length(static_cast<int>(length));
  external_string.set_raw_hash_field(String::kEmptyHashField);
  external_string.SetResource(isolate(), resource);

  isolate()->heap()->RegisterExternalString(external_string);

  return Handle<String>(external_string, isolate());
}
1069 
// Wraps an external two-byte resource in a new ExternalTwoByteString.
// Throws RangeError if the resource exceeds String::kMaxLength. The string
// is registered with the heap so the resource is disposed with it.
MaybeHandle<String> Factory::NewExternalStringFromTwoByte(
    const ExternalTwoByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  // Uncacheable resources get a map without the cached-data slot.
  Handle<Map> map = resource->IsCacheable() ? external_string_map()
                                            : uncached_external_string_map();
  ExternalTwoByteString string =
      ExternalTwoByteString::cast(New(map, AllocationType::kOld));
  // Raw object in hand; no GC until all fields are initialized.
  DisallowGarbageCollection no_gc;
  string.AllocateExternalPointerEntries(isolate());
  string.set_length(static_cast<int>(length));
  string.set_raw_hash_field(String::kEmptyHashField);
  string.SetResource(isolate(), resource);

  isolate()->heap()->RegisterExternalString(string);

  return Handle<ExternalTwoByteString>(string, isolate());
}
1092 
NewJSStringIterator(Handle<String> string)1093 Handle<JSStringIterator> Factory::NewJSStringIterator(Handle<String> string) {
1094   Handle<Map> map(isolate()->native_context()->initial_string_iterator_map(),
1095                   isolate());
1096   Handle<String> flat_string = String::Flatten(isolate(), string);
1097   Handle<JSStringIterator> iterator =
1098       Handle<JSStringIterator>::cast(NewJSObjectFromMap(map));
1099 
1100   DisallowGarbageCollection no_gc;
1101   JSStringIterator raw = *iterator;
1102   raw.set_string(*flat_string);
1103   raw.set_index(0);
1104   return iterator;
1105 }
1106 
// Allocates and initializes a new Symbol with a random identity hash, no
// description, and cleared flags. Returns a raw Symbol; callers must
// handle-ify it before any allocation can happen.
Symbol Factory::NewSymbolInternal(AllocationType allocation) {
  DCHECK(allocation != AllocationType::kYoung);
  // Statically ensure that it is safe to allocate symbols in paged spaces.
  STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);

  Symbol symbol = Symbol::cast(AllocateRawWithImmortalMap(
      Symbol::kSize, allocation, read_only_roots().symbol_map()));
  // Raw object in hand; no GC until all fields are written.
  DisallowGarbageCollection no_gc;
  // Generate a random hash value.
  int hash = isolate()->GenerateIdentityHash(Name::HashBits::kMax);
  symbol.set_raw_hash_field(
      Name::CreateHashFieldValue(hash, Name::HashFieldType::kHash));
  symbol.set_description(read_only_roots().undefined_value(),
                         SKIP_WRITE_BARRIER);
  symbol.set_flags(0);
  DCHECK(!symbol.is_private());
  return symbol;
}
1125 
NewSymbol(AllocationType allocation)1126 Handle<Symbol> Factory::NewSymbol(AllocationType allocation) {
1127   return handle(NewSymbolInternal(allocation), isolate());
1128 }
1129 
NewPrivateSymbol(AllocationType allocation)1130 Handle<Symbol> Factory::NewPrivateSymbol(AllocationType allocation) {
1131   DCHECK(allocation != AllocationType::kYoung);
1132   Symbol symbol = NewSymbolInternal(allocation);
1133   DisallowGarbageCollection no_gc;
1134   symbol.set_is_private(true);
1135   return handle(symbol, isolate());
1136 }
1137 
NewPrivateNameSymbol(Handle<String> name)1138 Handle<Symbol> Factory::NewPrivateNameSymbol(Handle<String> name) {
1139   Symbol symbol = NewSymbolInternal();
1140   DisallowGarbageCollection no_gc;
1141   symbol.set_is_private_name();
1142   symbol.set_description(*name);
1143   return handle(symbol, isolate());
1144 }
1145 
// Allocates a Context of |size| bytes using |map|, with
// |variadic_part_length| slots. Slots past the header are filled with
// undefined. Returns a raw Context; callers must handle-ify before any
// allocation can happen.
Context Factory::NewContextInternal(Handle<Map> map, int size,
                                    int variadic_part_length,
                                    AllocationType allocation) {
  DCHECK_LE(Context::kTodoHeaderSize, size);
  DCHECK(IsAligned(size, kTaggedSize));
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
  DCHECK_LE(Context::SizeFor(variadic_part_length), size);

  HeapObject result = allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
      size, allocation);
  // Install the map first so the GC can interpret the object.
  result.set_map_after_allocation(*map);
  DisallowGarbageCollection no_gc;
  Context context = Context::cast(result);
  context.set_length(variadic_part_length);
  DCHECK_EQ(context.SizeFromMap(*map), size);
  if (size > Context::kTodoHeaderSize) {
    // Clear all variadic slots to undefined.
    ObjectSlot start = context.RawField(Context::kTodoHeaderSize);
    ObjectSlot end = context.RawField(size);
    size_t slot_count = end - start;
    MemsetTagged(start, *undefined_value(), slot_count);
  }
  return context;
}
1169 
// Creates a fresh NativeContext with its own map and all header fields
// initialized to empty/zero defaults. Allocated in old space.
Handle<NativeContext> Factory::NewNativeContext() {
  // Each native context owns its own context map.
  Handle<Map> map = NewMap(NATIVE_CONTEXT_TYPE, kVariableSizeSentinel);
  NativeContext context = NativeContext::cast(NewContextInternal(
      map, NativeContext::kSize, NativeContext::NATIVE_CONTEXT_SLOTS,
      AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  // Link map and context to each other.
  context.set_native_context_map(*map);
  map->set_native_context(context);
  // The ExternalPointerTable is a C++ object.
  context.AllocateExternalPointerEntries(isolate());
  context.set_scope_info(*native_scope_info());
  context.set_previous(Context());
  context.set_extension(*undefined_value());
  context.set_errors_thrown(Smi::zero());
  context.set_math_random_index(Smi::zero());
  context.set_serialized_objects(*empty_fixed_array());
  context.set_microtask_queue(isolate(), nullptr);
  context.set_osr_code_cache(*OSROptimizedCodeCache::Empty(isolate()));
  context.set_retained_maps(*empty_weak_array_list());
  return handle(context, isolate());
}
1191 
// Creates the context for a script scope, chained to |outer|. Allocated in
// old space.
Handle<Context> Factory::NewScriptContext(Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), SCRIPT_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context =
      NewContextInternal(handle(outer->script_context_map(), isolate()),
                         Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kOld);
  // Raw context in hand; no GC until fields are written.
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  DCHECK(context.IsScriptContext());
  return handle(context, isolate());
}
1206 
// Creates an empty ScriptContextTable with a fresh name-to-index hash table.
Handle<ScriptContextTable> Factory::NewScriptContextTable() {
  Handle<ScriptContextTable> context_table = Handle<ScriptContextTable>::cast(
      NewFixedArrayWithMap(read_only_roots().script_context_table_map_handle(),
                           ScriptContextTable::kMinLength));
  // Initial capacity of 16 for the name lookup table.
  Handle<NameToIndexHashTable> names = NameToIndexHashTable::New(isolate(), 16);
  context_table->set_used(0, kReleaseStore);
  context_table->set_names_to_context_index(*names);
  return context_table;
}
1216 
// Creates the context for a module scope; |module| is stored in the
// extension slot. Allocated in old space.
Handle<Context> Factory::NewModuleContext(Handle<SourceTextModule> module,
                                          Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), MODULE_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context = NewContextInternal(
      isolate()->module_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kOld);
  // Raw context in hand; no GC until fields are written.
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  context.set_extension(*module);
  DCHECK(context.IsModuleContext());
  return handle(context, isolate());
}
1232 
// Creates the context for a function or eval scope, chained to |outer|.
// Allocated in the young generation.
Handle<Context> Factory::NewFunctionContext(Handle<Context> outer,
                                            Handle<ScopeInfo> scope_info) {
  // Pick the map matching the scope type; other scope types have their own
  // factory methods.
  Handle<Map> map;
  switch (scope_info->scope_type()) {
    case EVAL_SCOPE:
      map = isolate()->eval_context_map();
      break;
    case FUNCTION_SCOPE:
      map = isolate()->function_context_map();
      break;
    default:
      UNREACHABLE();
  }
  int variadic_part_length = scope_info->ContextLength();
  Context context =
      NewContextInternal(map, Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kYoung);
  // Raw context in hand; no GC until fields are written.
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  return handle(context, isolate());
}
1255 
// Creates the context for a catch block; the caught |thrown_object| lives in
// the slot right after the minimal header.
Handle<Context> Factory::NewCatchContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info,
                                         Handle<Object> thrown_object) {
  DCHECK_EQ(scope_info->scope_type(), CATCH_SCOPE);
  STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
  // TODO(ishell): Take the details from CatchContext class.
  int variadic_part_length = Context::MIN_CONTEXT_SLOTS + 1;
  Context context = NewContextInternal(
      isolate()->catch_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  // Raw context in hand; no GC until fields are written. Write barriers can
  // be skipped because the context is in the young generation.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set(Context::THROWN_OBJECT_INDEX, *thrown_object, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}
1273 
// Creates a context for debug-evaluate. |extension| (optional) becomes the
// extension slot; |wrapped| (optional) is stored in the dedicated
// WRAPPED_CONTEXT_INDEX slot.
Handle<Context> Factory::NewDebugEvaluateContext(Handle<Context> previous,
                                                 Handle<ScopeInfo> scope_info,
                                                 Handle<JSReceiver> extension,
                                                 Handle<Context> wrapped) {
  DCHECK(scope_info->IsDebugEvaluateScope());
  // A null extension is represented by undefined in the extension slot.
  Handle<HeapObject> ext = extension.is_null()
                               ? Handle<HeapObject>::cast(undefined_value())
                               : Handle<HeapObject>::cast(extension);
  // TODO(ishell): Take the details from DebugEvaluateContextContext class.
  int variadic_part_length = Context::MIN_CONTEXT_EXTENDED_SLOTS + 1;
  Context context =
      NewContextInternal(isolate()->debug_evaluate_context_map(),
                         Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kYoung);
  // Raw context in hand; no GC until fields are written. Write barriers can
  // be skipped because the context is in the young generation.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set_extension(*ext, SKIP_WRITE_BARRIER);
  if (!wrapped.is_null()) {
    context.set(Context::WRAPPED_CONTEXT_INDEX, *wrapped, SKIP_WRITE_BARRIER);
  }
  return handle(context, isolate());
}
1298 
// Creates the context for a `with` statement; |extension| is the object the
// statement scopes over.
Handle<Context> Factory::NewWithContext(Handle<Context> previous,
                                        Handle<ScopeInfo> scope_info,
                                        Handle<JSReceiver> extension) {
  DCHECK_EQ(scope_info->scope_type(), WITH_SCOPE);
  // TODO(ishell): Take the details from WithContext class.
  int variadic_part_length = Context::MIN_CONTEXT_EXTENDED_SLOTS;
  Context context = NewContextInternal(
      isolate()->with_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  // Raw context in hand; no GC until fields are written. Write barriers can
  // be skipped because the context is in the young generation.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set_extension(*extension, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}
1315 
// Creates the context for a block or class scope, chained to |previous|.
Handle<Context> Factory::NewBlockContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info) {
  DCHECK_IMPLIES(scope_info->scope_type() != BLOCK_SCOPE,
                 scope_info->scope_type() == CLASS_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context = NewContextInternal(
      isolate()->block_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  // Raw context in hand; no GC until fields are written. Write barriers can
  // be skipped because the context is in the young generation.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}
1330 
// Creates a context used by builtins, with |variadic_part_length| slots,
// an empty scope info, and |native_context| as the previous context.
Handle<Context> Factory::NewBuiltinContext(Handle<NativeContext> native_context,
                                           int variadic_part_length) {
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
  Context context = NewContextInternal(
      isolate()->function_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  // Raw context in hand; no GC until fields are written. Write barriers can
  // be skipped because the context is in the young generation.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(read_only_roots().empty_scope_info(),
                         SKIP_WRITE_BARRIER);
  context.set_previous(*native_context, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}
1344 
NewAliasedArgumentsEntry(int aliased_context_slot)1345 Handle<AliasedArgumentsEntry> Factory::NewAliasedArgumentsEntry(
1346     int aliased_context_slot) {
1347   auto entry = NewStructInternal<AliasedArgumentsEntry>(
1348       ALIASED_ARGUMENTS_ENTRY_TYPE, AllocationType::kYoung);
1349   entry.set_aliased_context_slot(aliased_context_slot);
1350   return handle(entry, isolate());
1351 }
1352 
// Allocates a pretenured AccessorInfo with all fields reset to safe defaults;
// callers fill in the actual name/getter/setter afterwards.
Handle<AccessorInfo> Factory::NewAccessorInfo() {
  auto info =
      NewStructInternal<AccessorInfo>(ACCESSOR_INFO_TYPE, AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  info.set_name(*empty_string(), SKIP_WRITE_BARRIER);
  info.set_flags(0);  // Must clear the flags, it was initialized as undefined.
  info.set_is_sloppy(true);
  info.set_initial_property_attributes(NONE);

  // Clear some other fields that should not be undefined.
  info.set_getter(Smi::zero(), SKIP_WRITE_BARRIER);
  info.set_setter(Smi::zero(), SKIP_WRITE_BARRIER);
  info.set_js_getter(Smi::zero(), SKIP_WRITE_BARRIER);
  return handle(info, isolate());
}
1368 
// Allocates the ErrorStackData struct that backs an Error object's lazily
// formatted stack. Each field holds one of two alternatives, reflected in the
// field names (call-site infos vs. already-formatted stack; limit vs. frames).
Handle<ErrorStackData> Factory::NewErrorStackData(
    Handle<Object> call_site_infos_or_formatted_stack,
    Handle<Object> limit_or_stack_frame_infos) {
  ErrorStackData error_stack_data = NewStructInternal<ErrorStackData>(
      ERROR_STACK_DATA_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  // Young allocation: write barriers may be skipped.
  error_stack_data.set_call_site_infos_or_formatted_stack(
      *call_site_infos_or_formatted_stack, SKIP_WRITE_BARRIER);
  error_stack_data.set_limit_or_stack_frame_infos(*limit_or_stack_frame_infos,
                                                  SKIP_WRITE_BARRIER);
  return handle(error_stack_data, isolate());
}
1381 
AddToScriptList(Handle<Script> script)1382 void Factory::AddToScriptList(Handle<Script> script) {
1383   Handle<WeakArrayList> scripts = script_list();
1384   scripts = WeakArrayList::Append(isolate(), scripts,
1385                                   MaybeObjectHandle::Weak(script));
1386   isolate()->heap()->set_script_list(*scripts);
1387 }
1388 
CloneScript(Handle<Script> script)1389 Handle<Script> Factory::CloneScript(Handle<Script> script) {
1390   Heap* heap = isolate()->heap();
1391   int script_id = isolate()->GetNextScriptId();
1392 #ifdef V8_SCRIPTORMODULE_LEGACY_LIFETIME
1393   Handle<ArrayList> list = ArrayList::New(isolate(), 0);
1394 #endif
1395   Handle<Script> new_script_handle =
1396       Handle<Script>::cast(NewStruct(SCRIPT_TYPE, AllocationType::kOld));
1397   {
1398     DisallowGarbageCollection no_gc;
1399     Script new_script = *new_script_handle;
1400     const Script old_script = *script;
1401     new_script.set_source(old_script.source());
1402     new_script.set_name(old_script.name());
1403     new_script.set_id(script_id);
1404     new_script.set_line_offset(old_script.line_offset());
1405     new_script.set_column_offset(old_script.column_offset());
1406     new_script.set_context_data(old_script.context_data());
1407     new_script.set_type(old_script.type());
1408     new_script.set_line_ends(*undefined_value(), SKIP_WRITE_BARRIER);
1409     new_script.set_eval_from_shared_or_wrapped_arguments_or_sfi_table(
1410         script->eval_from_shared_or_wrapped_arguments_or_sfi_table());
1411     new_script.set_shared_function_infos(*empty_weak_fixed_array(),
1412                                          SKIP_WRITE_BARRIER);
1413     new_script.set_eval_from_position(old_script.eval_from_position());
1414     new_script.set_flags(old_script.flags());
1415     new_script.set_host_defined_options(old_script.host_defined_options());
1416 #ifdef V8_SCRIPTORMODULE_LEGACY_LIFETIME
1417     new_script.set_script_or_modules(*list);
1418 #endif
1419   }
1420 
1421   Handle<WeakArrayList> scripts = script_list();
1422   scripts = WeakArrayList::AddToEnd(isolate(), scripts,
1423                                     MaybeObjectHandle::Weak(new_script_handle));
1424   heap->set_script_list(*scripts);
1425   LOG(isolate(), ScriptEvent(Logger::ScriptEventType::kCreate, script_id));
1426   return new_script_handle;
1427 }
1428 
// Allocates a CallableTask microtask that will invoke |callable| in |context|.
Handle<CallableTask> Factory::NewCallableTask(Handle<JSReceiver> callable,
                                              Handle<Context> context) {
  DCHECK(callable->IsCallable());
  auto microtask = NewStructInternal<CallableTask>(CALLABLE_TASK_TYPE,
                                                   AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  // Young allocation: write barriers may be skipped.
  microtask.set_callable(*callable, SKIP_WRITE_BARRIER);
  microtask.set_context(*context, SKIP_WRITE_BARRIER);
  return handle(microtask, isolate());
}
1439 
// Allocates a CallbackTask microtask wrapping a C++ |callback| and its |data|.
Handle<CallbackTask> Factory::NewCallbackTask(Handle<Foreign> callback,
                                              Handle<Foreign> data) {
  auto microtask = NewStructInternal<CallbackTask>(CALLBACK_TASK_TYPE,
                                                   AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  // Young allocation: write barriers may be skipped.
  microtask.set_callback(*callback, SKIP_WRITE_BARRIER);
  microtask.set_data(*data, SKIP_WRITE_BARRIER);
  return handle(microtask, isolate());
}
1449 
// Allocates the microtask for the PromiseResolveThenableJob: calling |then| on
// |thenable| to eventually settle |promise_to_resolve| (spec: NewPromiseResolveThenableJob).
Handle<PromiseResolveThenableJobTask> Factory::NewPromiseResolveThenableJobTask(
    Handle<JSPromise> promise_to_resolve, Handle<JSReceiver> thenable,
    Handle<JSReceiver> then, Handle<Context> context) {
  DCHECK(then->IsCallable());
  auto microtask = NewStructInternal<PromiseResolveThenableJobTask>(
      PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  // Young allocation: write barriers may be skipped.
  microtask.set_promise_to_resolve(*promise_to_resolve, SKIP_WRITE_BARRIER);
  microtask.set_thenable(*thenable, SKIP_WRITE_BARRIER);
  microtask.set_then(*then, SKIP_WRITE_BARRIER);
  microtask.set_context(*context, SKIP_WRITE_BARRIER);
  return handle(microtask, isolate());
}
1463 
// Wraps a raw external |addr| in a Foreign heap object (stored via the
// external-pointer table when sandboxed pointers are enabled).
Handle<Foreign> Factory::NewForeign(Address addr) {
  // Statically ensure that it is safe to allocate foreigns in paged spaces.
  STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize);
  Map map = *foreign_map();
  Foreign foreign = Foreign::cast(AllocateRawWithImmortalMap(
      map.instance_size(), AllocationType::kYoung, map));
  DisallowGarbageCollection no_gc;
  // The external-pointer entry must exist before the address can be stored.
  foreign.AllocateExternalPointerEntries(isolate());
  foreign.set_foreign_address(isolate(), addr);
  return handle(foreign, isolate());
}
1475 
1476 #if V8_ENABLE_WEBASSEMBLY
// Allocates the WasmTypeInfo attached to a wasm RTT map: the raw type pointer,
// the supertype chain (extended from |opt_parent| if present), and bookkeeping
// for instance size and owning instance.
Handle<WasmTypeInfo> Factory::NewWasmTypeInfo(
    Address type_address, Handle<Map> opt_parent, int instance_size_bytes,
    Handle<WasmInstanceObject> instance) {
  // We pretenure WasmTypeInfo objects for two reasons:
  // (1) They are referenced by Maps, which are assumed to be long-lived,
  //     so pretenuring the WTI is a bit more efficient.
  // (2) The object visitors need to read the WasmTypeInfo to find tagged
  //     fields in Wasm structs; in the middle of a GC cycle that's only
  //     safe to do if the WTI is in old space.
  // The supertypes list is constant after initialization, so we pretenure
  // that too. The subtypes list, however, is expected to grow (and hence be
  // replaced), so we don't pretenure it.
  Handle<FixedArray> supertypes;
  if (opt_parent.is_null()) {
    // Root type: start with a minimum-size array padded with undefined.
    supertypes = NewFixedArray(wasm::kMinimumSupertypeArraySize);
    for (int i = 0; i < supertypes->length(); i++) {
      supertypes->set(i, *undefined_value());
    }
  } else {
    // Copy the parent's supertype chain and append the parent itself. Find
    // the last used slot; undefined entries at the tail are unused padding.
    Handle<FixedArray> parent_supertypes =
        handle(opt_parent->wasm_type_info().supertypes(), isolate());
    int last_defined_index = parent_supertypes->length() - 1;
    while (last_defined_index >= 0 &&
           parent_supertypes->get(last_defined_index).IsUndefined()) {
      last_defined_index--;
    }
    if (last_defined_index == parent_supertypes->length() - 1) {
      // Parent's array is full: grow by one slot for the new entry.
      supertypes = CopyArrayAndGrow(parent_supertypes, 1, AllocationType::kOld);
    } else {
      supertypes = CopyFixedArray(parent_supertypes);
    }
    supertypes->set(last_defined_index + 1, *opt_parent);
  }
  Map map = *wasm_type_info_map();
  WasmTypeInfo result = WasmTypeInfo::cast(AllocateRawWithImmortalMap(
      map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
  result.AllocateExternalPointerEntries(isolate());
  result.set_foreign_address(isolate(), type_address);
  result.set_supertypes(*supertypes);
  result.set_subtypes(ReadOnlyRoots(isolate()).empty_array_list());
  result.set_instance_size(instance_size_bytes);
  result.set_instance(*instance);
  return handle(result, isolate());
}
1522 
// Allocates the WasmApiFunctionRef passed as implicit first argument to
// wasm-to-JS wrappers. |callable| and |suspender| may be null handles, in
// which case the fields are set to undefined.
Handle<WasmApiFunctionRef> Factory::NewWasmApiFunctionRef(
    Handle<JSReceiver> callable, Handle<HeapObject> suspender) {
  Map map = *wasm_api_function_ref_map();
  auto result = WasmApiFunctionRef::cast(AllocateRawWithImmortalMap(
      map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
  result.set_isolate_root(isolate()->isolate_root());
  result.set_native_context(*isolate()->native_context());
  if (!callable.is_null()) {
    result.set_callable(*callable);
  } else {
    result.set_callable(*undefined_value());
  }
  if (!suspender.is_null()) {
    result.set_suspender(*suspender);
  } else {
    result.set_suspender(*undefined_value());
  }
  return handle(result, isolate());
}
1543 
// Allocates a WasmInternalFunction with map |rtt|, pointing at |opt_call_target|
// and |ref|. The code and external-function fields get placeholder values that
// the caller is expected to overwrite.
Handle<WasmInternalFunction> Factory::NewWasmInternalFunction(
    Address opt_call_target, Handle<HeapObject> ref, Handle<Map> rtt) {
  HeapObject raw = AllocateRaw(rtt->instance_size(), AllocationType::kOld);
  raw.set_map_after_allocation(*rtt);
  WasmInternalFunction result = WasmInternalFunction::cast(raw);
  DisallowGarbageCollection no_gc;
  result.AllocateExternalPointerEntries(isolate());
  result.set_foreign_address(isolate(), opt_call_target);
  result.set_ref(*ref);
  // Default values, will be overwritten by the caller.
  result.set_code(*BUILTIN_CODE(isolate(), Abort));
  result.set_external(*undefined_value());
  return handle(result, isolate());
}
1558 
// Allocates the WasmJSFunctionData backing a JS function imported into wasm,
// including its WasmApiFunctionRef and WasmInternalFunction, plus the
// serialized signature used for type checks.
Handle<WasmJSFunctionData> Factory::NewWasmJSFunctionData(
    Address opt_call_target, Handle<JSReceiver> callable, int return_count,
    int parameter_count, Handle<PodArray<wasm::ValueType>> serialized_sig,
    Handle<CodeT> wrapper_code, Handle<Map> rtt, Handle<HeapObject> suspender) {
  Handle<WasmApiFunctionRef> ref = NewWasmApiFunctionRef(callable, suspender);
  Handle<WasmInternalFunction> internal =
      NewWasmInternalFunction(opt_call_target, ref, rtt);
  Map map = *wasm_js_function_data_map();
  WasmJSFunctionData result =
      WasmJSFunctionData::cast(AllocateRawWithImmortalMap(
          map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
  result.set_internal(*internal);
  result.set_wrapper_code(*wrapper_code);
  result.set_serialized_return_count(return_count);
  result.set_serialized_parameter_count(parameter_count);
  result.set_serialized_signature(*serialized_sig);
  return handle(result, isolate());
}
1578 
// Allocates the WasmOnFulfilledData holding the suspender for a wasm
// promise-resume continuation.
Handle<WasmOnFulfilledData> Factory::NewWasmOnFulfilledData(
    Handle<WasmSuspenderObject> suspender) {
  Map map = *wasm_onfulfilled_data_map();
  WasmOnFulfilledData result =
      WasmOnFulfilledData::cast(AllocateRawWithImmortalMap(
          map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
  result.set_suspender(*suspender);
  return handle(result, isolate());
}
1589 
// Allocates the WasmExportedFunctionData backing a wasm function exported to
// JS: internal function, export wrapper, owning instance, function index,
// signature, and the budget before a specialized wrapper is compiled.
Handle<WasmExportedFunctionData> Factory::NewWasmExportedFunctionData(
    Handle<CodeT> export_wrapper, Handle<WasmInstanceObject> instance,
    Address call_target, Handle<Object> ref, int func_index,
    Address sig_address, int wrapper_budget, Handle<Map> rtt) {
  // The raw signature pointer is boxed in a Foreign so it survives as a
  // tagged field.
  Handle<Foreign> sig_foreign = NewForeign(sig_address);
  Handle<WasmInternalFunction> internal =
      NewWasmInternalFunction(call_target, Handle<HeapObject>::cast(ref), rtt);
  Map map = *wasm_exported_function_data_map();
  WasmExportedFunctionData result =
      WasmExportedFunctionData::cast(AllocateRawWithImmortalMap(
          map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
  DCHECK(ref->IsWasmInstanceObject() || ref->IsWasmApiFunctionRef());
  result.set_internal(*internal);
  result.set_wrapper_code(*export_wrapper);
  result.set_instance(*instance);
  result.set_function_index(func_index);
  result.set_signature(*sig_foreign);
  result.set_wrapper_budget(wrapper_budget);
  // We can't skip the write barrier when V8_EXTERNAL_CODE_SPACE is enabled
  // because in this case the CodeT (CodeDataContainer) objects are not
  // immovable.
  result.set_c_wrapper_code(
      *BUILTIN_CODE(isolate(), Illegal),
      V8_EXTERNAL_CODE_SPACE_BOOL ? UPDATE_WRITE_BARRIER : SKIP_WRITE_BARRIER);
  result.set_packed_args_size(0);
  result.set_suspender(*undefined_value());
  return handle(result, isolate());
}
1619 
// Allocates the WasmCapiFunctionData backing a function coming from the wasm
// C API. The WasmApiFunctionRef is created with no callable and no suspender
// (both null), so those fields default to undefined.
Handle<WasmCapiFunctionData> Factory::NewWasmCapiFunctionData(
    Address call_target, Handle<Foreign> embedder_data,
    Handle<CodeT> wrapper_code, Handle<Map> rtt,
    Handle<PodArray<wasm::ValueType>> serialized_sig) {
  Handle<WasmApiFunctionRef> ref =
      NewWasmApiFunctionRef(Handle<JSReceiver>(), Handle<HeapObject>());
  Handle<WasmInternalFunction> internal =
      NewWasmInternalFunction(call_target, ref, rtt);
  Map map = *wasm_capi_function_data_map();
  WasmCapiFunctionData result =
      WasmCapiFunctionData::cast(AllocateRawWithImmortalMap(
          map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
  result.set_internal(*internal);
  result.set_wrapper_code(*wrapper_code);
  result.set_embedder_data(*embedder_data);
  result.set_serialized_signature(*serialized_sig);
  return handle(result, isolate());
}
1639 
// Allocates a WasmArray with map |map| and initializes it from |elements|.
// Numeric elements are packed and copied byte-wise; reference elements are
// stored as tagged fields.
Handle<WasmArray> Factory::NewWasmArrayFromElements(
    const wasm::ArrayType* type, const std::vector<wasm::WasmValue>& elements,
    Handle<Map> map) {
  uint32_t length = static_cast<uint32_t>(elements.size());
  HeapObject raw =
      AllocateRaw(WasmArray::SizeFor(*map, length), AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  raw.set_map_after_allocation(*map);
  WasmArray result = WasmArray::cast(raw);
  result.set_raw_properties_or_hash(*empty_fixed_array(), kRelaxedStore);
  result.set_length(length);
  if (type->element_type().is_numeric()) {
    // Untagged payload: write each value directly at its element address.
    for (uint32_t i = 0; i < length; i++) {
      Address address = result.ElementAddress(i);
      elements[i]
          .Packed(type->element_type())
          .CopyTo(reinterpret_cast<byte*>(address));
    }
  } else {
    // Tagged payload: store through TaggedField at each element offset.
    for (uint32_t i = 0; i < length; i++) {
      int offset = result.element_offset(i);
      TaggedField<Object>::store(result, offset, *elements[i].to_ref());
    }
  }
  return handle(result, isolate());
}
1666 
// Allocates a WasmArray of numeric elements and bulk-copies its payload from
// raw memory at |source| (used by array.init_from_data-style operations).
Handle<WasmArray> Factory::NewWasmArrayFromMemory(uint32_t length,
                                                  Handle<Map> map,
                                                  Address source) {
  // The element type is recovered from the raw ArrayType pointer stashed in
  // the map's WasmTypeInfo.
  wasm::ValueType element_type = reinterpret_cast<wasm::ArrayType*>(
                                     map->wasm_type_info().foreign_address())
                                     ->element_type();
  // Only numeric payloads can be memcpy'd; reference arrays need barriers.
  DCHECK(element_type.is_numeric());
  HeapObject raw =
      AllocateRaw(WasmArray::SizeFor(*map, length), AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  raw.set_map_after_allocation(*map);
  WasmArray result = WasmArray::cast(raw);
  result.set_raw_properties_or_hash(*empty_fixed_array(), kRelaxedStore);
  result.set_length(length);
  MemCopy(reinterpret_cast<void*>(result.ElementAddress(0)),
          reinterpret_cast<void*>(source),
          length * element_type.value_kind_size());

  return handle(result, isolate());
}
1687 
// Allocates a WasmStruct with map |map| and initializes each field from
// |args|: numeric fields are packed and copied byte-wise, reference fields
// are stored as tagged fields (offset by the struct header).
Handle<WasmStruct> Factory::NewWasmStruct(const wasm::StructType* type,
                                          wasm::WasmValue* args,
                                          Handle<Map> map) {
  DCHECK_EQ(WasmStruct::Size(type), map->wasm_type_info().instance_size());
  HeapObject raw = AllocateRaw(WasmStruct::Size(type), AllocationType::kYoung);
  raw.set_map_after_allocation(*map);
  WasmStruct result = WasmStruct::cast(raw);
  result.set_raw_properties_or_hash(*empty_fixed_array(), kRelaxedStore);
  for (uint32_t i = 0; i < type->field_count(); i++) {
    int offset = type->field_offset(i);
    if (type->field(i).is_numeric()) {
      Address address = result.RawFieldAddress(offset);
      args[i].Packed(type->field(i)).CopyTo(reinterpret_cast<byte*>(address));
    } else {
      // Tagged field offsets are relative to the end of the header.
      offset += WasmStruct::kHeaderSize;
      TaggedField<Object>::store(result, offset, *args[i].to_ref());
    }
  }
  return handle(result, isolate());
}
1708 
// Creates the SharedFunctionInfo for a wasm function exported to JS; the
// behavior lives entirely in |data|, so no builtin id is assigned.
Handle<SharedFunctionInfo>
Factory::NewSharedFunctionInfoForWasmExportedFunction(
    Handle<String> name, Handle<WasmExportedFunctionData> data) {
  return NewSharedFunctionInfo(name, data, Builtin::kNoBuiltinId);
}
1714 
// Creates the SharedFunctionInfo for a JS function wrapped for wasm use.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForWasmJSFunction(
    Handle<String> name, Handle<WasmJSFunctionData> data) {
  return NewSharedFunctionInfo(name, data, Builtin::kNoBuiltinId);
}
1719 
// Creates the (nameless) SharedFunctionInfo for a wasm resume-on-fulfilled
// continuation.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForWasmOnFulfilled(
    Handle<WasmOnFulfilledData> data) {
  return NewSharedFunctionInfo({}, data, Builtin::kNoBuiltinId);
}
1724 
// Creates the SharedFunctionInfo for a wasm C-API function. Marked as a
// concise method so it is not constructable.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForWasmCapiFunction(
    Handle<WasmCapiFunctionData> data) {
  return NewSharedFunctionInfo(MaybeHandle<String>(), data,
                               Builtin::kNoBuiltinId,
                               FunctionKind::kConciseMethod);
}
1731 #endif  // V8_ENABLE_WEBASSEMBLY
1732 
NewCell(Handle<Object> value)1733 Handle<Cell> Factory::NewCell(Handle<Object> value) {
1734   STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);
1735   Cell result = Cell::cast(AllocateRawWithImmortalMap(
1736       Cell::kSize, AllocationType::kOld, *cell_map()));
1737   DisallowGarbageCollection no_gc;
1738   result.set_value(*value);
1739   return handle(result, isolate());
1740 }
1741 
NewNoClosuresCell(Handle<HeapObject> value)1742 Handle<FeedbackCell> Factory::NewNoClosuresCell(Handle<HeapObject> value) {
1743   FeedbackCell result = FeedbackCell::cast(AllocateRawWithImmortalMap(
1744       FeedbackCell::kAlignedSize, AllocationType::kOld,
1745       *no_closures_cell_map()));
1746   DisallowGarbageCollection no_gc;
1747   result.set_value(*value);
1748   result.SetInitialInterruptBudget();
1749   result.clear_padding();
1750   return handle(result, isolate());
1751 }
1752 
NewOneClosureCell(Handle<HeapObject> value)1753 Handle<FeedbackCell> Factory::NewOneClosureCell(Handle<HeapObject> value) {
1754   FeedbackCell result = FeedbackCell::cast(AllocateRawWithImmortalMap(
1755       FeedbackCell::kAlignedSize, AllocationType::kOld,
1756       *one_closure_cell_map()));
1757   DisallowGarbageCollection no_gc;
1758   result.set_value(*value);
1759   result.SetInitialInterruptBudget();
1760   result.clear_padding();
1761   return handle(result, isolate());
1762 }
1763 
NewManyClosuresCell(Handle<HeapObject> value)1764 Handle<FeedbackCell> Factory::NewManyClosuresCell(Handle<HeapObject> value) {
1765   FeedbackCell result = FeedbackCell::cast(AllocateRawWithImmortalMap(
1766       FeedbackCell::kAlignedSize, AllocationType::kOld,
1767       *many_closures_cell_map()));
1768   DisallowGarbageCollection no_gc;
1769   result.set_value(*value);
1770   result.SetInitialInterruptBudget();
1771   result.clear_padding();
1772   return handle(result, isolate());
1773 }
1774 
// Allocates a PropertyCell for global-object property |name| with the given
// details and initial |value|.
Handle<PropertyCell> Factory::NewPropertyCell(Handle<Name> name,
                                              PropertyDetails details,
                                              Handle<Object> value,
                                              AllocationType allocation) {
  DCHECK(name->IsUniqueName());
  STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize);
  PropertyCell cell = PropertyCell::cast(AllocateRawWithImmortalMap(
      PropertyCell::kSize, allocation, *global_property_cell_map()));
  DisallowGarbageCollection no_gc;
  cell.set_dependent_code(
      DependentCode::empty_dependent_code(ReadOnlyRoots(isolate())),
      SKIP_WRITE_BARRIER);
  // Only young-generation cells may skip the barrier; old-space cells could
  // point at younger objects and must record the stores.
  WriteBarrierMode mode = allocation == AllocationType::kYoung
                              ? SKIP_WRITE_BARRIER
                              : UPDATE_WRITE_BARRIER;
  cell.set_name(*name, mode);
  cell.set_value(*value, mode);
  cell.set_property_details_raw(details.AsSmi(), SKIP_WRITE_BARRIER);
  return handle(cell, isolate());
}
1795 
NewProtector()1796 Handle<PropertyCell> Factory::NewProtector() {
1797   return NewPropertyCell(
1798       empty_string(), PropertyDetails::Empty(PropertyCellType::kConstantType),
1799       handle(Smi::FromInt(Protectors::kProtectorValid), isolate()));
1800 }
1801 
// Allocates a TransitionArray sized for |number_of_transitions| entries plus
// |slack| spare entries, with prototype transitions cleared and the length
// slot pre-set.
Handle<TransitionArray> Factory::NewTransitionArray(int number_of_transitions,
                                                    int slack) {
  int capacity = TransitionArray::LengthFor(number_of_transitions + slack);
  Handle<TransitionArray> array = Handle<TransitionArray>::cast(
      NewWeakFixedArrayWithMap(read_only_roots().transition_array_map(),
                               capacity, AllocationType::kOld));
  // Transition arrays are AllocationType::kOld. When black allocation is on we
  // have to add the transition array to the list of
  // encountered_transition_arrays.
  Heap* heap = isolate()->heap();
  if (heap->incremental_marking()->black_allocation()) {
    heap->mark_compact_collector()->AddTransitionArray(*array);
  }
  array->WeakFixedArray::Set(TransitionArray::kPrototypeTransitionsIndex,
                             MaybeObject::FromObject(Smi::zero()));
  array->WeakFixedArray::Set(
      TransitionArray::kTransitionLengthIndex,
      MaybeObject::FromObject(Smi::FromInt(number_of_transitions)));
  return array;
}
1822 
// Allocates and initializes an AllocationSite. When |with_weak_next| is true
// the site is linked into the heap's allocation-sites list; otherwise the
// weak-next-less map variant is used.
Handle<AllocationSite> Factory::NewAllocationSite(bool with_weak_next) {
  Handle<Map> map = with_weak_next ? allocation_site_map()
                                   : allocation_site_without_weaknext_map();
  Handle<AllocationSite> site(
      AllocationSite::cast(New(map, AllocationType::kOld)), isolate());
  site->Initialize();

  if (with_weak_next) {
    // Link the site
    site->set_weak_next(isolate()->heap()->allocation_sites_list());
    isolate()->heap()->set_allocation_sites_list(*site);
  }
  return site;
}
1837 
// Allocates and fully initializes a new Map. |allocation_type| must be kMap
// or kSharedMap; for shared maps the meta map and roots come from the shared
// isolate's heap.
Handle<Map> Factory::NewMap(InstanceType type, int instance_size,
                            ElementsKind elements_kind, int inobject_properties,
                            AllocationType allocation_type) {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  DCHECK_IMPLIES(InstanceTypeChecker::IsJSObject(type) &&
                     !Map::CanHaveFastTransitionableElementsKind(type),
                 IsDictionaryElementsKind(elements_kind) ||
                     IsTerminalElementsKind(elements_kind));
  DCHECK(allocation_type == AllocationType::kMap ||
         allocation_type == AllocationType::kSharedMap);
  HeapObject result = allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
      Map::kSize, allocation_type);
  DisallowGarbageCollection no_gc;
  // Shared maps live in (and take their roots from) the shared isolate's heap.
  Heap* roots = allocation_type == AllocationType::kMap
                    ? isolate()->heap()
                    : isolate()->shared_isolate()->heap();
  result.set_map_after_allocation(ReadOnlyRoots(roots).meta_map(),
                                  SKIP_WRITE_BARRIER);
  return handle(InitializeMap(Map::cast(result), type, instance_size,
                              elements_kind, inobject_properties, roots),
                isolate());
}
1860 
// Initializes every field of a freshly allocated |map|. Field order matters:
// visitor id computation reads instance type and size, so it runs last among
// the layout-defining stores.
Map Factory::InitializeMap(Map map, InstanceType type, int instance_size,
                           ElementsKind elements_kind, int inobject_properties,
                           Heap* roots) {
  DisallowGarbageCollection no_gc;
  map.set_bit_field(0);
  map.set_bit_field2(Map::Bits2::NewTargetIsBaseBit::encode(true));
  int bit_field3 =
      Map::Bits3::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
      Map::Bits3::OwnsDescriptorsBit::encode(true) |
      Map::Bits3::ConstructionCounterBits::encode(Map::kNoSlackTracking) |
      Map::Bits3::IsExtensibleBit::encode(true);
  map.set_bit_field3(bit_field3);
  map.set_instance_type(type);
  ReadOnlyRoots ro_roots(roots);
  // null_value is immortal/immovable, so these stores can skip the barrier.
  HeapObject raw_null_value = ro_roots.null_value();
  map.set_prototype(raw_null_value, SKIP_WRITE_BARRIER);
  map.set_constructor_or_back_pointer(raw_null_value, SKIP_WRITE_BARRIER);
  map.set_instance_size(instance_size);
  if (map.IsJSObjectMap()) {
    DCHECK(!ReadOnlyHeap::Contains(map));
    // In-object properties occupy the tail of the instance.
    map.SetInObjectPropertiesStartInWords(instance_size / kTaggedSize -
                                          inobject_properties);
    DCHECK_EQ(map.GetInObjectProperties(), inobject_properties);
    map.set_prototype_validity_cell(roots->invalid_prototype_validity_cell());
  } else {
    // Non-JSObject maps never have in-object properties.
    DCHECK_EQ(inobject_properties, 0);
    map.set_inobject_properties_start_or_constructor_function_index(0);
    map.set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid),
                                    SKIP_WRITE_BARRIER);
  }
  map.set_dependent_code(DependentCode::empty_dependent_code(ro_roots),
                         SKIP_WRITE_BARRIER);
  map.set_raw_transitions(MaybeObject::FromSmi(Smi::zero()),
                          SKIP_WRITE_BARRIER);
  map.SetInObjectUnusedPropertyFields(inobject_properties);
  map.SetInstanceDescriptors(isolate(), ro_roots.empty_descriptor_array(), 0);
  // Must be called only after |instance_type| and |instance_size| are set.
  map.set_visitor_id(Map::GetVisitorId(map));
  DCHECK(!map.is_in_retained_map_list());
  map.clear_padding();
  map.set_elements_kind(elements_kind);
  isolate()->counters()->maps_created()->Increment();
  if (FLAG_log_maps) LOG(isolate(), MapCreate(map));
  return map;
}
1906 
CopyJSObject(Handle<JSObject> source)1907 Handle<JSObject> Factory::CopyJSObject(Handle<JSObject> source) {
1908   return CopyJSObjectWithAllocationSite(source, Handle<AllocationSite>());
1909 }
1910 
// Creates a shallow clone of |source| in the young generation, optionally
// appending an AllocationMemento that points at |site| so the GC can track
// the clone's allocation site. Elements and the property backing store are
// copied as well, so subsequent mutations of the clone do not alias the
// source (copy-on-write element arrays are shared deliberately).
Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(
    Handle<JSObject> source, Handle<AllocationSite> site) {
  Handle<Map> map(source->map(), isolate());

  // We can only clone regexps, normal objects, api objects, errors or arrays.
  // Copying anything else will break invariants.
  InstanceType instance_type = map->instance_type();
  bool is_clonable_js_type =
      instance_type == JS_REG_EXP_TYPE || instance_type == JS_OBJECT_TYPE ||
      instance_type == JS_ERROR_TYPE || instance_type == JS_ARRAY_TYPE ||
      instance_type == JS_SPECIAL_API_OBJECT_TYPE ||
      InstanceTypeChecker::IsJSApiObject(instance_type);
  bool is_clonable_wasm_type = false;
#if V8_ENABLE_WEBASSEMBLY
  is_clonable_wasm_type = instance_type == WASM_GLOBAL_OBJECT_TYPE ||
                          instance_type == WASM_INSTANCE_OBJECT_TYPE ||
                          instance_type == WASM_MEMORY_OBJECT_TYPE ||
                          instance_type == WASM_MODULE_OBJECT_TYPE ||
                          instance_type == WASM_TABLE_OBJECT_TYPE;
#endif  // V8_ENABLE_WEBASSEMBLY
  CHECK(is_clonable_js_type || is_clonable_wasm_type);

  DCHECK(site.is_null() || AllocationSite::CanTrack(instance_type));

  int object_size = map->instance_size();
  int adjusted_object_size = object_size;
  if (!site.is_null()) {
    // Reserve extra space directly behind the object for the memento.
    DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
    adjusted_object_size += AllocationMemento::kSize;
  }
  HeapObject raw_clone =
      allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
          adjusted_object_size, AllocationType::kYoung);

  DCHECK(Heap::InYoungGeneration(raw_clone) || FLAG_single_generation);

  // Raw bit-copy of the whole object, including map and all in-object fields.
  Heap::CopyBlock(raw_clone.address(), source->address(), object_size);
  Handle<JSObject> clone(JSObject::cast(raw_clone), isolate());

  if (FLAG_enable_unconditional_write_barriers) {
    // By default, we shouldn't need to update the write barrier here, as the
    // clone will be allocated in new space.
    const ObjectSlot start(raw_clone.address());
    const ObjectSlot end(raw_clone.address() + object_size);
    isolate()->heap()->WriteBarrierForRange(raw_clone, start, end);
  }
  if (!site.is_null()) {
    // The memento lives in the reserved slack immediately after the clone.
    AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
        Object(raw_clone.ptr() + object_size));
    InitializeAllocationMemento(alloc_memento, *site);
  }

  SLOW_DCHECK(clone->GetElementsKind() == source->GetElementsKind());
  FixedArrayBase elements = source->elements();
  // Update elements if necessary.
  if (elements.length() > 0) {
    FixedArrayBase elem;
    if (elements.map() == *fixed_cow_array_map()) {
      // Copy-on-write arrays may be shared between source and clone.
      elem = elements;
    } else if (source->HasDoubleElements()) {
      elem = *CopyFixedDoubleArray(
          handle(FixedDoubleArray::cast(elements), isolate()));
    } else {
      elem = *CopyFixedArray(handle(FixedArray::cast(elements), isolate()));
    }
    clone->set_elements(elem);
  }

  // Update properties if necessary.
  if (source->HasFastProperties()) {
    PropertyArray properties = source->property_array();
    if (properties.length() > 0) {
      // TODO(gsathya): Do not copy hash code.
      Handle<PropertyArray> prop = CopyArrayWithMap(
          handle(properties, isolate()), handle(properties.map(), isolate()));
      clone->set_raw_properties_or_hash(*prop, kRelaxedStore);
    }
  } else {
    // Dictionary-mode properties: copy the dictionary itself.
    Handle<Object> copied_properties;
    if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
      copied_properties = SwissNameDictionary::ShallowCopy(
          isolate(), handle(source->property_dictionary_swiss(), isolate()));
    } else {
      copied_properties =
          CopyFixedArray(handle(source->property_dictionary(), isolate()));
    }
    clone->set_raw_properties_or_hash(*copied_properties, kRelaxedStore);
  }
  return clone;
}
2001 
namespace {
// Generic length initializer used by the array-copy helpers below; most
// array-like types expose a plain set_length().
template <typename T>
void initialize_length(T array, int length) {
  array.set_length(length);
}

// PropertyArray has a dedicated initializer for freshly allocated objects
// (presumably because its length shares a field with other state — see
// PropertyArray for details).
template <>
void initialize_length<PropertyArray>(PropertyArray array, int length) {
  array.initialize_length(length);
}

// Sets every embedder data field of |obj| to |initial_value|.
inline void InitEmbedderFields(i::JSObject obj, i::Object initial_value) {
  for (int i = 0; i < obj.GetEmbedderFieldCount(); i++) {
    EmbedderDataSlot(obj, i).Initialize(initial_value);
  }
}

}  // namespace
2020 
// Allocates a young-generation copy of |src| carrying |map|, preserving
// length and element contents.
template <typename T>
Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
  int len = src->length();
  HeapObject new_object = AllocateRawFixedArray(len, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  // The map must be installed before the raw allocation is treated as a T.
  new_object.set_map_after_allocation(*map, SKIP_WRITE_BARRIER);
  T result = T::cast(new_object);
  initialize_length(result, len);
  // Copy the content.
  WriteBarrierMode mode = result.GetWriteBarrierMode(no_gc);
  result.CopyElements(isolate(), 0, *src, 0, len, mode);
  return handle(result, isolate());
}
2034 
// Allocates a copy of |src| enlarged by |grow_by| slots in |allocation|
// space; the new trailing slots are filled with undefined.
template <typename T>
Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by,
                                    AllocationType allocation) {
  DCHECK_LT(0, grow_by);
  DCHECK_LE(grow_by, kMaxInt - src->length());  // guard int overflow
  int old_len = src->length();
  int new_len = old_len + grow_by;
  HeapObject new_object = AllocateRawFixedArray(new_len, allocation);
  DisallowGarbageCollection no_gc;
  // Reuse the source's map; it must be set before casting to T.
  new_object.set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
  T result = T::cast(new_object);
  initialize_length(result, new_len);
  // Copy the content.
  WriteBarrierMode mode = result.GetWriteBarrierMode(no_gc);
  result.CopyElements(isolate(), 0, *src, 0, old_len, mode);
  // Initialize the grown tail so the heap never sees uninitialized slots.
  MemsetTagged(ObjectSlot(result.data_start() + old_len),
               read_only_roots().undefined_value(), grow_by);
  return handle(result, isolate());
}
2054 
CopyFixedArrayWithMap(Handle<FixedArray> array,Handle<Map> map)2055 Handle<FixedArray> Factory::CopyFixedArrayWithMap(Handle<FixedArray> array,
2056                                                   Handle<Map> map) {
2057   return CopyArrayWithMap(array, map);
2058 }
2059 
CopyFixedArrayAndGrow(Handle<FixedArray> array,int grow_by)2060 Handle<FixedArray> Factory::CopyFixedArrayAndGrow(Handle<FixedArray> array,
2061                                                   int grow_by) {
2062   return CopyArrayAndGrow(array, grow_by, AllocationType::kYoung);
2063 }
2064 
// Allocates a WeakArrayList with |capacity| slots and length 0. The slots
// are NOT initialized — callers must fill (or MemsetTagged) the storage
// before the next potential GC.
Handle<WeakArrayList> Factory::NewUninitializedWeakArrayList(
    int capacity, AllocationType allocation) {
  DCHECK_LE(0, capacity);
  // Zero capacity maps onto the canonical empty list.
  if (capacity == 0) return empty_weak_array_list();

  HeapObject heap_object = AllocateRawWeakArrayList(capacity, allocation);
  DisallowGarbageCollection no_gc;
  heap_object.set_map_after_allocation(*weak_array_list_map(),
                                       SKIP_WRITE_BARRIER);
  WeakArrayList result = WeakArrayList::cast(heap_object);
  result.set_length(0);
  result.set_capacity(capacity);
  return handle(result, isolate());
}
2079 
// Allocates a WeakArrayList with |capacity| slots, all initialized to
// undefined, and length 0.
Handle<WeakArrayList> Factory::NewWeakArrayList(int capacity,
                                                AllocationType allocation) {
  Handle<WeakArrayList> result =
      NewUninitializedWeakArrayList(capacity, allocation);
  // Make the raw storage GC-safe by filling it with undefined.
  MemsetTagged(ObjectSlot(result->data_start()),
               read_only_roots().undefined_value(), capacity);
  return result;
}
2088 
CopyWeakFixedArrayAndGrow(Handle<WeakFixedArray> src,int grow_by)2089 Handle<WeakFixedArray> Factory::CopyWeakFixedArrayAndGrow(
2090     Handle<WeakFixedArray> src, int grow_by) {
2091   DCHECK(!src->IsTransitionArray());  // Compacted by GC, this code doesn't work
2092   return CopyArrayAndGrow(src, grow_by, AllocationType::kOld);
2093 }
2094 
// Copies |src| into a new WeakArrayList whose capacity is grown by
// |grow_by|; length stays at the source's length and the unused tail is
// undefined-initialized.
Handle<WeakArrayList> Factory::CopyWeakArrayListAndGrow(
    Handle<WeakArrayList> src, int grow_by, AllocationType allocation) {
  int old_capacity = src->capacity();
  int new_capacity = old_capacity + grow_by;
  DCHECK_GE(new_capacity, old_capacity);  // guards against overflow
  Handle<WeakArrayList> result =
      NewUninitializedWeakArrayList(new_capacity, allocation);
  DisallowGarbageCollection no_gc;
  WeakArrayList raw = *result;
  int old_len = src->length();
  raw.set_length(old_len);
  // Copy the content.
  WriteBarrierMode mode = raw.GetWriteBarrierMode(no_gc);
  raw.CopyElements(isolate(), 0, *src, 0, old_len, mode);
  // The result was allocated uninitialized; fill everything past the copied
  // prefix so the heap never sees garbage slots.
  MemsetTagged(ObjectSlot(raw.data_start() + old_len),
               read_only_roots().undefined_value(), new_capacity - old_len);
  return result;
}
2113 
// Copies |src| into a new WeakArrayList of |new_capacity|, dropping cleared
// weak entries. The result's length is the number of surviving entries; the
// remaining slots are undefined-initialized.
Handle<WeakArrayList> Factory::CompactWeakArrayList(Handle<WeakArrayList> src,
                                                    int new_capacity,
                                                    AllocationType allocation) {
  Handle<WeakArrayList> result =
      NewUninitializedWeakArrayList(new_capacity, allocation);

  // Copy the content.
  DisallowGarbageCollection no_gc;
  WeakArrayList raw_src = *src;
  WeakArrayList raw_result = *result;
  WriteBarrierMode mode = raw_result.GetWriteBarrierMode(no_gc);
  int copy_to = 0, length = raw_src.length();
  for (int i = 0; i < length; i++) {
    MaybeObject element = raw_src.Get(i);
    // Skip entries whose weak referent has been collected.
    if (element->IsCleared()) continue;
    raw_result.Set(copy_to++, element, mode);
  }
  raw_result.set_length(copy_to);

  // Initialize the uncopied tail so the heap never sees raw slots.
  MemsetTagged(ObjectSlot(raw_result.data_start() + copy_to),
               read_only_roots().undefined_value(), new_capacity - copy_to);
  return result;
}
2137 
CopyPropertyArrayAndGrow(Handle<PropertyArray> array,int grow_by)2138 Handle<PropertyArray> Factory::CopyPropertyArrayAndGrow(
2139     Handle<PropertyArray> array, int grow_by) {
2140   return CopyArrayAndGrow(array, grow_by, AllocationType::kYoung);
2141 }
2142 
// Copies the first |new_len| elements of |array| into a fresh FixedArray in
// |allocation| space. |new_len| must not exceed the source length.
Handle<FixedArray> Factory::CopyFixedArrayUpTo(Handle<FixedArray> array,
                                               int new_len,
                                               AllocationType allocation) {
  DCHECK_LE(0, new_len);
  DCHECK_LE(new_len, array->length());
  // An empty prefix maps onto the canonical empty array.
  if (new_len == 0) return empty_fixed_array();
  HeapObject heap_object = AllocateRawFixedArray(new_len, allocation);
  DisallowGarbageCollection no_gc;
  heap_object.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
  FixedArray result = FixedArray::cast(heap_object);
  result.set_length(new_len);
  // Copy the content.
  WriteBarrierMode mode = result.GetWriteBarrierMode(no_gc);
  result.CopyElements(isolate(), 0, *array, 0, new_len, mode);
  return handle(result, isolate());
}
2159 
// Copies |array| (keeping its map). Empty arrays are returned as-is since
// the canonical empty array is immutable and shared.
Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) {
  if (array->length() == 0) return array;
  return CopyArrayWithMap(array, handle(array->map(), isolate()));
}
2164 
// Copies |array| of unboxed doubles. Empty arrays are returned as-is. The
// payload is copied with a raw block copy, which is safe because double
// arrays contain no tagged pointers past the map word.
Handle<FixedDoubleArray> Factory::CopyFixedDoubleArray(
    Handle<FixedDoubleArray> array) {
  int len = array->length();
  if (len == 0) return array;
  Handle<FixedDoubleArray> result =
      Handle<FixedDoubleArray>::cast(NewFixedDoubleArray(len));
  // Copy everything from the length field onward (skipping the map, which
  // NewFixedDoubleArray already installed).
  Heap::CopyBlock(
      result->address() + FixedDoubleArray::kLengthOffset,
      array->address() + FixedDoubleArray::kLengthOffset,
      FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
  return result;
}
2177 
NewHeapNumberForCodeAssembler(double value)2178 Handle<HeapNumber> Factory::NewHeapNumberForCodeAssembler(double value) {
2179   return CanAllocateInReadOnlySpace()
2180              ? NewHeapNumber<AllocationType::kReadOnly>(value)
2181              : NewHeapNumber<AllocationType::kOld>(value);
2182 }
2183 
// Creates an error object via |constructor| with a message built from
// |template_index| and up to three arguments. Null argument handles are
// normalized to undefined before formatting.
Handle<JSObject> Factory::NewError(Handle<JSFunction> constructor,
                                   MessageTemplate template_index,
                                   Handle<Object> arg0, Handle<Object> arg1,
                                   Handle<Object> arg2) {
  HandleScope scope(isolate());

  if (arg0.is_null()) arg0 = undefined_value();
  if (arg1.is_null()) arg1 = undefined_value();
  if (arg2.is_null()) arg2 = undefined_value();

  // CloseAndEscape promotes the result handle out of the local scope.
  return scope.CloseAndEscape(ErrorUtils::MakeGenericError(
      isolate(), constructor, template_index, arg0, arg1, arg2, SKIP_NONE));
}
2197 
// Creates an error object via |constructor| with a pre-formatted |message|,
// collecting a stack trace.
Handle<JSObject> Factory::NewError(Handle<JSFunction> constructor,
                                   Handle<String> message) {
  // Construct a new error object. If an exception is thrown, use the exception
  // as the result.

  Handle<Object> no_caller;  // null handle: no caller to skip frames for
  return ErrorUtils::Construct(isolate(), constructor, constructor, message,
                               undefined_value(), SKIP_NONE, no_caller,
                               ErrorUtils::StackTraceCollection::kEnabled)
      .ToHandleChecked();
}
2209 
// Creates the RangeError thrown when a string operation would exceed
// String::kMaxLength, invalidating the string-length protector as a side
// effect.
Handle<Object> Factory::NewInvalidStringLengthError() {
  if (FLAG_correctness_fuzzer_suppressions) {
    // Correctness fuzzing compares engines; overlong strings are not
    // comparable, so bail out hard instead of throwing.
    FATAL("Aborting on invalid string length");
  }
  // Invalidate the "string length" protector.
  if (Protectors::IsStringLengthOverflowLookupChainIntact(isolate())) {
    Protectors::InvalidateStringLengthOverflowLookupChain(isolate());
  }
  return NewRangeError(MessageTemplate::kInvalidStringLength);
}
2220 
// Stamps out Factory::New<NAME>() constructors (NewTypeError, NewRangeError,
// ...), each forwarding to the generic NewError with the corresponding
// native-context error constructor.
#define DEFINE_ERROR(NAME, name)                                              \
  Handle<JSObject> Factory::New##NAME(                                        \
      MessageTemplate template_index, Handle<Object> arg0,                    \
      Handle<Object> arg1, Handle<Object> arg2) {                             \
    return NewError(isolate()->name##_function(), template_index, arg0, arg1, \
                    arg2);                                                    \
  }
DEFINE_ERROR(Error, error)
DEFINE_ERROR(EvalError, eval_error)
DEFINE_ERROR(RangeError, range_error)
DEFINE_ERROR(ReferenceError, reference_error)
DEFINE_ERROR(SyntaxError, syntax_error)
DEFINE_ERROR(TypeError, type_error)
DEFINE_ERROR(WasmCompileError, wasm_compile_error)
DEFINE_ERROR(WasmLinkError, wasm_link_error)
DEFINE_ERROR(WasmRuntimeError, wasm_runtime_error)
DEFINE_ERROR(WasmExceptionError, wasm_exception_error)
#undef DEFINE_ERROR
2239 
// Creates the initial .prototype object for |function|. Resumable functions
// (generators, async generators) share prototype maps and get no
// "constructor" property; ordinary functions get a fresh Object map plus a
// non-enumerable "constructor" back-reference.
Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
  // Make sure to use globals from the function's context, since the function
  // can be from a different context.
  Handle<NativeContext> native_context(function->native_context(), isolate());
  Handle<Map> new_map;
  if (V8_UNLIKELY(IsAsyncGeneratorFunction(function->shared().kind()))) {
    new_map = handle(native_context->async_generator_object_prototype_map(),
                     isolate());
  } else if (IsResumableFunction(function->shared().kind())) {
    // Generator and async function prototypes can share maps since they
    // don't have "constructor" properties.
    new_map =
        handle(native_context->generator_object_prototype_map(), isolate());
  } else {
    // Each function prototype gets a fresh map to avoid unwanted sharing of
    // maps between prototypes of different constructors.
    Handle<JSFunction> object_function(native_context->object_function(),
                                       isolate());
    DCHECK(object_function->has_initial_map());
    new_map = handle(object_function->initial_map(), isolate());
  }

  DCHECK(!new_map->is_prototype_map());
  Handle<JSObject> prototype = NewJSObjectFromMap(new_map);

  if (!IsResumableFunction(function->shared().kind())) {
    JSObject::AddProperty(isolate(), prototype, constructor_string(), function,
                          DONT_ENUM);
  }

  return prototype;
}
2272 
// Wraps the raw pointer |value| in a JSExternalObject so it can be passed
// through JavaScript. The pointer is stored via the external-pointer table
// mechanism (sandbox-aware), not as a tagged field.
Handle<JSObject> Factory::NewExternal(void* value) {
  auto external =
      Handle<JSExternalObject>::cast(NewJSObjectFromMap(external_map()));
  // Entries must be allocated before set_value can store the pointer.
  external->AllocateExternalPointerEntries(isolate());
  external->set_value(isolate(), value);
  return external;
}
2280 
NewDeoptimizationLiteralArray(int length)2281 Handle<DeoptimizationLiteralArray> Factory::NewDeoptimizationLiteralArray(
2282     int length) {
2283   return Handle<DeoptimizationLiteralArray>::cast(
2284       NewWeakFixedArray(length, AllocationType::kOld));
2285 }
2286 
// Creates an on-heap trampoline Code object that jumps to the embedded
// (off-heap) builtin at |off_heap_entry|, copying the metadata offsets the
// trampoline must inherit from the original builtin |code|.
Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
                                              Address off_heap_entry) {
  CHECK_NOT_NULL(isolate()->embedded_blob_code());
  CHECK_NE(0, isolate()->embedded_blob_code_size());
  CHECK(Builtins::IsIsolateIndependentBuiltin(*code));

  // Non-executable builtins get a trampoline without the jump sequence.
  bool generate_jump_to_instruction_stream =
      Builtins::CodeObjectIsExecutable(code->builtin_id());
  Handle<Code> result = Builtins::GenerateOffHeapTrampolineFor(
      isolate(), off_heap_entry,
      code->code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad),
      generate_jump_to_instruction_stream);

  // Trampolines may not contain any metadata since all metadata offsets,
  // stored on the Code object, refer to the off-heap metadata area.
  CHECK_EQ(result->raw_metadata_size(), 0);

  // The CodeDataContainer should not be modified beyond this point since it's
  // now possibly canonicalized.

  // The trampoline code object must inherit specific flags from the original
  // builtin (e.g. the safepoint-table offset). We set them manually here.
  {
    DisallowGarbageCollection no_gc;
    // Code pages are write-protected; this scope makes the page writable.
    CodePageMemoryModificationScope code_allocation(*result);
    Code raw_code = *code;
    Code raw_result = *result;

    const bool set_is_off_heap_trampoline = true;
    raw_result.initialize_flags(raw_code.kind(), raw_code.is_turbofanned(),
                                raw_code.stack_slots(),
                                set_is_off_heap_trampoline);
    raw_result.set_builtin_id(raw_code.builtin_id());
    raw_result.set_handler_table_offset(raw_code.handler_table_offset());
    raw_result.set_constant_pool_offset(raw_code.constant_pool_offset());
    raw_result.set_code_comments_offset(raw_code.code_comments_offset());
    raw_result.set_unwinding_info_offset(raw_code.unwinding_info_offset());

    // Replace the newly generated trampoline's RelocInfo ByteArray with the
    // canonical one stored in the roots to avoid duplicating it for every
    // single builtin.
    ByteArray canonical_reloc_info =
        generate_jump_to_instruction_stream
            ? read_only_roots().off_heap_trampoline_relocation_info()
            : read_only_roots().empty_byte_array();
#ifdef DEBUG
    // Verify that the contents are the same.
    ByteArray reloc_info = raw_result.relocation_info();
    DCHECK_EQ(reloc_info.length(), canonical_reloc_info.length());
    for (int i = 0; i < reloc_info.length(); ++i) {
      DCHECK_EQ(reloc_info.get(i), canonical_reloc_info.get(i));
    }
#endif
    raw_result.set_relocation_info(canonical_reloc_info);
    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
      CodeDataContainer code_data_container =
          raw_result.code_data_container(kAcquireLoad);
      // Updating flags (in particular is_off_heap_trampoline one) might change
      // the value of the instruction start, so update it here.
      code_data_container.UpdateCodeEntryPoint(isolate(), raw_result);
      // Also update flag values cached on the code data container.
      code_data_container.initialize_flags(raw_code.kind(),
                                           raw_code.builtin_id());
    }
  }

  return result;
}
2355 
// Deep-copies a Code object into code space, giving the copy its own
// CodeDataContainer (containers cannot be shared between Code objects) and
// relocating the copied instructions to the new address.
Handle<Code> Factory::CopyCode(Handle<Code> code) {
  Handle<CodeDataContainer> data_container = NewCodeDataContainer(
      code->code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad),
      AllocationType::kOld);

  Heap* heap = isolate()->heap();
  Handle<Code> new_code;
  {
    int obj_size = code->Size();
    // Code pages are write-protected; this scope allows writing to the
    // freshly allocated page(s).
    CodePageCollectionMemoryModificationScope code_allocation(heap);
    HeapObject result =
        allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
            obj_size, AllocationType::kCode, AllocationOrigin::kRuntime);

    // Copy code object.
    Address old_addr = code->address();
    Address new_addr = result.address();
    Heap::CopyBlock(new_addr, old_addr, obj_size);
    new_code = handle(Code::cast(result), isolate());

    // Set the {CodeDataContainer}, it cannot be shared.
    new_code->set_code_data_container(*data_container, kReleaseStore);

    // Fix up PC-relative and absolute references for the new base address.
    new_code->Relocate(new_addr - old_addr);
    // We have to iterate over the object and process its pointers when black
    // allocation is on.
    heap->incremental_marking()->ProcessBlackAllocatedObject(*new_code);
    // Record all references to embedded objects in the new code object.
#ifndef V8_DISABLE_WRITE_BARRIERS
    WriteBarrierForCode(*new_code);
#endif
  }
  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
    data_container->initialize_flags(code->kind(), code->builtin_id());
    data_container->SetCodeAndEntryPoint(isolate(), *new_code);
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) new_code->ObjectVerify(isolate());
#endif
  DCHECK(IsAligned(new_code->address(), kCodeAlignment));
  DCHECK_IMPLIES(
      !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
      heap->code_region().contains(new_code->address()));
  return new_code;
}
2402 
// Creates an old-space copy of |source|, duplicating its bytecodes and
// carrying over all header fields (frame size, parameter count, tables,
// OSR/age state). Constant pool and handler table are shared, not copied.
Handle<BytecodeArray> Factory::CopyBytecodeArray(Handle<BytecodeArray> source) {
  int size = BytecodeArray::SizeFor(source->length());
  BytecodeArray copy = BytecodeArray::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kOld, *bytecode_array_map()));
  DisallowGarbageCollection no_gc;
  BytecodeArray raw_source = *source;
  copy.set_length(raw_source.length());
  copy.set_frame_size(raw_source.frame_size());
  copy.set_parameter_count(raw_source.parameter_count());
  copy.set_incoming_new_target_or_generator_register(
      raw_source.incoming_new_target_or_generator_register());
  copy.set_constant_pool(raw_source.constant_pool());
  copy.set_handler_table(raw_source.handler_table());
  copy.set_source_position_table(raw_source.source_position_table(kAcquireLoad),
                                 kReleaseStore);
  copy.set_osr_urgency(raw_source.osr_urgency());
  copy.set_bytecode_age(raw_source.bytecode_age());
  raw_source.CopyBytecodesTo(copy);
  return handle(copy, isolate());
}
2423 
// Allocates a JSObject for |constructor|, lazily creating the constructor's
// initial map if it does not exist yet.
Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
                                      AllocationType allocation) {
  JSFunction::EnsureHasInitialMap(constructor);
  Handle<Map> map(constructor->initial_map(), isolate());
  return NewJSObjectFromMap(map, allocation);
}
2430 
NewSlowJSObjectWithNullProto()2431 Handle<JSObject> Factory::NewSlowJSObjectWithNullProto() {
2432   Handle<JSObject> result =
2433       NewSlowJSObjectFromMap(isolate()->slow_object_with_null_prototype_map());
2434   return result;
2435 }
2436 
// Creates a fast-mode JSObject with a null prototype by transitioning the
// Object constructor's initial map to prototype null.
Handle<JSObject> Factory::NewJSObjectWithNullProto() {
  Handle<Map> map(isolate()->object_function()->initial_map(), isolate());
  Handle<Map> map_with_null_proto =
      Map::TransitionToPrototype(isolate(), map, null_value());
  return NewJSObjectFromMap(map_with_null_proto);
}
2443 
// Allocates the JSGlobalObject for |constructor|. Global objects always use
// dictionary-mode properties backed by a GlobalDictionary of PropertyCells;
// any accessors described on the initial map are migrated into the
// dictionary, and the object gets a fresh descriptor-free dictionary map.
Handle<JSGlobalObject> Factory::NewJSGlobalObject(
    Handle<JSFunction> constructor) {
  DCHECK(constructor->has_initial_map());
  Handle<Map> map(constructor->initial_map(), isolate());
  DCHECK(map->is_dictionary_map());

  // Make sure no field properties are described in the initial map.
  // This guarantees us that normalizing the properties does not
  // require us to change property values to PropertyCells.
  DCHECK_EQ(map->NextFreePropertyIndex(), 0);

  // Make sure we don't have a ton of pre-allocated slots in the
  // global objects. They will be unused once we normalize the object.
  DCHECK_EQ(map->UnusedPropertyFields(), 0);
  DCHECK_EQ(map->GetInObjectProperties(), 0);

  // Initial size of the backing store to avoid resize of the storage during
  // bootstrapping. The size differs between the JS global object and the
  // builtins object.
  int initial_size = 64;

  // Allocate a dictionary object for backing storage.
  int at_least_space_for = map->NumberOfOwnDescriptors() * 2 + initial_size;
  Handle<GlobalDictionary> dictionary =
      GlobalDictionary::New(isolate(), at_least_space_for);

  // The global object might be created from an object template with accessors.
  // Fill these accessors into the dictionary.
  Handle<DescriptorArray> descs(map->instance_descriptors(isolate()),
                                isolate());
  for (InternalIndex i : map->IterateOwnDescriptors()) {
    PropertyDetails details = descs->GetDetails(i);
    // Only accessors are expected.
    DCHECK_EQ(PropertyKind::kAccessor, details.kind());
    PropertyDetails d(PropertyKind::kAccessor, details.attributes(),
                      PropertyCellType::kMutable);
    Handle<Name> name(descs->GetKey(i), isolate());
    Handle<Object> value(descs->GetStrongValue(i), isolate());
    Handle<PropertyCell> cell = NewPropertyCell(name, d, value);
    // |dictionary| already contains enough space for all properties.
    USE(GlobalDictionary::Add(isolate(), dictionary, name, cell, d));
  }

  // Allocate the global object and initialize it with the backing store.
  Handle<JSGlobalObject> global(
      JSGlobalObject::cast(New(map, AllocationType::kOld)), isolate());
  InitializeJSObjectFromMap(*global, *dictionary, *map);

  // Create a new map for the global object.
  Handle<Map> new_map = Map::CopyDropDescriptors(isolate(), map);
  Map raw_map = *new_map;
  raw_map.set_may_have_interesting_symbols(true);
  raw_map.set_is_dictionary_map(true);
  LOG(isolate(), MapDetails(raw_map));

  // Set up the global object as a normalized object.
  global->set_global_dictionary(*dictionary, kReleaseStore);
  global->set_map(raw_map, kReleaseStore);

  // Make sure result is a global object with properties in dictionary.
  DCHECK(global->IsJSGlobalObject() && !global->HasFastProperties());
  return global;
}
2507 
// Installs the property backing store on |obj|, resets its elements to the
// canonical empty state, and initializes the rest of the body per |map|.
void Factory::InitializeJSObjectFromMap(JSObject obj, Object properties,
                                        Map map) {
  DisallowGarbageCollection no_gc;
  obj.set_raw_properties_or_hash(properties, kRelaxedStore);
  obj.initialize_elements();
  // TODO(1240798): Initialize the object's body using valid initial values
  // according to the object's initial map.  For example, if the map's
  // instance type is JS_ARRAY_TYPE, the length field should be initialized
  // to a number (e.g. Smi::zero()) and the elements initialized to a
  // fixed array (e.g. Heap::empty_fixed_array()).  Currently, the object
  // verification code has to cope with (temporarily) invalid objects.  See
  // for example, JSArray::JSArrayVerify).
  InitializeJSObjectBody(obj, map, JSObject::kHeaderSize);
}
2522 
// Fills the in-object fields of |obj| from |start_offset| to the instance
// size with their initial values, cooperating with in-object slack tracking
// when it is active on the map's root.
void Factory::InitializeJSObjectBody(JSObject obj, Map map, int start_offset) {
  DisallowGarbageCollection no_gc;
  // Nothing to initialize past the header.
  if (start_offset == map.instance_size()) return;
  DCHECK_LT(start_offset, map.instance_size());

  // We cannot always fill with one_pointer_filler_map because objects
  // created from API functions expect their embedder fields to be initialized
  // with undefined_value.
  // Pre-allocated fields need to be initialized with undefined_value as well
  // so that object accesses before the constructor completes (e.g. in the
  // debugger) will not cause a crash.

  // In case of Array subclassing the |map| could already be transitioned
  // to different elements kind from the initial map on which we track slack.
  bool in_progress = map.IsInobjectSlackTrackingInProgress();
  obj.InitializeBody(map, start_offset, in_progress,
                     ReadOnlyRoots(isolate()).one_pointer_filler_map_word(),
                     *undefined_value());
  if (in_progress) {
    // Count this allocation towards the slack-tracking step budget.
    map.FindRootMap(isolate()).InobjectSlackTrackingStep(isolate());
  }
}
2545 
// Allocates and fully initializes a JSObject described by |map|, optionally
// attaching an AllocationMemento for |allocation_site|. Must not be used for
// JSFunctions or global objects, which have dedicated allocation paths.
Handle<JSObject> Factory::NewJSObjectFromMap(
    Handle<Map> map, AllocationType allocation,
    Handle<AllocationSite> allocation_site) {
  // JSFunctions should be allocated using AllocateFunction to be
  // properly initialized.
  DCHECK(!InstanceTypeChecker::IsJSFunction((map->instance_type())));

  // Both types of global objects should be allocated using
  // AllocateGlobalObject to be properly initialized.
  DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);

  JSObject js_obj = JSObject::cast(
      AllocateRawWithAllocationSite(map, allocation, allocation_site));

  InitializeJSObjectFromMap(js_obj, *empty_fixed_array(), *map);

  DCHECK(js_obj.HasFastElements() ||
         js_obj.HasTypedArrayOrRabGsabTypedArrayElements() ||
         js_obj.HasFastStringWrapperElements() ||
         js_obj.HasFastArgumentsElements() || js_obj.HasDictionaryElements());
  return handle(js_obj, isolate());
}
2568 
// Allocates a dictionary-mode JSObject from |map|, giving it a fresh name
// dictionary (Swiss or classic, per build configuration) with room for
// |capacity| properties.
Handle<JSObject> Factory::NewSlowJSObjectFromMap(
    Handle<Map> map, int capacity, AllocationType allocation,
    Handle<AllocationSite> allocation_site) {
  DCHECK(map->is_dictionary_map());
  Handle<HeapObject> object_properties;
  if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
    object_properties = NewSwissNameDictionary(capacity, allocation);
  } else {
    object_properties = NameDictionary::New(isolate(), capacity);
  }
  Handle<JSObject> js_object =
      NewJSObjectFromMap(map, allocation, allocation_site);
  // Replace the empty backing store installed by NewJSObjectFromMap.
  js_object->set_raw_properties_or_hash(*object_properties, kRelaxedStore);
  return js_object;
}
2584 
// Creates a dictionary-mode JSObject with the given {prototype}, installing
// pre-built {properties} and (dictionary) {elements} backing stores.
Handle<JSObject> Factory::NewSlowJSObjectWithPropertiesAndElements(
    Handle<HeapObject> prototype, Handle<HeapObject> properties,
    Handle<FixedArrayBase> elements) {
  DCHECK_IMPLIES(V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL,
                 properties->IsSwissNameDictionary());
  DCHECK_IMPLIES(!V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL,
                 properties->IsNameDictionary());

  Handle<Map> object_map = isolate()->slow_object_with_object_prototype_map();
  if (object_map->prototype() != *prototype) {
    // Retarget the map at the requested prototype.
    object_map = Map::TransitionToPrototype(isolate(), object_map, prototype);
  }
  DCHECK(object_map->is_dictionary_map());
  Handle<JSObject> object =
      NewJSObjectFromMap(object_map, AllocationType::kYoung);
  object->set_raw_properties_or_hash(*properties);
  if (*elements != read_only_roots().empty_fixed_array()) {
    DCHECK(elements->IsNumberDictionary());
    // Non-empty element stores must be dictionaries; migrate the object to a
    // DICTIONARY_ELEMENTS map before installing them.
    object_map =
        JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
    JSObject::MigrateToMap(isolate(), object, object_map);
    object->set_elements(*elements);
  }
  return object;
}
2610 
NewJSArray(ElementsKind elements_kind,int length,int capacity,ArrayStorageAllocationMode mode,AllocationType allocation)2611 Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind, int length,
2612                                     int capacity,
2613                                     ArrayStorageAllocationMode mode,
2614                                     AllocationType allocation) {
2615   DCHECK(capacity >= length);
2616   if (capacity == 0) {
2617     return NewJSArrayWithElements(empty_fixed_array(), elements_kind, length,
2618                                   allocation);
2619   }
2620 
2621   HandleScope inner_scope(isolate());
2622   Handle<FixedArrayBase> elms =
2623       NewJSArrayStorage(elements_kind, capacity, mode);
2624   return inner_scope.CloseAndEscape(NewJSArrayWithUnverifiedElements(
2625       elms, elements_kind, length, allocation));
2626 }
2627 
NewJSArrayWithElements(Handle<FixedArrayBase> elements,ElementsKind elements_kind,int length,AllocationType allocation)2628 Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArrayBase> elements,
2629                                                 ElementsKind elements_kind,
2630                                                 int length,
2631                                                 AllocationType allocation) {
2632   Handle<JSArray> array = NewJSArrayWithUnverifiedElements(
2633       elements, elements_kind, length, allocation);
2634   JSObject::ValidateElements(*array);
2635   return array;
2636 }
2637 
// Like NewJSArrayWithElements, but skips element validation; callers must
// guarantee {elements} is compatible with {elements_kind}.
Handle<JSArray> Factory::NewJSArrayWithUnverifiedElements(
    Handle<FixedArrayBase> elements, ElementsKind elements_kind, int length,
    AllocationType allocation) {
  DCHECK(length <= elements->length());
  NativeContext native_context = isolate()->raw_native_context();
  Map map = native_context.GetInitialJSArrayMap(elements_kind);
  if (map.is_null()) {
    // No per-kind initial map cached on the context; fall back to the Array
    // function's generic initial map.
    JSFunction array_function = native_context.array_function();
    map = array_function.initial_map();
  }
  Handle<JSArray> array = Handle<JSArray>::cast(
      NewJSObjectFromMap(handle(map, isolate()), allocation));
  DisallowGarbageCollection no_gc;
  JSArray raw = *array;
  raw.set_elements(*elements);
  raw.set_length(Smi::FromInt(length));
  return array;
}
2656 
// Installs a fresh backing store of {capacity} slots on an existing {array}
// and sets its length to {length}.
void Factory::NewJSArrayStorage(Handle<JSArray> array, int length, int capacity,
                                ArrayStorageAllocationMode mode) {
  DCHECK(capacity >= length);

  if (capacity == 0) {
    JSArray raw = *array;
    DisallowGarbageCollection no_gc;
    raw.set_length(Smi::zero());
    raw.set_elements(*empty_fixed_array());
    return;
  }

  HandleScope inner_scope(isolate());
  Handle<FixedArrayBase> elms =
      NewJSArrayStorage(array->GetElementsKind(), capacity, mode);
  // All allocation is done above; dereference handles only afterwards.
  DisallowGarbageCollection no_gc;
  JSArray raw = *array;
  raw.set_elements(*elms);
  raw.set_length(Smi::FromInt(length));
}
2677 
NewJSArrayStorage(ElementsKind elements_kind,int capacity,ArrayStorageAllocationMode mode)2678 Handle<FixedArrayBase> Factory::NewJSArrayStorage(
2679     ElementsKind elements_kind, int capacity, ArrayStorageAllocationMode mode) {
2680   DCHECK_GT(capacity, 0);
2681   Handle<FixedArrayBase> elms;
2682   if (IsDoubleElementsKind(elements_kind)) {
2683     if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
2684       elms = NewFixedDoubleArray(capacity);
2685     } else {
2686       DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
2687       elms = NewFixedDoubleArrayWithHoles(capacity);
2688     }
2689   } else {
2690     DCHECK(IsSmiOrObjectElementsKind(elements_kind));
2691     if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
2692       elms = NewFixedArray(capacity);
2693     } else {
2694       DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
2695       elms = NewFixedArrayWithHoles(capacity);
2696     }
2697   }
2698   return elms;
2699 }
2700 
// Allocates an empty JSWeakMap and initializes its backing collection.
Handle<JSWeakMap> Factory::NewJSWeakMap() {
  NativeContext native_context = isolate()->raw_native_context();
  Handle<Map> map(native_context.js_weak_map_fun().initial_map(), isolate());
  Handle<JSWeakMap> weakmap(JSWeakMap::cast(*NewJSObjectFromMap(map)),
                            isolate());
  {
    // Do not leak handles for the hash table, it would make entries strong.
    HandleScope scope(isolate());
    JSWeakCollection::Initialize(weakmap, isolate());
  }
  return weakmap;
}
2713 
// Allocates a JSModuleNamespace and pre-populates its @@toStringTag slot with
// the read-only "Module" string.
Handle<JSModuleNamespace> Factory::NewJSModuleNamespace() {
  Handle<Map> map = isolate()->js_module_namespace_map();
  Handle<JSModuleNamespace> module_namespace(
      Handle<JSModuleNamespace>::cast(NewJSObjectFromMap(map)));
  FieldIndex index = FieldIndex::ForDescriptor(
      *map, InternalIndex(JSModuleNamespace::kToStringTagFieldIndex));
  // "Module" is a read-only root, hence SKIP_WRITE_BARRIER.
  module_namespace->FastPropertyAtPut(index, read_only_roots().Module_string(),
                                      SKIP_WRITE_BARRIER);
  return module_namespace;
}
2724 
// Creates a JSWrappedFunction around callable {target} for the ShadowRealm
// proposal; the numbered comments follow the spec's WrappedFunctionCreate
// steps.
Handle<JSWrappedFunction> Factory::NewJSWrappedFunction(
    Handle<NativeContext> creation_context, Handle<Object> target) {
  DCHECK(target->IsCallable());
  Handle<Map> map(
      Map::cast(creation_context->get(Context::WRAPPED_FUNCTION_MAP_INDEX)),
      isolate());
  // 2. Let wrapped be ! MakeBasicObject(internalSlotsList).
  // 3. Set wrapped.[[Prototype]] to
  // callerRealm.[[Intrinsics]].[[%Function.prototype%]].
  // 4. Set wrapped.[[Call]] as described in 2.1.
  Handle<JSWrappedFunction> wrapped = Handle<JSWrappedFunction>::cast(
      isolate()->factory()->NewJSObjectFromMap(map));
  // 5. Set wrapped.[[WrappedTargetFunction]] to Target.
  wrapped->set_wrapped_target_function(JSReceiver::cast(*target));
  // 6. Set wrapped.[[Realm]] to callerRealm.
  wrapped->set_context(*creation_context);
  // TODO(v8:11989): https://github.com/tc39/proposal-shadowrealm/pull/348

  return wrapped;
}
2745 
// Allocates a (possibly async) generator object for the resumable
// {function}, using the function's initial map.
Handle<JSGeneratorObject> Factory::NewJSGeneratorObject(
    Handle<JSFunction> function) {
  DCHECK(IsResumableFunction(function->shared().kind()));
  JSFunction::EnsureHasInitialMap(function);
  Handle<Map> map(function->initial_map(), isolate());

  DCHECK(map->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
         map->instance_type() == JS_ASYNC_GENERATOR_OBJECT_TYPE);

  return Handle<JSGeneratorObject>::cast(NewJSObjectFromMap(map));
}
2757 
// Allocates a SourceTextModule for {sfi} in the unlinked state, pre-sizing
// the export/import tables from the module descriptor in the scope info.
Handle<SourceTextModule> Factory::NewSourceTextModule(
    Handle<SharedFunctionInfo> sfi) {
  Handle<SourceTextModuleInfo> module_info(
      sfi->scope_info().ModuleDescriptorInfo(), isolate());
  Handle<ObjectHashTable> exports =
      ObjectHashTable::New(isolate(), module_info->RegularExportCount());
  Handle<FixedArray> regular_exports =
      NewFixedArray(module_info->RegularExportCount());
  Handle<FixedArray> regular_imports =
      NewFixedArray(module_info->regular_imports().length());
  int requested_modules_length = module_info->module_requests().length();
  Handle<FixedArray> requested_modules =
      requested_modules_length > 0 ? NewFixedArray(requested_modules_length)
                                   : empty_fixed_array();

  ReadOnlyRoots roots(isolate());
  // All allocation happens above; below we only write raw fields.
  SourceTextModule module = SourceTextModule::cast(
      New(source_text_module_map(), AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  module.set_code(*sfi);
  module.set_exports(*exports);
  module.set_regular_exports(*regular_exports);
  module.set_regular_imports(*regular_imports);
  module.set_hash(isolate()->GenerateIdentityHash(Smi::kMaxValue));
  module.set_module_namespace(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_requested_modules(*requested_modules);
  module.set_status(Module::kUnlinked);
  module.set_exception(roots.the_hole_value(), SKIP_WRITE_BARRIER);
  module.set_top_level_capability(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_import_meta(roots.the_hole_value(), kReleaseStore,
                         SKIP_WRITE_BARRIER);
  // DFS bookkeeping starts out invalid until linking runs.
  module.set_dfs_index(-1);
  module.set_dfs_ancestor_index(-1);
  module.set_flags(0);
  module.set_async(IsAsyncModule(sfi->kind()));
  module.set_async_evaluating_ordinal(SourceTextModule::kNotAsyncEvaluated);
  module.set_cycle_root(roots.the_hole_value(), SKIP_WRITE_BARRIER);
  module.set_async_parent_modules(roots.empty_array_list());
  module.set_pending_async_dependencies(0);
  return handle(module, isolate());
}
2799 
// Allocates a SyntheticModule (an embedder-defined module) in the unlinked
// state, wrapping the native {evaluation_steps} callback in a Foreign.
Handle<SyntheticModule> Factory::NewSyntheticModule(
    Handle<String> module_name, Handle<FixedArray> export_names,
    v8::Module::SyntheticModuleEvaluationSteps evaluation_steps) {
  ReadOnlyRoots roots(isolate());

  Handle<ObjectHashTable> exports =
      ObjectHashTable::New(isolate(), static_cast<int>(export_names->length()));
  Handle<Foreign> evaluation_steps_foreign =
      NewForeign(reinterpret_cast<i::Address>(evaluation_steps));

  // All allocation happens above; below we only write raw fields.
  SyntheticModule module =
      SyntheticModule::cast(New(synthetic_module_map(), AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  module.set_hash(isolate()->GenerateIdentityHash(Smi::kMaxValue));
  module.set_module_namespace(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_status(Module::kUnlinked);
  module.set_exception(roots.the_hole_value(), SKIP_WRITE_BARRIER);
  module.set_top_level_capability(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_name(*module_name);
  module.set_export_names(*export_names);
  module.set_exports(*exports);
  module.set_evaluation_steps(*evaluation_steps_foreign);
  return handle(module, isolate());
}
2824 
NewJSArrayBuffer(std::shared_ptr<BackingStore> backing_store,AllocationType allocation)2825 Handle<JSArrayBuffer> Factory::NewJSArrayBuffer(
2826     std::shared_ptr<BackingStore> backing_store, AllocationType allocation) {
2827   Handle<Map> map(isolate()->native_context()->array_buffer_fun().initial_map(),
2828                   isolate());
2829   auto result =
2830       Handle<JSArrayBuffer>::cast(NewJSObjectFromMap(map, allocation));
2831   result->Setup(SharedFlag::kNotShared, ResizableFlag::kNotResizable,
2832                 std::move(backing_store));
2833   return result;
2834 }
2835 
// Allocates a JSArrayBuffer together with a freshly allocated backing store
// of {byte_length} bytes. Returns an empty MaybeHandle if the backing-store
// allocation fails.
MaybeHandle<JSArrayBuffer> Factory::NewJSArrayBufferAndBackingStore(
    size_t byte_length, InitializedFlag initialized,
    AllocationType allocation) {
  std::unique_ptr<BackingStore> backing_store = nullptr;

  if (byte_length > 0) {
    backing_store = BackingStore::Allocate(isolate(), byte_length,
                                           SharedFlag::kNotShared, initialized);
    // Report allocation failure to the caller instead of throwing here.
    if (!backing_store) return MaybeHandle<JSArrayBuffer>();
  }
  Handle<Map> map(isolate()->native_context()->array_buffer_fun().initial_map(),
                  isolate());
  auto array_buffer =
      Handle<JSArrayBuffer>::cast(NewJSObjectFromMap(map, allocation));
  array_buffer->Setup(SharedFlag::kNotShared, ResizableFlag::kNotResizable,
                      std::move(backing_store));
  return array_buffer;
}
2854 
NewJSSharedArrayBuffer(std::shared_ptr<BackingStore> backing_store)2855 Handle<JSArrayBuffer> Factory::NewJSSharedArrayBuffer(
2856     std::shared_ptr<BackingStore> backing_store) {
2857   DCHECK_IMPLIES(backing_store->is_resizable(), FLAG_harmony_rab_gsab);
2858   Handle<Map> map(
2859       isolate()->native_context()->shared_array_buffer_fun().initial_map(),
2860       isolate());
2861   auto result = Handle<JSArrayBuffer>::cast(
2862       NewJSObjectFromMap(map, AllocationType::kYoung));
2863   ResizableFlag resizable = backing_store->is_resizable()
2864                                 ? ResizableFlag::kResizable
2865                                 : ResizableFlag::kNotResizable;
2866   result->Setup(SharedFlag::kShared, resizable, std::move(backing_store));
2867   return result;
2868 }
2869 
// Creates a {value, done} iterator-result object using the context's cached
// iterator-result map.
Handle<JSIteratorResult> Factory::NewJSIteratorResult(Handle<Object> value,
                                                      bool done) {
  Handle<Map> map(isolate()->native_context()->iterator_result_map(),
                  isolate());
  Handle<JSIteratorResult> js_iter_result = Handle<JSIteratorResult>::cast(
      NewJSObjectFromMap(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  JSIteratorResult raw = *js_iter_result;
  raw.set_value(*value, SKIP_WRITE_BARRIER);
  raw.set_done(*ToBoolean(done), SKIP_WRITE_BARRIER);
  return js_iter_result;
}
2882 
// Creates the %AsyncFromSyncIterator% wrapper around {sync_iterator},
// storing the iterator's already-looked-up "next" method in {next}.
Handle<JSAsyncFromSyncIterator> Factory::NewJSAsyncFromSyncIterator(
    Handle<JSReceiver> sync_iterator, Handle<Object> next) {
  Handle<Map> map(isolate()->native_context()->async_from_sync_iterator_map(),
                  isolate());
  Handle<JSAsyncFromSyncIterator> iterator =
      Handle<JSAsyncFromSyncIterator>::cast(
          NewJSObjectFromMap(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  JSAsyncFromSyncIterator raw = *iterator;
  raw.set_sync_iterator(*sync_iterator, SKIP_WRITE_BARRIER);
  raw.set_next(*next, SKIP_WRITE_BARRIER);
  return iterator;
}
2896 
NewJSMap()2897 Handle<JSMap> Factory::NewJSMap() {
2898   Handle<Map> map(isolate()->native_context()->js_map_map(), isolate());
2899   Handle<JSMap> js_map = Handle<JSMap>::cast(NewJSObjectFromMap(map));
2900   JSMap::Initialize(js_map, isolate());
2901   return js_map;
2902 }
2903 
NewJSSet()2904 Handle<JSSet> Factory::NewJSSet() {
2905   Handle<Map> map(isolate()->native_context()->js_set_map(), isolate());
2906   Handle<JSSet> js_set = Handle<JSSet>::cast(NewJSObjectFromMap(map));
2907   JSSet::Initialize(js_set, isolate());
2908   return js_set;
2909 }
2910 
// Translates a typed-array ElementsKind (including RAB/GSAB variants) into
// the corresponding external array type and per-element byte size.
void Factory::TypeAndSizeForElementsKind(ElementsKind kind,
                                         ExternalArrayType* array_type,
                                         size_t* element_size) {
  switch (kind) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  case TYPE##_ELEMENTS:                           \
    *array_type = kExternal##Type##Array;         \
    *element_size = sizeof(ctype);                \
    break;
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
    RAB_GSAB_TYPED_ARRAYS_WITH_TYPED_ARRAY_TYPE(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      // Only typed-array kinds are valid here.
      UNREACHABLE();
  }
}
2928 
2929 namespace {
2930 
// Inverse of TypeAndSizeForElementsKind: maps an external array type to its
// per-element byte size and ElementsKind.
void ForFixedTypedArray(ExternalArrayType array_type, size_t* element_size,
                        ElementsKind* element_kind) {
  switch (array_type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  case kExternal##Type##Array:                    \
    *element_size = sizeof(ctype);                \
    *element_kind = TYPE##_ELEMENTS;              \
    return;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  }
  UNREACHABLE();
}
2945 
2946 }  // namespace
2947 
// Allocates a JSArrayBufferView (typed array or DataView) over {buffer}.
// The [byte_offset, byte_offset + byte_length) window must fit the buffer.
Handle<JSArrayBufferView> Factory::NewJSArrayBufferView(
    Handle<Map> map, Handle<FixedArrayBase> elements,
    Handle<JSArrayBuffer> buffer, size_t byte_offset, size_t byte_length) {
  CHECK_LE(byte_length, buffer->byte_length());
  CHECK_LE(byte_offset, buffer->byte_length());
  CHECK_LE(byte_offset + byte_length, buffer->byte_length());
  Handle<JSArrayBufferView> array_buffer_view = Handle<JSArrayBufferView>::cast(
      NewJSObjectFromMap(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  JSArrayBufferView raw = *array_buffer_view;
  raw.set_elements(*elements, SKIP_WRITE_BARRIER);
  raw.set_buffer(*buffer, SKIP_WRITE_BARRIER);
  raw.set_byte_offset(byte_offset);
  raw.set_byte_length(byte_length);
  raw.set_bit_field(0);
  // TODO(v8) remove once embedder data slots are always zero-initialized.
  InitEmbedderFields(raw, Smi::zero());
  DCHECK_EQ(raw.GetEmbedderFieldCount(),
            v8::ArrayBufferView::kEmbedderFieldCount);
  return array_buffer_view;
}
2969 
// Allocates a JSTypedArray of {type} viewing {buffer} at {byte_offset} with
// {length} elements. Bounds against the buffer are checked inside
// NewJSArrayBufferView.
Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type,
                                              Handle<JSArrayBuffer> buffer,
                                              size_t byte_offset,
                                              size_t length) {
  size_t element_size;
  ElementsKind elements_kind;
  ForFixedTypedArray(type, &element_size, &elements_kind);
  size_t byte_length = length * element_size;

  CHECK_LE(length, JSTypedArray::kMaxLength);
  // Guards against overflow in the multiplication above.
  CHECK_EQ(length, byte_length / element_size);
  CHECK_EQ(0, byte_offset % ElementsKindToByteSize(elements_kind));

  Handle<Map> map;
  switch (elements_kind) {
#define TYPED_ARRAY_FUN(Type, type, TYPE, ctype)                              \
  case TYPE##_ELEMENTS:                                                       \
    map =                                                                     \
        handle(isolate()->native_context()->type##_array_fun().initial_map(), \
               isolate());                                                    \
    break;

    TYPED_ARRAYS(TYPED_ARRAY_FUN)
#undef TYPED_ARRAY_FUN

    default:
      UNREACHABLE();
  }
  Handle<JSTypedArray> typed_array =
      Handle<JSTypedArray>::cast(NewJSArrayBufferView(
          map, empty_byte_array(), buffer, byte_offset, byte_length));
  JSTypedArray raw = *typed_array;
  DisallowGarbageCollection no_gc;
  raw.set_length(length);
  raw.SetOffHeapDataPtr(isolate(), buffer->backing_store(), byte_offset);
  raw.set_is_length_tracking(false);
  // A view over a resizable, non-shared buffer is RAB-backed.
  raw.set_is_backed_by_rab(!buffer->is_shared() && buffer->is_resizable());
  return typed_array;
}
3009 
// Allocates a JSDataView over {buffer}, caching a direct pointer into the
// backing store at {byte_offset}.
Handle<JSDataView> Factory::NewJSDataView(Handle<JSArrayBuffer> buffer,
                                          size_t byte_offset,
                                          size_t byte_length) {
  Handle<Map> map(isolate()->native_context()->data_view_fun().initial_map(),
                  isolate());
  Handle<JSDataView> obj = Handle<JSDataView>::cast(NewJSArrayBufferView(
      map, empty_fixed_array(), buffer, byte_offset, byte_length));
  obj->set_data_pointer(
      isolate(), static_cast<uint8_t*>(buffer->backing_store()) + byte_offset);
  // TODO(v8:11111): Support creating length tracking DataViews via the API.
  obj->set_is_length_tracking(false);
  obj->set_is_backed_by_rab(!buffer->is_shared() && buffer->is_resizable());
  return obj;
}
3024 
// Creates a JSBoundFunction for {target_function} with the given bound
// receiver and bound arguments (cf. Function.prototype.bind). Throws a
// RangeError when too many bound arguments are supplied.
MaybeHandle<JSBoundFunction> Factory::NewJSBoundFunction(
    Handle<JSReceiver> target_function, Handle<Object> bound_this,
    base::Vector<Handle<Object>> bound_args) {
  DCHECK(target_function->IsCallable());
  STATIC_ASSERT(Code::kMaxArguments <= FixedArray::kMaxLength);
  if (bound_args.length() >= Code::kMaxArguments) {
    THROW_NEW_ERROR(isolate(),
                    NewRangeError(MessageTemplate::kTooManyArguments),
                    JSBoundFunction);
  }

  // Determine the prototype of the {target_function}.
  Handle<HeapObject> prototype;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), prototype,
      JSReceiver::GetPrototype(isolate(), target_function), JSBoundFunction);

  // Allocate in the target function's creation context.
  SaveAndSwitchContext save(
      isolate(), *target_function->GetCreationContext().ToHandleChecked());

  // Create the [[BoundArguments]] for the result.
  Handle<FixedArray> bound_arguments;
  if (bound_args.length() == 0) {
    bound_arguments = empty_fixed_array();
  } else {
    bound_arguments = NewFixedArray(bound_args.length());
    for (int i = 0; i < bound_args.length(); ++i) {
      bound_arguments->set(i, *bound_args[i]);
    }
  }

  // Setup the map for the JSBoundFunction instance.
  Handle<Map> map = target_function->IsConstructor()
                        ? isolate()->bound_function_with_constructor_map()
                        : isolate()->bound_function_without_constructor_map();
  if (map->prototype() != *prototype) {
    map = Map::TransitionToPrototype(isolate(), map, prototype);
  }
  DCHECK_EQ(target_function->IsConstructor(), map->is_constructor());

  // Setup the JSBoundFunction instance.
  Handle<JSBoundFunction> result = Handle<JSBoundFunction>::cast(
      NewJSObjectFromMap(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  JSBoundFunction raw = *result;
  raw.set_bound_target_function(*target_function, SKIP_WRITE_BARRIER);
  raw.set_bound_this(*bound_this, SKIP_WRITE_BARRIER);
  raw.set_bound_arguments(*bound_arguments, SKIP_WRITE_BARRIER);
  return result;
}
3075 
// ES6 section 9.5.15 ProxyCreate (target, handler)
// The map is chosen by target callability/constructibility so that the proxy
// itself is callable/constructible exactly when its target is.
Handle<JSProxy> Factory::NewJSProxy(Handle<JSReceiver> target,
                                    Handle<JSReceiver> handler) {
  // Allocate the proxy object.
  Handle<Map> map;
  if (target->IsCallable()) {
    if (target->IsConstructor()) {
      map = Handle<Map>(isolate()->proxy_constructor_map());
    } else {
      map = Handle<Map>(isolate()->proxy_callable_map());
    }
  } else {
    map = Handle<Map>(isolate()->proxy_map());
  }
  DCHECK(map->prototype().IsNull(isolate()));
  JSProxy result = JSProxy::cast(New(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  result.initialize_properties(isolate());
  result.set_target(*target, SKIP_WRITE_BARRIER);
  result.set_handler(*handler, SKIP_WRITE_BARRIER);
  return handle(result, isolate());
}
3098 
// Allocates an empty shell of a JSGlobalProxy of {size} bytes that must be
// reinitialized later via ReinitializeJSGlobalProxy.
Handle<JSGlobalProxy> Factory::NewUninitializedJSGlobalProxy(int size) {
  // Create an empty shell of a JSGlobalProxy that needs to be reinitialized
  // via ReinitializeJSGlobalProxy later.
  Handle<Map> map = NewMap(JS_GLOBAL_PROXY_TYPE, size);
  // Maintain invariant expected from any JSGlobalProxy.
  {
    DisallowGarbageCollection no_gc;
    Map raw = *map;
    raw.set_is_access_check_needed(true);
    raw.set_may_have_interesting_symbols(true);
    LOG(isolate(), MapDetails(raw));
  }
  Handle<JSGlobalProxy> proxy = Handle<JSGlobalProxy>::cast(
      NewJSObjectFromMap(map, AllocationType::kOld));
  // Create identity hash early in case there is any JS collection containing
  // a global proxy key and needs to be rehashed after deserialization.
  proxy->GetOrCreateIdentityHash(isolate());
  return proxy;
}
3118 
// Re-initializes an already-allocated JSGlobalProxy shell to match the
// instance layout of {constructor}'s initial map, preserving the proxy's
// identity hash.
void Factory::ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> object,
                                        Handle<JSFunction> constructor) {
  DCHECK(constructor->has_initial_map());
  Handle<Map> map(constructor->initial_map(), isolate());
  Handle<Map> old_map(object->map(), isolate());

  // The proxy's hash should be retained across reinitialization.
  Handle<Object> raw_properties_or_hash(object->raw_properties_or_hash(),
                                        isolate());

  if (old_map->is_prototype_map()) {
    map = Map::Copy(isolate(), map, "CopyAsPrototypeForJSGlobalProxy");
    map->set_is_prototype_map(true);
  }
  JSObject::NotifyMapChange(old_map, map, isolate());
  old_map->NotifyLeafMapLayoutChange(isolate());

  // Check that the already allocated object has the same size and type as
  // objects allocated using the constructor.
  DCHECK(map->instance_size() == old_map->instance_size());
  DCHECK(map->instance_type() == old_map->instance_type());

  // In order to keep heap in consistent state there must be no allocations
  // before object re-initialization is finished.
  DisallowGarbageCollection no_gc;

  // Reset the map for the object.
  JSGlobalProxy raw = *object;
  raw.set_map(*map, kReleaseStore);

  // Reinitialize the object from the constructor map.
  InitializeJSObjectFromMap(raw, *raw_properties_or_hash, *map);
}
3152 
// Allocates a JSMessageObject describing a diagnostic message. When
// {start_position} is negative, the source position must be recomputed later
// from {shared_info} and {bytecode_offset}.
Handle<JSMessageObject> Factory::NewJSMessageObject(
    MessageTemplate message, Handle<Object> argument, int start_position,
    int end_position, Handle<SharedFunctionInfo> shared_info,
    int bytecode_offset, Handle<Script> script, Handle<Object> stack_frames) {
  Handle<Map> map = message_object_map();
  JSMessageObject message_obj =
      JSMessageObject::cast(New(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  message_obj.set_raw_properties_or_hash(*empty_fixed_array(),
                                         SKIP_WRITE_BARRIER);
  message_obj.initialize_elements();
  message_obj.set_elements(*empty_fixed_array(), SKIP_WRITE_BARRIER);
  message_obj.set_type(message);
  message_obj.set_argument(*argument, SKIP_WRITE_BARRIER);
  message_obj.set_start_position(start_position);
  message_obj.set_end_position(end_position);
  message_obj.set_script(*script, SKIP_WRITE_BARRIER);
  if (start_position >= 0) {
    // If there's a start_position, then there's no need to store the
    // SharedFunctionInfo as it will never be necessary to regenerate the
    // position.
    message_obj.set_shared_info(*undefined_value(), SKIP_WRITE_BARRIER);
    message_obj.set_bytecode_offset(Smi::FromInt(0));
  } else {
    message_obj.set_bytecode_offset(Smi::FromInt(bytecode_offset));
    if (shared_info.is_null()) {
      message_obj.set_shared_info(*undefined_value(), SKIP_WRITE_BARRIER);
      DCHECK_EQ(bytecode_offset, -1);
    } else {
      message_obj.set_shared_info(*shared_info, SKIP_WRITE_BARRIER);
      DCHECK_GE(bytecode_offset, kFunctionEntryBytecodeOffset);
    }
  }

  message_obj.set_stack_frames(*stack_frames, SKIP_WRITE_BARRIER);
  message_obj.set_error_level(v8::Isolate::kMessageError);
  return handle(message_obj, isolate());
}
3191 
NewSharedFunctionInfoForApiFunction(MaybeHandle<String> maybe_name,Handle<FunctionTemplateInfo> function_template_info,FunctionKind kind)3192 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForApiFunction(
3193     MaybeHandle<String> maybe_name,
3194     Handle<FunctionTemplateInfo> function_template_info, FunctionKind kind) {
3195   Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(
3196       maybe_name, function_template_info, Builtin::kNoBuiltinId, kind);
3197   return shared;
3198 }
3199 
NewSharedFunctionInfoForBuiltin(MaybeHandle<String> maybe_name,Builtin builtin,FunctionKind kind)3200 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForBuiltin(
3201     MaybeHandle<String> maybe_name, Builtin builtin, FunctionKind kind) {
3202   Handle<SharedFunctionInfo> shared =
3203       NewSharedFunctionInfo(maybe_name, MaybeHandle<Code>(), builtin, kind);
3204   return shared;
3205 }
3206 
NewSharedFunctionInfoForWebSnapshot()3207 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForWebSnapshot() {
3208   return NewSharedFunctionInfo(empty_string(), MaybeHandle<Code>(),
3209                                Builtin::kNoBuiltinId,
3210                                FunctionKind::kNormalFunction);
3211 }
3212 
3213 namespace {
NumberToStringCacheHash(Handle<FixedArray> cache,Smi number)3214 V8_INLINE int NumberToStringCacheHash(Handle<FixedArray> cache, Smi number) {
3215   int mask = (cache->length() >> 1) - 1;
3216   return number.value() & mask;
3217 }
3218 
NumberToStringCacheHash(Handle<FixedArray> cache,double number)3219 V8_INLINE int NumberToStringCacheHash(Handle<FixedArray> cache, double number) {
3220   int mask = (cache->length() >> 1) - 1;
3221   int64_t bits = bit_cast<int64_t>(number);
3222   return (static_cast<int>(bits) ^ static_cast<int>(bits >> 32)) & mask;
3223 }
3224 
CharToString(Factory * factory,const char * string,NumberCacheMode mode)3225 V8_INLINE Handle<String> CharToString(Factory* factory, const char* string,
3226                                       NumberCacheMode mode) {
3227   // We tenure the allocated string since it is referenced from the
3228   // number-string cache which lives in the old space.
3229   AllocationType type = mode == NumberCacheMode::kIgnore
3230                             ? AllocationType::kYoung
3231                             : AllocationType::kOld;
3232   return factory->NewStringFromAsciiChecked(string, type);
3233 }
3234 
3235 }  // namespace
3236 
// Inserts a (number, string) pair into the number-string cache at slot
// |hash|. On a slot conflict (and unless optimizing for size) the cache is
// grown once to its maximum size instead of overwriting; note the early
// return in that case — the new entry is intentionally NOT written into the
// freshly allocated cache, since |hash| was computed for the old size.
void Factory::NumberToStringCacheSet(Handle<Object> number, int hash,
                                     Handle<String> js_string) {
  if (!number_string_cache()->get(hash * 2).IsUndefined(isolate()) &&
      !FLAG_optimize_for_size) {
    int full_size = isolate()->heap()->MaxNumberToStringCacheSize();
    if (number_string_cache()->length() != full_size) {
      Handle<FixedArray> new_cache =
          NewFixedArray(full_size, AllocationType::kOld);
      isolate()->heap()->set_number_string_cache(*new_cache);
      return;
    }
  }
  // No allocation below this point; safe to operate on raw objects.
  DisallowGarbageCollection no_gc;
  FixedArray cache = *number_string_cache();
  cache.set(hash * 2, *number);
  cache.set(hash * 2 + 1, *js_string);
}
3254 
NumberToStringCacheGet(Object number,int hash)3255 Handle<Object> Factory::NumberToStringCacheGet(Object number, int hash) {
3256   DisallowGarbageCollection no_gc;
3257   FixedArray cache = *number_string_cache();
3258   Object key = cache.get(hash * 2);
3259   if (key == number || (key.IsHeapNumber() && number.IsHeapNumber() &&
3260                         key.Number() == number.Number())) {
3261     return Handle<String>(String::cast(cache.get(hash * 2 + 1)), isolate());
3262   }
3263   return undefined_value();
3264 }
3265 
NumberToString(Handle<Object> number,NumberCacheMode mode)3266 Handle<String> Factory::NumberToString(Handle<Object> number,
3267                                        NumberCacheMode mode) {
3268   SLOW_DCHECK(number->IsNumber());
3269   if (number->IsSmi()) return SmiToString(Smi::cast(*number), mode);
3270 
3271   double double_value = Handle<HeapNumber>::cast(number)->value();
3272   // Try to canonicalize doubles.
3273   int smi_value;
3274   if (DoubleToSmiInteger(double_value, &smi_value)) {
3275     return SmiToString(Smi::FromInt(smi_value), mode);
3276   }
3277   return HeapNumberToString(Handle<HeapNumber>::cast(number), double_value,
3278                             mode);
3279 }
3280 
// Must be large enough to fit any double, int, or size_t rendered as a
// decimal C string (including sign and terminating NUL).
static const int kNumberToStringBufferSize = 32;
3283 
// Converts a HeapNumber to its string representation. |value| is the
// number's double value, passed in by the caller. Depending on |mode| the
// number-string cache is probed (kBoth only) and updated (any mode other
// than kIgnore).
Handle<String> Factory::HeapNumberToString(Handle<HeapNumber> number,
                                           double value, NumberCacheMode mode) {
  int hash = 0;
  if (mode != NumberCacheMode::kIgnore) {
    hash = NumberToStringCacheHash(number_string_cache(), value);
  }
  if (mode == NumberCacheMode::kBoth) {
    Handle<Object> cached = NumberToStringCacheGet(*number, hash);
    if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached);
  }

  Handle<String> result;
  if (value == 0) {
    // Note: -0.0 compares equal to 0, so negative zero also maps to "0",
    // matching JS Number-to-String semantics.
    result = zero_string();
  } else if (std::isnan(value)) {
    result = NaN_string();
  } else {
    char arr[kNumberToStringBufferSize];
    base::Vector<char> buffer(arr, arraysize(arr));
    const char* string = DoubleToCString(value, buffer);
    result = CharToString(this, string, mode);
  }
  if (mode != NumberCacheMode::kIgnore) {
    NumberToStringCacheSet(number, hash, result);
  }
  return result;
}
3311 
// Converts a Smi to its decimal string representation. The number-string
// cache is probed when |mode| is kBoth, and updated for any mode other than
// kIgnore.
inline Handle<String> Factory::SmiToString(Smi number, NumberCacheMode mode) {
  int hash = NumberToStringCacheHash(number_string_cache(), number);
  if (mode == NumberCacheMode::kBoth) {
    Handle<Object> cached = NumberToStringCacheGet(number, hash);
    if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached);
  }

  Handle<String> result;
  if (number == Smi::zero()) {
    result = zero_string();
  } else {
    char arr[kNumberToStringBufferSize];
    base::Vector<char> buffer(arr, arraysize(arr));
    const char* string = IntToCString(number.value(), buffer);
    result = CharToString(this, string, mode);
  }
  if (mode != NumberCacheMode::kIgnore) {
    NumberToStringCacheSet(handle(number, isolate()), hash, result);
  }

  // Compute the hash here (rather than letting the caller take care of it) so
  // that the "cache hit" case above doesn't have to bother with it.
  STATIC_ASSERT(Smi::kMaxValue <= std::numeric_limits<uint32_t>::max());
  {
    DisallowGarbageCollection no_gc;
    String raw = *result;
    // Only install an array-index hash for non-negative values; negative
    // Smis keep the empty hash field and get a regular hash computed lazily.
    if (raw.raw_hash_field() == String::kEmptyHashField &&
        number.value() >= 0) {
      uint32_t raw_hash_field = StringHasher::MakeArrayIndexHash(
          static_cast<uint32_t>(number.value()), raw.length());
      raw.set_raw_hash_field(raw_hash_field);
    }
  }
  return result;
}
3347 
// Converts a size_t to its decimal string representation. Values in Smi or
// safe-integer range reuse the (cacheable) Smi/HeapNumber paths; larger
// values are formatted manually and cannot be cached (the cache needs an
// Object key).
Handle<String> Factory::SizeToString(size_t value, bool check_cache) {
  Handle<String> result;
  NumberCacheMode cache_mode =
      check_cache ? NumberCacheMode::kBoth : NumberCacheMode::kIgnore;
  if (value <= Smi::kMaxValue) {
    int32_t int32v = static_cast<int32_t>(static_cast<uint32_t>(value));
    // SmiToString sets the hash when needed, we can return immediately.
    return SmiToString(Smi::FromInt(int32v), cache_mode);
  } else if (value <= kMaxSafeInteger) {
    // TODO(jkummerow): Refactor the cache to not require Objects as keys.
    double double_value = static_cast<double>(value);
    result = HeapNumberToString(NewHeapNumber(double_value), value, cache_mode);
  } else {
    char arr[kNumberToStringBufferSize];
    base::Vector<char> buffer(arr, arraysize(arr));
    // Build the string backwards from the least significant digit.
    int i = buffer.length();
    size_t value_copy = value;
    buffer[--i] = '\0';
    do {
      buffer[--i] = '0' + (value_copy % 10);
      value_copy /= 10;
    } while (value_copy > 0);
    char* string = buffer.begin() + i;
    // No way to cache this; we'd need an {Object} to use as key.
    result = NewStringFromAsciiChecked(string);
  }
  {
    DisallowGarbageCollection no_gc;
    String raw = *result;
    // Pre-install an array-index hash for index-sized values so later
    // element lookups don't have to compute it.
    if (value <= JSArray::kMaxArrayIndex &&
        raw.raw_hash_field() == String::kEmptyHashField) {
      uint32_t raw_hash_field = StringHasher::MakeArrayIndexHash(
          static_cast<uint32_t>(value), raw.length());
      raw.set_raw_hash_field(raw_hash_field);
    }
  }
  return result;
}
3387 
// Allocates a DebugInfo for |shared| (which must not already have one),
// initializes all of its fields, and links it back to the
// SharedFunctionInfo.
Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) {
  DCHECK(!shared->HasDebugInfo());

  auto debug_info =
      NewStructInternal<DebugInfo>(DEBUG_INFO_TYPE, AllocationType::kOld);
  // No allocation below this point; raw-object writes are safe.
  DisallowGarbageCollection no_gc;
  SharedFunctionInfo raw_shared = *shared;
  debug_info.set_flags(DebugInfo::kNone, kRelaxedStore);
  debug_info.set_shared(raw_shared);
  debug_info.set_debugger_hints(0);
  DCHECK_EQ(DebugInfo::kNoDebuggingId, debug_info.debugging_id());
  debug_info.set_script(raw_shared.script_or_debug_info(kAcquireLoad));
  HeapObject undefined = *undefined_value();
  // Bytecode arrays start out unset (undefined) until the debugger installs
  // instrumented bytecode.
  debug_info.set_original_bytecode_array(undefined, kReleaseStore,
                                         SKIP_WRITE_BARRIER);
  debug_info.set_debug_bytecode_array(undefined, kReleaseStore,
                                      SKIP_WRITE_BARRIER);
  debug_info.set_break_points(*empty_fixed_array(), SKIP_WRITE_BARRIER);

  // Link debug info to function.
  raw_shared.SetDebugInfo(debug_info);

  return handle(debug_info, isolate());
}
3412 
NewBreakPointInfo(int source_position)3413 Handle<BreakPointInfo> Factory::NewBreakPointInfo(int source_position) {
3414   auto new_break_point_info = NewStructInternal<BreakPointInfo>(
3415       BREAK_POINT_INFO_TYPE, AllocationType::kOld);
3416   DisallowGarbageCollection no_gc;
3417   new_break_point_info.set_source_position(source_position);
3418   new_break_point_info.set_break_points(*undefined_value(), SKIP_WRITE_BARRIER);
3419   return handle(new_break_point_info, isolate());
3420 }
3421 
NewBreakPoint(int id,Handle<String> condition)3422 Handle<BreakPoint> Factory::NewBreakPoint(int id, Handle<String> condition) {
3423   auto new_break_point =
3424       NewStructInternal<BreakPoint>(BREAK_POINT_TYPE, AllocationType::kOld);
3425   DisallowGarbageCollection no_gc;
3426   new_break_point.set_id(id);
3427   new_break_point.set_condition(*condition);
3428   return handle(new_break_point, isolate());
3429 }
3430 
// Allocates a CallSiteInfo (one frame of an error stack trace) and fills in
// all fields. The struct is freshly allocated in young space, so write
// barriers are skipped for the initial stores.
Handle<CallSiteInfo> Factory::NewCallSiteInfo(
    Handle<Object> receiver_or_instance, Handle<Object> function,
    Handle<HeapObject> code_object, int code_offset_or_source_position,
    int flags, Handle<FixedArray> parameters) {
  auto info = NewStructInternal<CallSiteInfo>(CALL_SITE_INFO_TYPE,
                                              AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  info.set_receiver_or_instance(*receiver_or_instance, SKIP_WRITE_BARRIER);
  info.set_function(*function, SKIP_WRITE_BARRIER);
  info.set_code_object(*code_object, SKIP_WRITE_BARRIER);
  info.set_code_offset_or_source_position(code_offset_or_source_position);
  info.set_flags(flags);
  info.set_parameters(*parameters, SKIP_WRITE_BARRIER);
  return handle(info, isolate());
}
3446 
// Allocates a StackFrameInfo. |shared_or_script| is either a
// SharedFunctionInfo or a Script, and correspondingly the offset argument
// is a bytecode offset or a source position (must be non-negative).
Handle<StackFrameInfo> Factory::NewStackFrameInfo(
    Handle<HeapObject> shared_or_script, int bytecode_offset_or_source_position,
    Handle<String> function_name, bool is_constructor) {
  DCHECK_GE(bytecode_offset_or_source_position, 0);
  StackFrameInfo info = NewStructInternal<StackFrameInfo>(
      STACK_FRAME_INFO_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  info.set_flags(0);
  info.set_shared_or_script(*shared_or_script, SKIP_WRITE_BARRIER);
  info.set_bytecode_offset_or_source_position(
      bytecode_offset_or_source_position);
  info.set_function_name(*function_name, SKIP_WRITE_BARRIER);
  info.set_is_constructor(is_constructor);
  return handle(info, isolate());
}
3462 
// Allocates a PromiseOnStack link node. |prev| is the previous node of the
// chain; the promise itself is held only weakly, so it can be collected
// while the node is still alive.
Handle<PromiseOnStack> Factory::NewPromiseOnStack(Handle<Object> prev,
                                                  Handle<JSObject> promise) {
  PromiseOnStack promise_on_stack = NewStructInternal<PromiseOnStack>(
      PROMISE_ON_STACK_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  promise_on_stack.set_prev(*prev, SKIP_WRITE_BARRIER);
  promise_on_stack.set_promise(*MaybeObjectHandle::Weak(promise));
  return handle(promise_on_stack, isolate());
}
3472 
// Creates an arguments object for |callee| with the given |length|. The
// strict-style map (no "callee" property) is also used for sloppy functions
// without simple parameters.
Handle<JSObject> Factory::NewArgumentsObject(Handle<JSFunction> callee,
                                             int length) {
  bool strict_mode_callee = is_strict(callee->shared().language_mode()) ||
                            !callee->shared().has_simple_parameters();
  Handle<Map> map = strict_mode_callee ? isolate()->strict_arguments_map()
                                       : isolate()->sloppy_arguments_map();
  // NOTE(review): |context| is not referenced below — presumably kept for
  // constructor/destructor side effects, if any; verify whether it can be
  // removed.
  AllocationSiteUsageContext context(isolate(), Handle<AllocationSite>(),
                                     false);
  DCHECK(!isolate()->has_pending_exception());
  Handle<JSObject> result = NewJSObjectFromMap(map);
  Handle<Smi> value(Smi::FromInt(length), isolate());
  Object::SetProperty(isolate(), result, length_string(), value,
                      StoreOrigin::kMaybeKeyed,
                      Just(ShouldThrow::kThrowOnError))
      .Assert();
  if (!strict_mode_callee) {
    // Only sloppy-style arguments objects expose the callee.
    Object::SetProperty(isolate(), result, callee_string(), callee,
                        StoreOrigin::kMaybeKeyed,
                        Just(ShouldThrow::kThrowOnError))
        .Assert();
  }
  return result;
}
3496 
// Returns a Map for an object literal with |number_of_properties|
// properties, using the per-native-context map cache. The cache holds weak
// references, so a cleared entry simply causes a fresh Map to be created
// and re-cached.
Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<NativeContext> context,
                                               int number_of_properties) {
  // Use initial slow object proto map for too many properties.
  if (number_of_properties >= JSObject::kMapCacheSize) {
    return handle(context->slow_object_with_object_prototype_map(), isolate());
  }

  Handle<WeakFixedArray> cache(WeakFixedArray::cast(context->map_cache()),
                               isolate());

  // Check to see whether there is a matching element in the cache.
  MaybeObject result = cache->Get(number_of_properties);
  HeapObject heap_object;
  if (result->GetHeapObjectIfWeak(&heap_object)) {
    Map map = Map::cast(heap_object);
    DCHECK(!map.is_dictionary_map());
    return handle(map, isolate());
  }

  // Create a new map and add it to the cache.
  Handle<Map> map = Map::Create(isolate(), number_of_properties);
  DCHECK(!map->is_dictionary_map());
  cache->Set(number_of_properties, HeapObjectReference::Weak(*map));
  return map;
}
3522 
NewMegaDomHandler(MaybeObjectHandle accessor,MaybeObjectHandle context)3523 Handle<MegaDomHandler> Factory::NewMegaDomHandler(MaybeObjectHandle accessor,
3524                                                   MaybeObjectHandle context) {
3525   Handle<Map> map = read_only_roots().mega_dom_handler_map_handle();
3526   MegaDomHandler handler = MegaDomHandler::cast(New(map, AllocationType::kOld));
3527   DisallowGarbageCollection no_gc;
3528   handler.set_accessor(*accessor);
3529   handler.set_context(*context);
3530   return handle(handler, isolate());
3531 }
3532 
NewLoadHandler(int data_count,AllocationType allocation)3533 Handle<LoadHandler> Factory::NewLoadHandler(int data_count,
3534                                             AllocationType allocation) {
3535   Handle<Map> map;
3536   switch (data_count) {
3537     case 1:
3538       map = load_handler1_map();
3539       break;
3540     case 2:
3541       map = load_handler2_map();
3542       break;
3543     case 3:
3544       map = load_handler3_map();
3545       break;
3546     default:
3547       UNREACHABLE();
3548   }
3549   return handle(LoadHandler::cast(New(map, allocation)), isolate());
3550 }
3551 
NewStoreHandler(int data_count)3552 Handle<StoreHandler> Factory::NewStoreHandler(int data_count) {
3553   Handle<Map> map;
3554   switch (data_count) {
3555     case 0:
3556       map = store_handler0_map();
3557       break;
3558     case 1:
3559       map = store_handler1_map();
3560       break;
3561     case 2:
3562       map = store_handler2_map();
3563       break;
3564     case 3:
3565       map = store_handler3_map();
3566       break;
3567     default:
3568       UNREACHABLE();
3569   }
3570   return handle(StoreHandler::cast(New(map, AllocationType::kOld)), isolate());
3571 }
3572 
// Installs a fresh ATOM-tagged data array on |regexp|. The raw FixedArray
// is manipulated off-handle, which is safe only because no allocation
// happens between NewFixedArray and regexp->set_data.
void Factory::SetRegExpAtomData(Handle<JSRegExp> regexp, Handle<String> source,
                                JSRegExp::Flags flags, Handle<Object> data) {
  FixedArray store =
      *NewFixedArray(JSRegExp::kAtomDataSize, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  store.set(JSRegExp::kTagIndex, Smi::FromInt(JSRegExp::ATOM));
  store.set(JSRegExp::kSourceIndex, *source, SKIP_WRITE_BARRIER);
  store.set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
  store.set(JSRegExp::kAtomPatternIndex, *data, SKIP_WRITE_BARRIER);
  regexp->set_data(store);
}
3584 
// Installs a fresh IRREGEXP-tagged data array on |regexp|. Code/bytecode
// slots start out uninitialized; the tier-up counter is only armed when
// --regexp-tier-up is enabled.
void Factory::SetRegExpIrregexpData(Handle<JSRegExp> regexp,
                                    Handle<String> source,
                                    JSRegExp::Flags flags, int capture_count,
                                    uint32_t backtrack_limit) {
  DCHECK(Smi::IsValid(backtrack_limit));
  FixedArray store =
      *NewFixedArray(JSRegExp::kIrregexpDataSize, AllocationType::kYoung);
  // No allocation below this point; raw FixedArray writes are safe.
  DisallowGarbageCollection no_gc;
  Smi uninitialized = Smi::FromInt(JSRegExp::kUninitializedValue);
  Smi ticks_until_tier_up = FLAG_regexp_tier_up
                                ? Smi::FromInt(FLAG_regexp_tier_up_ticks)
                                : uninitialized;
  store.set(JSRegExp::kTagIndex, Smi::FromInt(JSRegExp::IRREGEXP));
  store.set(JSRegExp::kSourceIndex, *source, SKIP_WRITE_BARRIER);
  store.set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
  store.set(JSRegExp::kIrregexpLatin1CodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpUC16CodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpLatin1BytecodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpUC16BytecodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::zero());
  store.set(JSRegExp::kIrregexpCaptureCountIndex, Smi::FromInt(capture_count));
  store.set(JSRegExp::kIrregexpCaptureNameMapIndex, uninitialized);
  store.set(JSRegExp::kIrregexpTicksUntilTierUpIndex, ticks_until_tier_up);
  store.set(JSRegExp::kIrregexpBacktrackLimit, Smi::FromInt(backtrack_limit));
  regexp->set_data(store);
}
3611 
// Installs a fresh EXPERIMENTAL-tagged data array on |regexp|. Unlike the
// irregexp variant, max-register-count, tier-up ticks and backtrack limit
// also stay uninitialized.
void Factory::SetRegExpExperimentalData(Handle<JSRegExp> regexp,
                                        Handle<String> source,
                                        JSRegExp::Flags flags,
                                        int capture_count) {
  FixedArray store =
      *NewFixedArray(JSRegExp::kExperimentalDataSize, AllocationType::kYoung);
  // No allocation below this point; raw FixedArray writes are safe.
  DisallowGarbageCollection no_gc;
  Smi uninitialized = Smi::FromInt(JSRegExp::kUninitializedValue);

  store.set(JSRegExp::kTagIndex, Smi::FromInt(JSRegExp::EXPERIMENTAL));
  store.set(JSRegExp::kSourceIndex, *source, SKIP_WRITE_BARRIER);
  store.set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
  store.set(JSRegExp::kIrregexpLatin1CodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpUC16CodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpLatin1BytecodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpUC16BytecodeIndex, uninitialized);
  store.set(JSRegExp::kIrregexpMaxRegisterCountIndex, uninitialized);
  store.set(JSRegExp::kIrregexpCaptureCountIndex, Smi::FromInt(capture_count));
  store.set(JSRegExp::kIrregexpCaptureNameMapIndex, uninitialized);
  store.set(JSRegExp::kIrregexpTicksUntilTierUpIndex, uninitialized);
  store.set(JSRegExp::kIrregexpBacktrackLimit, uninitialized);
  regexp->set_data(store);
}
3635 
// Allocates the initial "last match info" object: all fixed fields plus
// room for one match (two capture indices). It grows later as needed.
Handle<RegExpMatchInfo> Factory::NewRegExpMatchInfo() {
  // Initially, the last match info consists of all fixed fields plus space for
  // the match itself (i.e., 2 capture indices).
  static const int kInitialSize = RegExpMatchInfo::kFirstCaptureIndex +
                                  RegExpMatchInfo::kInitialCaptureIndices;

  Handle<FixedArray> elems =
      NewFixedArray(kInitialSize, AllocationType::kYoung);
  Handle<RegExpMatchInfo> result = Handle<RegExpMatchInfo>::cast(elems);
  {
    DisallowGarbageCollection no_gc;
    RegExpMatchInfo raw = *result;
    raw.SetNumberOfCaptureRegisters(RegExpMatchInfo::kInitialCaptureIndices);
    raw.SetLastSubject(*empty_string(), SKIP_WRITE_BARRIER);
    raw.SetLastInput(*undefined_value(), SKIP_WRITE_BARRIER);
    raw.SetCapture(0, 0);
    raw.SetCapture(1, 0);
  }
  return result;
}
3656 
GlobalConstantFor(Handle<Name> name)3657 Handle<Object> Factory::GlobalConstantFor(Handle<Name> name) {
3658   if (Name::Equals(isolate(), name, undefined_string())) {
3659     return undefined_value();
3660   }
3661   if (Name::Equals(isolate(), name, NaN_string())) return nan_value();
3662   if (Name::Equals(isolate(), name, Infinity_string())) return infinity_value();
3663   return Handle<Object>::null();
3664 }
3665 
ToPrimitiveHintString(ToPrimitiveHint hint)3666 Handle<String> Factory::ToPrimitiveHintString(ToPrimitiveHint hint) {
3667   switch (hint) {
3668     case ToPrimitiveHint::kDefault:
3669       return default_string();
3670     case ToPrimitiveHint::kNumber:
3671       return number_string();
3672     case ToPrimitiveHint::kString:
3673       return string_string();
3674   }
3675   UNREACHABLE();
3676 }
3677 
// Creates the Map for sloppy-mode JSFunctions. Sloppy function maps expose
// "length", "name", "arguments" and "caller" (plus "prototype" when the
// mode has one). "name" is an in-object data field when the mode carries a
// name, otherwise an accessor. Descriptor order is fixed: the STATIC_ASSERTs
// below pin "length" and "name" to descriptor indices 0 and 1.
Handle<Map> Factory::CreateSloppyFunctionMap(
    FunctionMode function_mode, MaybeHandle<JSFunction> maybe_empty_function) {
  bool has_prototype = IsFunctionModeWithPrototype(function_mode);
  int header_size = has_prototype ? JSFunction::kSizeWithPrototype
                                  : JSFunction::kSizeWithoutPrototype;
  int descriptors_count = has_prototype ? 5 : 4;
  int inobject_properties_count = 0;
  if (IsFunctionModeWithName(function_mode)) ++inobject_properties_count;

  Handle<Map> map = NewMap(
      JS_FUNCTION_TYPE, header_size + inobject_properties_count * kTaggedSize,
      TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
  {
    DisallowGarbageCollection no_gc;
    Map raw_map = *map;
    raw_map.set_has_prototype_slot(has_prototype);
    raw_map.set_is_constructor(has_prototype);
    raw_map.set_is_callable(true);
  }
  Handle<JSFunction> empty_function;
  if (maybe_empty_function.ToHandle(&empty_function)) {
    // The empty function (when supplied) becomes the map's prototype.
    Map::SetPrototype(isolate(), map, empty_function);
  }

  //
  // Setup descriptors array.
  //
  Map::EnsureDescriptorSlack(isolate(), map, descriptors_count);

  PropertyAttributes ro_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
  PropertyAttributes rw_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
  PropertyAttributes roc_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);

  int field_index = 0;
  STATIC_ASSERT(
      JSFunctionOrBoundFunctionOrWrappedFunction::kLengthDescriptorIndex == 0);
  {  // Add length accessor.
    Descriptor d = Descriptor::AccessorConstant(
        length_string(), function_length_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  STATIC_ASSERT(
      JSFunctionOrBoundFunctionOrWrappedFunction::kNameDescriptorIndex == 1);
  if (IsFunctionModeWithName(function_mode)) {
    // Add name field.
    Handle<Name> name = isolate()->factory()->name_string();
    Descriptor d = Descriptor::DataField(isolate(), name, field_index++,
                                         roc_attribs, Representation::Tagged());
    map->AppendDescriptor(isolate(), &d);

  } else {
    // Add name accessor.
    Descriptor d = Descriptor::AccessorConstant(
        name_string(), function_name_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  {  // Add arguments accessor.
    Descriptor d = Descriptor::AccessorConstant(
        arguments_string(), function_arguments_accessor(), ro_attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  {  // Add caller accessor.
    Descriptor d = Descriptor::AccessorConstant(
        caller_string(), function_caller_accessor(), ro_attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  if (IsFunctionModeWithPrototype(function_mode)) {
    // Add prototype accessor.
    PropertyAttributes attribs =
        IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs
                                                           : ro_attribs;
    Descriptor d = Descriptor::AccessorConstant(
        prototype_string(), function_prototype_accessor(), attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  DCHECK_EQ(inobject_properties_count, field_index);
  DCHECK_EQ(0,
            map->instance_descriptors(isolate()).number_of_slack_descriptors());
  LOG(isolate(), MapDetails(*map));
  return map;
}
3763 
// Creates the Map for strict-mode JSFunctions. Unlike the sloppy variant,
// strict function maps have no "arguments"/"caller" accessors. Descriptor
// order is fixed: the STATIC_ASSERTs pin "length" and "name" to descriptor
// indices 0 and 1.
Handle<Map> Factory::CreateStrictFunctionMap(
    FunctionMode function_mode, Handle<JSFunction> empty_function) {
  bool has_prototype = IsFunctionModeWithPrototype(function_mode);
  int header_size = has_prototype ? JSFunction::kSizeWithPrototype
                                  : JSFunction::kSizeWithoutPrototype;
  int inobject_properties_count = 0;
  // length and prototype accessors or just length accessor.
  int descriptors_count = IsFunctionModeWithPrototype(function_mode) ? 2 : 1;
  if (IsFunctionModeWithName(function_mode)) {
    ++inobject_properties_count;  // name property.
  } else {
    ++descriptors_count;  // name accessor.
  }
  descriptors_count += inobject_properties_count;

  Handle<Map> map = NewMap(
      JS_FUNCTION_TYPE, header_size + inobject_properties_count * kTaggedSize,
      TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
  {
    DisallowGarbageCollection no_gc;
    Map raw_map = *map;
    raw_map.set_has_prototype_slot(has_prototype);
    raw_map.set_is_constructor(has_prototype);
    raw_map.set_is_callable(true);
  }
  Map::SetPrototype(isolate(), map, empty_function);

  //
  // Setup descriptors array.
  //
  Map::EnsureDescriptorSlack(isolate(), map, descriptors_count);

  PropertyAttributes rw_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
  PropertyAttributes ro_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
  PropertyAttributes roc_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);

  int field_index = 0;
  STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0);
  {  // Add length accessor.
    Descriptor d = Descriptor::AccessorConstant(
        length_string(), function_length_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  STATIC_ASSERT(JSFunction::kNameDescriptorIndex == 1);
  if (IsFunctionModeWithName(function_mode)) {
    // Add name field.
    Handle<Name> name = isolate()->factory()->name_string();
    Descriptor d = Descriptor::DataField(isolate(), name, field_index++,
                                         roc_attribs, Representation::Tagged());
    map->AppendDescriptor(isolate(), &d);

  } else {
    // Add name accessor.
    Descriptor d = Descriptor::AccessorConstant(
        name_string(), function_name_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  if (IsFunctionModeWithPrototype(function_mode)) {
    // Add prototype accessor.
    PropertyAttributes attribs =
        IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs
                                                           : ro_attribs;
    Descriptor d = Descriptor::AccessorConstant(
        prototype_string(), function_prototype_accessor(), attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  DCHECK_EQ(inobject_properties_count, field_index);
  DCHECK_EQ(0,
            map->instance_descriptors(isolate()).number_of_slack_descriptors());
  LOG(isolate(), MapDetails(*map));
  return map;
}
3841 
// Creates the Map for class constructors. Class constructor maps expose
// only a "length" accessor and a read-only "prototype" accessor; the map is
// also marked as a prototype map.
Handle<Map> Factory::CreateClassFunctionMap(Handle<JSFunction> empty_function) {
  Handle<Map> map =
      NewMap(JS_CLASS_CONSTRUCTOR_TYPE, JSFunction::kSizeWithPrototype);
  {
    DisallowGarbageCollection no_gc;
    Map raw_map = *map;
    raw_map.set_has_prototype_slot(true);
    raw_map.set_is_constructor(true);
    raw_map.set_is_prototype_map(true);
    raw_map.set_is_callable(true);
  }
  Map::SetPrototype(isolate(), map, empty_function);

  //
  // Setup descriptors array.
  //
  Map::EnsureDescriptorSlack(isolate(), map, 2);

  PropertyAttributes ro_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
  PropertyAttributes roc_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);

  STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0);
  {  // Add length accessor.
    Descriptor d = Descriptor::AccessorConstant(
        length_string(), function_length_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  {
    // Add prototype accessor.
    Descriptor d = Descriptor::AccessorConstant(
        prototype_string(), function_prototype_accessor(), ro_attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  LOG(isolate(), MapDetails(*map));
  return map;
}
3881 
// Allocates a pending JSPromise without running promise hooks (callers that
// need hooks use NewJSPromise).
Handle<JSPromise> Factory::NewJSPromiseWithoutHook() {
  Handle<JSPromise> promise =
      Handle<JSPromise>::cast(NewJSObject(isolate()->promise_function()));
  DisallowGarbageCollection no_gc;
  JSPromise raw = *promise;
  // Smi::zero() in reactions_or_result means "pending, no reactions yet".
  raw.set_reactions_or_result(Smi::zero(), SKIP_WRITE_BARRIER);
  raw.set_flags(0);
  // TODO(v8) remove once embedder data slots are always zero-initialized.
  InitEmbedderFields(*promise, Smi::zero());
  DCHECK_EQ(raw.GetEmbedderFieldCount(), v8::Promise::kEmbedderFieldCount);
  return promise;
}
3894 
NewJSPromise()3895 Handle<JSPromise> Factory::NewJSPromise() {
3896   Handle<JSPromise> promise = NewJSPromiseWithoutHook();
3897   isolate()->RunAllPromiseHooks(PromiseHookType::kInit, promise,
3898                                 undefined_value());
3899   return promise;
3900 }
3901 
// Creates a CallHandlerInfo in old space. |has_no_side_effect| selects the
// map that marks the handler as side-effect free (used e.g. by side-effect
// checked debug evaluation).
Handle<CallHandlerInfo> Factory::NewCallHandlerInfo(bool has_no_side_effect) {
  Handle<Map> map = has_no_side_effect
                        ? side_effect_free_call_handler_info_map()
                        : side_effect_call_handler_info_map();
  CallHandlerInfo info = CallHandlerInfo::cast(New(map, AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  // undefined is a read-only root, so these stores need no write barrier.
  Object undefined_value = read_only_roots().undefined_value();
  info.set_callback(undefined_value, SKIP_WRITE_BARRIER);
  info.set_js_callback(undefined_value, SKIP_WRITE_BARRIER);
  info.set_data(undefined_value, SKIP_WRITE_BARRIER);
  return handle(info, isolate());
}
3914 
CanAllocateInReadOnlySpace()3915 bool Factory::CanAllocateInReadOnlySpace() {
3916   return allocator()->CanAllocateInReadOnlySpace();
3917 }
3918 
EmptyStringRootIsInitialized()3919 bool Factory::EmptyStringRootIsInitialized() {
3920   return isolate()->roots_table()[RootIndex::kempty_string] != kNullAddress;
3921 }
3922 
AllocationTypeForInPlaceInternalizableString()3923 AllocationType Factory::AllocationTypeForInPlaceInternalizableString() {
3924   return isolate()
3925       ->heap()
3926       ->allocation_type_for_in_place_internalizable_strings();
3927 }
3928 
NewFunctionForTesting(Handle<String> name)3929 Handle<JSFunction> Factory::NewFunctionForTesting(Handle<String> name) {
3930   Handle<SharedFunctionInfo> info =
3931       NewSharedFunctionInfoForBuiltin(name, Builtin::kIllegal);
3932   info->set_language_mode(LanguageMode::kSloppy);
3933   return JSFunctionBuilder{isolate(), info, isolate()->native_context()}
3934       .Build();
3935 }
3936 
// Builder for JSFunction instances; captures the pieces every function
// needs (isolate, shared function info, context). The map and feedback
// cell are resolved lazily in PrepareMap()/PrepareFeedbackCell() unless
// explicitly supplied by the caller.
Factory::JSFunctionBuilder::JSFunctionBuilder(Isolate* isolate,
                                              Handle<SharedFunctionInfo> sfi,
                                              Handle<Context> context)
    : isolate_(isolate), sfi_(sfi), context_(context) {}
3941 
// Assembles the JSFunction: resolves defaults for the map and feedback
// cell, allocates and initializes the object, then runs post-instantiation
// compiler bookkeeping.
Handle<JSFunction> Factory::JSFunctionBuilder::Build() {
  PrepareMap();
  PrepareFeedbackCell();

  Handle<Code> code = handle(FromCodeT(sfi_->GetCode()), isolate_);
  Handle<JSFunction> result = BuildRaw(code);

  // Baseline code requires a feedback vector to be present; ensure it now.
  if (code->kind() == CodeKind::BASELINE) {
    IsCompiledScope is_compiled_scope(sfi_->is_compiled_scope(isolate_));
    JSFunction::EnsureFeedbackVector(isolate_, result, &is_compiled_scope);
  }

  Compiler::PostInstantiation(result);
  return result;
}
3957 
// Allocates the JSFunction object and initializes all of its header
// fields. Everything after the allocation happens under
// DisallowGarbageCollection so the GC never observes a partially
// initialized object.
Handle<JSFunction> Factory::JSFunctionBuilder::BuildRaw(Handle<Code> code) {
  Isolate* isolate = isolate_;
  Factory* factory = isolate_->factory();

  // Both must have been resolved by PrepareMap()/PrepareFeedbackCell().
  Handle<Map> map = maybe_map_.ToHandleChecked();
  Handle<FeedbackCell> feedback_cell = maybe_feedback_cell_.ToHandleChecked();

  DCHECK(InstanceTypeChecker::IsJSFunction(map->instance_type()));

  // Allocation.
  JSFunction function = JSFunction::cast(factory->New(map, allocation_type_));
  DisallowGarbageCollection no_gc;

  // Stores into young-generation objects need no write barrier.
  WriteBarrierMode mode = allocation_type_ == AllocationType::kYoung
                              ? SKIP_WRITE_BARRIER
                              : UPDATE_WRITE_BARRIER;
  // Header initialization.
  function.initialize_properties(isolate);
  function.initialize_elements();
  function.set_shared(*sfi_, mode);
  function.set_context(*context_, mode);
  function.set_raw_feedback_cell(*feedback_cell, mode);
  function.set_code(*code, kReleaseStore, mode);
  if (function.has_prototype_slot()) {
    // the_hole marks "no prototype set yet"; it is a read-only root, so
    // the write barrier can be skipped.
    function.set_prototype_or_initial_map(
        ReadOnlyRoots(isolate).the_hole_value(), kReleaseStore,
        SKIP_WRITE_BARRIER);
  }

  // Potentially body initialization.
  factory->InitializeJSObjectBody(
      function, *map, JSFunction::GetHeaderSize(map->has_prototype_slot()));

  return handle(function, isolate_);
}
3993 
PrepareMap()3994 void Factory::JSFunctionBuilder::PrepareMap() {
3995   if (maybe_map_.is_null()) {
3996     // No specific map requested, use the default.
3997     maybe_map_ = handle(
3998         Map::cast(context_->native_context().get(sfi_->function_map_index())),
3999         isolate_);
4000   }
4001 }
4002 
PrepareFeedbackCell()4003 void Factory::JSFunctionBuilder::PrepareFeedbackCell() {
4004   Handle<FeedbackCell> feedback_cell;
4005   if (maybe_feedback_cell_.ToHandle(&feedback_cell)) {
4006     // Track the newly-created closure.
4007     feedback_cell->IncrementClosureCount(isolate_);
4008   } else {
4009     // Fall back to the many_closures_cell.
4010     maybe_feedback_cell_ = isolate_->factory()->many_closures_cell();
4011   }
4012 }
4013 
4014 }  // namespace internal
4015 }  // namespace v8
4016