// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-constructor-gen.h"

#include "src/ast/ast.h"
#include "src/builtins/builtins-call-gen.h"
#include "src/builtins/builtins-constructor.h"
#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/codegen/code-factory.h"
#include "src/codegen/code-stub-assembler.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/macro-assembler.h"
#include "src/common/globals.h"
#include "src/logging/counters.h"
#include "src/objects/objects-inl.h"

namespace v8 {
namespace internal {

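// The Construct*Varargs builtins below are thin wrappers: they reuse the
// shared call-or-construct varargs code generation and only select which
// builtin (Construct or ConstructFunction) ends up being invoked.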
void Builtins::Generate_ConstructVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructVarargs(masm,
                                  BUILTIN_CODE(masm->isolate(), Construct));
}

void Builtins::Generate_ConstructForwardVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructForwardVarargs(
      masm, CallOrConstructMode::kConstruct,
      BUILTIN_CODE(masm->isolate(), Construct));
}

void Builtins::Generate_ConstructFunctionForwardVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructForwardVarargs(
      masm, CallOrConstructMode::kConstruct,
      BUILTIN_CODE(masm->isolate(), ConstructFunction));
}

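// Construct_Baseline and Construct_WithFeedback collect construct feedback
// before dispatching. The baseline variant loads the context and feedback
// vector lazily from the baseline frame, while the feedback variant receives
// them explicitly as parameters.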
TF_BUILTIN(Construct_Baseline, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);

  BuildConstruct(
      target, new_target, argc, [=] { return LoadContextFromBaseline(); },
      [=] { return LoadFeedbackVectorFromBaseline(); }, slot,
      UpdateFeedbackMode::kGuaranteedFeedback);
}

TF_BUILTIN(Construct_WithFeedback, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
  auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);

  BuildConstruct(
      target, new_target, argc, [=] { return context; },
      [=] { return feedback_vector; }, slot,
      UpdateFeedbackMode::kOptionalFeedback);
}

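// Shared helper: records construct feedback for the (target, new_target)
// pair and then tail-calls either the generic Construct builtin or, when the
// feedback indicates an Array constructor call, ArrayConstructorImpl with
// the collected AllocationSite.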
void CallOrConstructBuiltinsAssembler::BuildConstruct(
    TNode<Object> target, TNode<Object> new_target, TNode<Int32T> argc,
    const LazyNode<Context>& context,
    const LazyNode<HeapObject>& feedback_vector, TNode<UintPtrT> slot,
    UpdateFeedbackMode mode) {
  TVARIABLE(AllocationSite, allocation_site);
  Label if_construct_generic(this), if_construct_array(this);
  TNode<Context> eager_context = context();
  CollectConstructFeedback(eager_context, target, new_target, feedback_vector(),
                           slot, mode, &if_construct_generic,
                           &if_construct_array, &allocation_site);

  BIND(&if_construct_generic);
  TailCallBuiltin(Builtin::kConstruct, eager_context, target, new_target, argc);

  BIND(&if_construct_array);
  TailCallBuiltin(Builtin::kArrayConstructorImpl, eager_context, target,
                  new_target, argc, allocation_site.value());
}

TF_BUILTIN(ConstructWithArrayLike, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto arguments_list = Parameter<Object>(Descriptor::kArgumentsList);
  auto context = Parameter<Context>(Descriptor::kContext);
  CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
}

TF_BUILTIN(ConstructWithArrayLike_WithFeedback,
           CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto arguments_list = Parameter<Object>(Descriptor::kArgumentsList);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
  auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);

  TVARIABLE(AllocationSite, allocation_site);
  Label if_construct_generic(this), if_construct_array(this);
  CollectConstructFeedback(context, target, new_target, feedback_vector, slot,
                           UpdateFeedbackMode::kOptionalFeedback,
                           &if_construct_generic, &if_construct_array,
                           &allocation_site);

  BIND(&if_construct_array);
  Goto(&if_construct_generic);  // Not implemented.

  BIND(&if_construct_generic);
  CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
}

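// The ConstructWithSpread family constructs {target} with the spread operand
// expanded into arguments; the _Baseline and _WithFeedback variants
// additionally record construct feedback before performing the construction.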
TF_BUILTIN(ConstructWithSpread, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto spread = Parameter<Object>(Descriptor::kSpread);
  auto args_count =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto context = Parameter<Context>(Descriptor::kContext);
  CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}

TF_BUILTIN(ConstructWithSpread_Baseline, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto spread = Parameter<Object>(Descriptor::kSpread);
  auto args_count =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);
  return BuildConstructWithSpread(
      target, new_target, spread, args_count,
      [=] { return LoadContextFromBaseline(); },
      [=] { return LoadFeedbackVectorFromBaseline(); }, slot,
      UpdateFeedbackMode::kGuaranteedFeedback);
}

TF_BUILTIN(ConstructWithSpread_WithFeedback, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto spread = Parameter<Object>(Descriptor::kSpread);
  auto args_count =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto feedback_vector = Parameter<HeapObject>(Descriptor::kFeedbackVector);
  auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);

  return BuildConstructWithSpread(
      target, new_target, spread, args_count, [=] { return context; },
      [=] { return feedback_vector; }, slot,
      UpdateFeedbackMode::kGuaranteedFeedback);
}

void CallOrConstructBuiltinsAssembler::BuildConstructWithSpread(
    TNode<Object> target, TNode<Object> new_target, TNode<Object> spread,
    TNode<Int32T> argc, const LazyNode<Context>& context,
    const LazyNode<HeapObject>& feedback_vector, TNode<UintPtrT> slot,
    UpdateFeedbackMode mode) {
  TVARIABLE(AllocationSite, allocation_site);
  Label if_construct_generic(this), if_construct_array(this);
  TNode<Context> eager_context = context();
  CollectConstructFeedback(eager_context, target, new_target, feedback_vector(),
                           slot, UpdateFeedbackMode::kGuaranteedFeedback,
                           &if_construct_generic, &if_construct_array,
                           &allocation_site);

  BIND(&if_construct_array);
  Goto(&if_construct_generic);  // Not implemented.

  BIND(&if_construct_generic);
  CallOrConstructWithSpread(target, new_target, spread, argc, eager_context);
}

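// FastNewClosure allocates a JSFunction in new space from the given
// SharedFunctionInfo and FeedbackCell, picks the appropriate function map
// from the native context, and installs the CompileLazy builtin as the
// function's code so compilation is deferred until the first call.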
TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
  auto shared_function_info =
      Parameter<SharedFunctionInfo>(Descriptor::kSharedFunctionInfo);
  auto feedback_cell = Parameter<FeedbackCell>(Descriptor::kFeedbackCell);
  auto context = Parameter<Context>(Descriptor::kContext);

  IncrementCounter(isolate()->counters()->fast_new_closure_total(), 1);

  // Bump the closure counter encoded in the {feedback_cell}'s map.
  {
    const TNode<Map> feedback_cell_map = LoadMap(feedback_cell);
    Label no_closures(this), one_closure(this), cell_done(this);

    GotoIf(IsNoClosuresCellMap(feedback_cell_map), &no_closures);
    GotoIf(IsOneClosureCellMap(feedback_cell_map), &one_closure);
    CSA_DCHECK(this, IsManyClosuresCellMap(feedback_cell_map),
               feedback_cell_map, feedback_cell);
    Goto(&cell_done);

    BIND(&no_closures);
    StoreMapNoWriteBarrier(feedback_cell, RootIndex::kOneClosureCellMap);
    Goto(&cell_done);

    BIND(&one_closure);
    StoreMapNoWriteBarrier(feedback_cell, RootIndex::kManyClosuresCellMap);
    Goto(&cell_done);

    BIND(&cell_done);
  }

  // The calculation of |function_map_index| must be in sync with
  // SharedFunctionInfo::function_map_index().
  TNode<Uint32T> flags = LoadObjectField<Uint32T>(
      shared_function_info, SharedFunctionInfo::kFlagsOffset);
  const TNode<IntPtrT> function_map_index = Signed(IntPtrAdd(
      DecodeWordFromWord32<SharedFunctionInfo::FunctionMapIndexBits>(flags),
      IntPtrConstant(Context::FIRST_FUNCTION_MAP_INDEX)));
  CSA_DCHECK(this, UintPtrLessThanOrEqual(
                       function_map_index,
                       IntPtrConstant(Context::LAST_FUNCTION_MAP_INDEX)));

  // Get the function map in the current native context and set that
  // as the map of the allocated object.
  const TNode<NativeContext> native_context = LoadNativeContext(context);
  const TNode<Map> function_map =
      CAST(LoadContextElement(native_context, function_map_index));

  // Create a new closure from the given function info in new space
  TNode<IntPtrT> instance_size_in_bytes =
      TimesTaggedSize(LoadMapInstanceSizeInWords(function_map));
  TNode<HeapObject> result = Allocate(instance_size_in_bytes);
  StoreMapNoWriteBarrier(result, function_map);
  InitializeJSObjectBodyNoSlackTracking(result, function_map,
                                        instance_size_in_bytes,
                                        JSFunction::kSizeWithoutPrototype);

  // Initialize the rest of the function.
  StoreObjectFieldRoot(result, JSObject::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSObject::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  {
    // Set function prototype if necessary.
    Label done(this), init_prototype(this);
    Branch(IsFunctionWithPrototypeSlotMap(function_map), &init_prototype,
           &done);

    BIND(&init_prototype);
    StoreObjectFieldRoot(result, JSFunction::kPrototypeOrInitialMapOffset,
                         RootIndex::kTheHoleValue);
    Goto(&done);
    BIND(&done);
  }

  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackCellOffset,
                                 feedback_cell);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                 shared_function_info);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
  TNode<CodeT> lazy_builtin =
      HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeOffset, lazy_builtin);
  Return(result);
}

TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto target = Parameter<JSFunction>(Descriptor::kTarget);
  auto new_target = Parameter<JSReceiver>(Descriptor::kNewTarget);

  Label call_runtime(this);

  TNode<JSObject> result =
      FastNewObject(context, target, new_target, &call_runtime);
  Return(result);

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kNewObject, context, target, new_target);
}

TNode<JSObject> ConstructorBuiltinsAssembler::FastNewObject(
    TNode<Context> context, TNode<JSFunction> target,
    TNode<JSReceiver> new_target) {
  TVARIABLE(JSObject, var_obj);
  Label call_runtime(this), end(this);

  var_obj = FastNewObject(context, target, new_target, &call_runtime);
  Goto(&end);

  BIND(&call_runtime);
  var_obj = CAST(CallRuntime(Runtime::kNewObject, context, target, new_target));
  Goto(&end);

  BIND(&end);
  return var_obj.value();
}

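// Fast-path object allocation for `new target(...)`: it only applies when
// new.target is a JSFunction whose initial map's constructor is {target};
// any other case (Smi prototype slot, non-map initial map, mismatched
// constructor) jumps to {call_runtime}.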
TNode<JSObject> ConstructorBuiltinsAssembler::FastNewObject(
    TNode<Context> context, TNode<JSFunction> target,
    TNode<JSReceiver> new_target, Label* call_runtime) {
  // Verify that the new target is a JSFunction.
  Label end(this);
  TNode<JSFunction> new_target_func =
      HeapObjectToJSFunctionWithPrototypeSlot(new_target, call_runtime);
  // Fast path.

  // Load the initial map and verify that it's in fact a map.
  TNode<Object> initial_map_or_proto =
      LoadJSFunctionPrototypeOrInitialMap(new_target_func);
  GotoIf(TaggedIsSmi(initial_map_or_proto), call_runtime);
  GotoIf(DoesntHaveInstanceType(CAST(initial_map_or_proto), MAP_TYPE),
         call_runtime);
  TNode<Map> initial_map = CAST(initial_map_or_proto);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  TNode<Object> new_target_constructor = LoadObjectField(
      initial_map, Map::kConstructorOrBackPointerOrNativeContextOffset);
  GotoIf(TaggedNotEqual(target, new_target_constructor), call_runtime);

  TVARIABLE(HeapObject, properties);

  Label instantiate_map(this), allocate_properties(this);
  GotoIf(IsDictionaryMap(initial_map), &allocate_properties);
  {
    properties = EmptyFixedArrayConstant();
    Goto(&instantiate_map);
  }
  BIND(&allocate_properties);
  {
    if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
      properties =
          AllocateSwissNameDictionary(SwissNameDictionary::kInitialCapacity);
    } else {
      properties = AllocateNameDictionary(NameDictionary::kInitialCapacity);
    }
    Goto(&instantiate_map);
  }

  BIND(&instantiate_map);
  return AllocateJSObjectFromMap(initial_map, properties.value(), base::nullopt,
                                 AllocationFlag::kNone, kWithSlackTracking);
}

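// FastNewFunctionContext allocates a function or eval Context in new space:
// it computes the size from the slot count, selects the matching context map
// from the native context, writes the header fields, and fills all remaining
// slots with undefined.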
TNode<Context> ConstructorBuiltinsAssembler::FastNewFunctionContext(
    TNode<ScopeInfo> scope_info, TNode<Uint32T> slots, TNode<Context> context,
    ScopeType scope_type) {
  TNode<IntPtrT> slots_intptr = Signed(ChangeUint32ToWord(slots));
  TNode<IntPtrT> size = ElementOffsetFromIndex(slots_intptr, PACKED_ELEMENTS,
                                               Context::kTodoHeaderSize);

  // Create the new function context in new space.
  TNode<Context> function_context =
      UncheckedCast<Context>(AllocateInNewSpace(size));

  TNode<NativeContext> native_context = LoadNativeContext(context);
  Context::Field index;
  switch (scope_type) {
    case EVAL_SCOPE:
      index = Context::EVAL_CONTEXT_MAP_INDEX;
      break;
    case FUNCTION_SCOPE:
      index = Context::FUNCTION_CONTEXT_MAP_INDEX;
      break;
    default:
      UNREACHABLE();
  }
  TNode<Map> map = CAST(LoadContextElement(native_context, index));
  // Set up the header.
  StoreMapNoWriteBarrier(function_context, map);
  TNode<IntPtrT> min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
  // TODO(ishell): for now, length also includes MIN_CONTEXT_SLOTS.
  TNode<IntPtrT> length = IntPtrAdd(slots_intptr, min_context_slots);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
                                 SmiTag(length));
  StoreObjectFieldNoWriteBarrier(function_context, Context::kScopeInfoOffset,
                                 scope_info);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kPreviousOffset,
                                 context);

  // Initialize the rest of the slots to undefined.
  TNode<Oddball> undefined = UndefinedConstant();
  TNode<IntPtrT> start_offset = IntPtrConstant(Context::kTodoHeaderSize);
  CodeStubAssembler::VariableList vars(0, zone());
  BuildFastLoop<IntPtrT>(
      vars, start_offset, size,
      [=](TNode<IntPtrT> offset) {
        StoreObjectFieldNoWriteBarrier(function_context, offset, undefined);
      },
      kTaggedSize, IndexAdvanceMode::kPost);
  return function_context;
}

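// CreateRegExpLiteral clones the RegExpBoilerplateDescription cached in the
// feedback vector slot into a fresh JSRegExp; if there is no feedback vector
// or no boilerplate yet, it falls back to the CreateRegExpLiteral runtime
// function.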
TNode<JSRegExp> ConstructorBuiltinsAssembler::CreateRegExpLiteral(
    TNode<HeapObject> maybe_feedback_vector, TNode<TaggedIndex> slot,
    TNode<Object> pattern, TNode<Smi> flags, TNode<Context> context) {
  Label call_runtime(this, Label::kDeferred), end(this);

  GotoIf(IsUndefined(maybe_feedback_vector), &call_runtime);

  TVARIABLE(JSRegExp, result);
  TNode<FeedbackVector> feedback_vector = CAST(maybe_feedback_vector);
  TNode<Object> literal_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot));
  GotoIfNot(HasBoilerplate(literal_site), &call_runtime);
  {
    STATIC_ASSERT(JSRegExp::kDataOffset == JSObject::kHeaderSize);
    STATIC_ASSERT(JSRegExp::kSourceOffset ==
                  JSRegExp::kDataOffset + kTaggedSize);
    STATIC_ASSERT(JSRegExp::kFlagsOffset ==
                  JSRegExp::kSourceOffset + kTaggedSize);
    STATIC_ASSERT(JSRegExp::kHeaderSize ==
                  JSRegExp::kFlagsOffset + kTaggedSize);
    STATIC_ASSERT(JSRegExp::kLastIndexOffset == JSRegExp::kHeaderSize);
    DCHECK_EQ(JSRegExp::Size(), JSRegExp::kLastIndexOffset + kTaggedSize);

    TNode<RegExpBoilerplateDescription> boilerplate = CAST(literal_site);
    TNode<HeapObject> new_object = Allocate(JSRegExp::Size());

    // Initialize Object fields.
    TNode<JSFunction> regexp_function = CAST(LoadContextElement(
        LoadNativeContext(context), Context::REGEXP_FUNCTION_INDEX));
    TNode<Map> initial_map = CAST(LoadObjectField(
        regexp_function, JSFunction::kPrototypeOrInitialMapOffset));
    StoreMapNoWriteBarrier(new_object, initial_map);
    // Initialize JSReceiver fields.
    StoreObjectFieldRoot(new_object, JSReceiver::kPropertiesOrHashOffset,
                         RootIndex::kEmptyFixedArray);
    // Initialize JSObject fields.
    StoreObjectFieldRoot(new_object, JSObject::kElementsOffset,
                         RootIndex::kEmptyFixedArray);
    // Initialize JSRegExp fields.
    StoreObjectFieldNoWriteBarrier(
        new_object, JSRegExp::kDataOffset,
        LoadObjectField(boilerplate,
                        RegExpBoilerplateDescription::kDataOffset));
    StoreObjectFieldNoWriteBarrier(
        new_object, JSRegExp::kSourceOffset,
        LoadObjectField(boilerplate,
                        RegExpBoilerplateDescription::kSourceOffset));
    StoreObjectFieldNoWriteBarrier(
        new_object, JSRegExp::kFlagsOffset,
        LoadObjectField(boilerplate,
                        RegExpBoilerplateDescription::kFlagsOffset));
    StoreObjectFieldNoWriteBarrier(
        new_object, JSRegExp::kLastIndexOffset,
        SmiConstant(JSRegExp::kInitialLastIndexValue));

    result = CAST(new_object);
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    result = CAST(CallRuntime(Runtime::kCreateRegExpLiteral, context,
                              maybe_feedback_vector, slot, pattern, flags));
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}

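// CreateShallowArrayLiteral clones the boilerplate JSArray referenced by the
// AllocationSite in the feedback vector slot, optionally tracking the
// allocation site; it bails out to {call_runtime} if no boilerplate exists.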
TNode<JSArray> ConstructorBuiltinsAssembler::CreateShallowArrayLiteral(
    TNode<FeedbackVector> feedback_vector, TNode<TaggedIndex> slot,
    TNode<Context> context, AllocationSiteMode allocation_site_mode,
    Label* call_runtime) {
  Label zero_capacity(this), cow_elements(this), fast_elements(this),
      return_result(this);

  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot));
  GotoIfNot(HasBoilerplate(maybe_allocation_site), call_runtime);

  TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
  TNode<JSArray> boilerplate = CAST(LoadBoilerplate(allocation_site));

  if (allocation_site_mode == TRACK_ALLOCATION_SITE &&
      V8_ALLOCATION_SITE_TRACKING_BOOL) {
    return CloneFastJSArray(context, boilerplate, allocation_site);
  } else {
    return CloneFastJSArray(context, boilerplate);
  }
}

TNode<JSArray> ConstructorBuiltinsAssembler::CreateEmptyArrayLiteral(
    TNode<FeedbackVector> feedback_vector, TNode<TaggedIndex> slot,
    TNode<Context> context) {
  // Array literals always have a valid AllocationSite to properly track
  // elements transitions.
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot));
  TVARIABLE(AllocationSite, allocation_site);

  Label create_empty_array(this),
      initialize_allocation_site(this, Label::kDeferred), done(this);
  GotoIf(TaggedIsSmi(maybe_allocation_site), &initialize_allocation_site);
  {
    allocation_site = CAST(maybe_allocation_site);
    Goto(&create_empty_array);
  }
  // TODO(cbruni): create the AllocationSite in CSA.
  BIND(&initialize_allocation_site);
  {
    allocation_site = CreateAllocationSiteInFeedbackVector(
        feedback_vector,
        // TODO(v8:10047): pass slot as TaggedIndex here
        Unsigned(TaggedIndexToIntPtr(slot)));
    Goto(&create_empty_array);
  }

  BIND(&create_empty_array);
  TNode<Int32T> kind = LoadElementsKind(allocation_site.value());
  TNode<NativeContext> native_context = LoadNativeContext(context);
  Comment("LoadJSArrayElementsMap");
  TNode<Map> array_map = LoadJSArrayElementsMap(kind, native_context);
  TNode<IntPtrT> zero_intptr = IntPtrConstant(0);
  TNode<Smi> zero = SmiConstant(0);
  Comment("Allocate JSArray");
  base::Optional<TNode<AllocationSite>> site =
      V8_ALLOCATION_SITE_TRACKING_BOOL
          ? base::make_optional(allocation_site.value())
          : base::nullopt;
  TNode<JSArray> result = AllocateJSArray(GetInitialFastElementsKind(),
                                          array_map, zero_intptr, zero, site);

  Goto(&done);
  BIND(&done);

  return result;
}

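// CreateShallowObjectLiteral copies the boilerplate object behind the
// AllocationSite: the property backing store, the elements, and the in-object
// fields. Field copying starts without write barriers and switches to the
// write-barrier path once a HeapNumber field is seen, since cloning heap
// numbers may allocate and trigger a GC.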
TNode<HeapObject> ConstructorBuiltinsAssembler::CreateShallowObjectLiteral(
    TNode<FeedbackVector> feedback_vector, TNode<TaggedIndex> slot,
    Label* call_runtime) {
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot));
  GotoIfNot(HasBoilerplate(maybe_allocation_site), call_runtime);

  TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
  TNode<JSObject> boilerplate = LoadBoilerplate(allocation_site);
  TNode<Map> boilerplate_map = LoadMap(boilerplate);
  CSA_DCHECK(this, IsJSObjectMap(boilerplate_map));

  TVARIABLE(HeapObject, var_properties);
  {
    TNode<Uint32T> bit_field_3 = LoadMapBitField3(boilerplate_map);
    GotoIf(IsSetWord32<Map::Bits3::IsDeprecatedBit>(bit_field_3), call_runtime);
    // Directly copy over the property store for dict-mode boilerplates.
    Label if_dictionary(this), if_fast(this), done(this);
    Branch(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bit_field_3),
           &if_dictionary, &if_fast);
    BIND(&if_dictionary);
    {
      Comment("Copy dictionary properties");
      if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
        var_properties =
            CopySwissNameDictionary(CAST(LoadSlowProperties(boilerplate)));
      } else {
        var_properties = CopyNameDictionary(
            CAST(LoadSlowProperties(boilerplate)), call_runtime);
      }
      // Slow objects have no in-object properties.
      Goto(&done);
    }
    BIND(&if_fast);
    {
      // TODO(cbruni): support copying out-of-object properties.
      TNode<HeapObject> boilerplate_properties =
          LoadFastProperties(boilerplate);
      GotoIfNot(IsEmptyFixedArray(boilerplate_properties), call_runtime);
      var_properties = EmptyFixedArrayConstant();
      Goto(&done);
    }
    BIND(&done);
  }

  TVARIABLE(FixedArrayBase, var_elements);
  {
    // Copy the elements backing store, assuming that it's flat.
    Label if_empty_fixed_array(this), if_copy_elements(this), done(this);
    TNode<FixedArrayBase> boilerplate_elements = LoadElements(boilerplate);
    Branch(IsEmptyFixedArray(boilerplate_elements), &if_empty_fixed_array,
           &if_copy_elements);

    BIND(&if_empty_fixed_array);
    var_elements = boilerplate_elements;
    Goto(&done);

    BIND(&if_copy_elements);
    CSA_DCHECK(this, Word32BinaryNot(
                         IsFixedCOWArrayMap(LoadMap(boilerplate_elements))));
    auto flags = ExtractFixedArrayFlag::kAllFixedArrays;
    var_elements = CloneFixedArray(boilerplate_elements, flags);
    Goto(&done);
    BIND(&done);
  }

  // Ensure new-space allocation for a fresh JSObject so we can skip write
  // barriers when copying all object fields.
  STATIC_ASSERT(JSObject::kMaxInstanceSize < kMaxRegularHeapObjectSize);
  TNode<IntPtrT> instance_size =
      TimesTaggedSize(LoadMapInstanceSizeInWords(boilerplate_map));
  TNode<IntPtrT> allocation_size = instance_size;
  bool needs_allocation_memento = FLAG_allocation_site_pretenuring;
  if (needs_allocation_memento) {
    DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
    // Prepare for inner-allocating the AllocationMemento.
    allocation_size =
        IntPtrAdd(instance_size, IntPtrConstant(AllocationMemento::kSize));
  }

  TNode<HeapObject> copy =
      UncheckedCast<HeapObject>(AllocateInNewSpace(allocation_size));
  {
    Comment("Initialize Literal Copy");
    // Initialize Object fields.
    StoreMapNoWriteBarrier(copy, boilerplate_map);
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kPropertiesOrHashOffset,
                                   var_properties.value());
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kElementsOffset,
                                   var_elements.value());
  }

  // Initialize the AllocationMemento before potential GCs due to heap number
  // allocation when copying the in-object properties.
  if (needs_allocation_memento) {
    InitializeAllocationMemento(copy, instance_size, allocation_site);
  }

  {
    // Copy over in-object properties.
    Label continue_with_write_barrier(this), done_init(this);
    TVARIABLE(IntPtrT, offset, IntPtrConstant(JSObject::kHeaderSize));
    {
      Comment("Copy in-object properties fast");
      Label continue_fast(this, &offset);
      Branch(IntPtrEqual(offset.value(), instance_size), &done_init,
             &continue_fast);
      BIND(&continue_fast);
      TNode<Object> field = LoadObjectField(boilerplate, offset.value());
      Label store_field(this);
      GotoIf(TaggedIsSmi(field), &store_field);
      // TODO(leszeks): Read the field descriptor to decide if this heap
      // number is mutable or not.
      GotoIf(IsHeapNumber(CAST(field)), &continue_with_write_barrier);
      Goto(&store_field);
      BIND(&store_field);
      StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
      offset = IntPtrAdd(offset.value(), IntPtrConstant(kTaggedSize));
      Branch(WordNotEqual(offset.value(), instance_size), &continue_fast,
             &done_init);
    }

    // Continue initializing the literal after seeing the first sub-object
    // potentially causing allocation. In this case we prepare the new literal
    // by copying all pending fields over from the boilerplate and emit full
    // write barriers from here on.
    BIND(&continue_with_write_barrier);
    {
      Comment("Copy in-object properties slow");
      BuildFastLoop<IntPtrT>(
          offset.value(), instance_size,
          [=](TNode<IntPtrT> offset) {
            // TODO(ishell): value decompression is not necessary here.
            TNode<Object> field = LoadObjectField(boilerplate, offset);
            StoreObjectFieldNoWriteBarrier(copy, offset, field);
          },
          kTaggedSize, IndexAdvanceMode::kPost);
      CopyMutableHeapNumbersInObject(copy, offset.value(), instance_size);
      Goto(&done_init);
    }
    BIND(&done_init);
  }
  return copy;
}

// Used by the CreateEmptyObjectLiteral bytecode and the Object constructor.
TNode<JSObject> ConstructorBuiltinsAssembler::CreateEmptyObjectLiteral(
    TNode<Context> context) {
  TNode<NativeContext> native_context = LoadNativeContext(context);
  TNode<Map> map = LoadObjectFunctionInitialMap(native_context);
  // Ensure that slack tracking is disabled for the map.
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  CSA_DCHECK(this, IsClearWord32<Map::Bits3::ConstructionCounterBits>(
                       LoadMapBitField3(map)));
  TNode<FixedArray> empty_fixed_array = EmptyFixedArrayConstant();
  TNode<JSObject> result =
      AllocateJSObjectFromMap(map, empty_fixed_array, empty_fixed_array);
  return result;
}

void ConstructorBuiltinsAssembler::CopyMutableHeapNumbersInObject(
    TNode<HeapObject> copy, TNode<IntPtrT> start_offset,
    TNode<IntPtrT> end_offset) {
  // Iterate over all object properties of a freshly copied object and
  // duplicate mutable heap numbers.
  Comment("Copy mutable HeapNumber values");
  BuildFastLoop<IntPtrT>(
      start_offset, end_offset,
      [=](TNode<IntPtrT> offset) {
        TNode<Object> field = LoadObjectField(copy, offset);
        Label copy_heap_number(this, Label::kDeferred), continue_loop(this);
        // We only have to clone complex field values.
        GotoIf(TaggedIsSmi(field), &continue_loop);
        // TODO(leszeks): Read the field descriptor to decide if this heap
        // number is mutable or not.
        Branch(IsHeapNumber(CAST(field)), &copy_heap_number, &continue_loop);
        BIND(&copy_heap_number);
        {
          TNode<Float64T> double_value = LoadHeapNumberValue(CAST(field));
          TNode<HeapNumber> heap_number =
              AllocateHeapNumberWithValue(double_value);
          StoreObjectField(copy, offset, heap_number);
          Goto(&continue_loop);
        }
        BIND(&continue_loop);
      },
      kTaggedSize, IndexAdvanceMode::kPost);
}

}  // namespace internal
}  // namespace v8