// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-constructor-gen.h"

#include "src/ast/ast.h"
#include "src/builtins/builtins-call-gen.h"
#include "src/builtins/builtins-constructor.h"
#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/codegen/code-factory.h"
#include "src/codegen/code-stub-assembler.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/macro-assembler.h"
#include "src/logging/counters.h"
#include "src/objects/objects-inl.h"

namespace v8 {
namespace internal {

void Builtins::Generate_ConstructVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructVarargs(masm,
                                  BUILTIN_CODE(masm->isolate(), Construct));
}

void Builtins::Generate_ConstructForwardVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructForwardVarargs(
      masm, CallOrConstructMode::kConstruct,
      BUILTIN_CODE(masm->isolate(), Construct));
}

void Builtins::Generate_ConstructFunctionForwardVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructForwardVarargs(
      masm, CallOrConstructMode::kConstruct,
      BUILTIN_CODE(masm->isolate(), ConstructFunction));
}

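// Construct_WithFeedback: collects construction feedback for {slot} and then
// dispatches. Array constructions that have an AllocationSite tail-call
// ArrayConstructorImpl; everything else tail-calls the generic Construct
// builtin (e.g. for a plain `new C(a, b)` site).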
TF_BUILTIN(Construct_WithFeedback, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto maybe_feedback_vector =
      Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
  auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);

  TVARIABLE(AllocationSite, allocation_site);
  Label if_construct_generic(this), if_construct_array(this);
  CollectConstructFeedback(context, target, new_target, maybe_feedback_vector,
                           Unsigned(ChangeInt32ToIntPtr(slot)),
                           &if_construct_generic, &if_construct_array,
                           &allocation_site);

  BIND(&if_construct_generic);
  TailCallBuiltin(Builtins::kConstruct, context, target, new_target, argc);

  BIND(&if_construct_array);
  TailCallBuiltin(Builtins::kArrayConstructorImpl, context, target, new_target,
                  argc, allocation_site.value());
}

TF_BUILTIN(ConstructWithArrayLike, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto arguments_list = Parameter<Object>(Descriptor::kArgumentsList);
  auto context = Parameter<Context>(Descriptor::kContext);
  CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
}

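// ConstructWithArrayLike_WithFeedback: like ConstructWithArrayLike above
// (e.g. for Reflect.construct), but first records construction feedback.
// The array fast path is not implemented here, so both feedback outcomes
// fall through to the generic CallOrConstructWithArrayLike.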
TF_BUILTIN(ConstructWithArrayLike_WithFeedback,
           CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto arguments_list = Parameter<Object>(Descriptor::kArgumentsList);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto maybe_feedback_vector =
      Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
  auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);

  TVARIABLE(AllocationSite, allocation_site);
  Label if_construct_generic(this), if_construct_array(this);
  CollectConstructFeedback(context, target, new_target, maybe_feedback_vector,
                           Unsigned(ChangeInt32ToIntPtr(slot)),
                           &if_construct_generic, &if_construct_array,
                           &allocation_site);

  BIND(&if_construct_array);
  Goto(&if_construct_generic);  // Not implemented.

  BIND(&if_construct_generic);
  CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
}

TF_BUILTIN(ConstructWithSpread, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto spread = Parameter<Object>(Descriptor::kSpread);
  auto args_count =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto context = Parameter<Context>(Descriptor::kContext);
  CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}

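// ConstructWithSpread_WithFeedback: spread construction (e.g. `new C(...args)`)
// with feedback collection. As above, the array case simply falls through to
// the generic spread construction path.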
TF_BUILTIN(ConstructWithSpread_WithFeedback, CallOrConstructBuiltinsAssembler) {
  auto target = Parameter<Object>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto spread = Parameter<Object>(Descriptor::kSpread);
  auto args_count =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto maybe_feedback_vector =
      Parameter<HeapObject>(Descriptor::kMaybeFeedbackVector);
  auto slot = UncheckedParameter<Int32T>(Descriptor::kSlot);

  TVARIABLE(AllocationSite, allocation_site);
  Label if_construct_generic(this), if_construct_array(this);
  CollectConstructFeedback(context, target, new_target, maybe_feedback_vector,
                           Unsigned(ChangeInt32ToIntPtr(slot)),
                           &if_construct_generic, &if_construct_array,
                           &allocation_site);

  BIND(&if_construct_array);
  Goto(&if_construct_generic);  // Not implemented.

  BIND(&if_construct_generic);
  CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}

using Node = compiler::Node;

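// FastNewClosure: allocates and initializes a JSFunction for the given
// SharedFunctionInfo and FeedbackCell without calling into the runtime.
// The function's code is set to CompileLazy, so actual compilation is
// deferred until the first call.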
TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
  auto shared_function_info =
      Parameter<SharedFunctionInfo>(Descriptor::kSharedFunctionInfo);
  auto feedback_cell = Parameter<FeedbackCell>(Descriptor::kFeedbackCell);
  auto context = Parameter<Context>(Descriptor::kContext);

  IncrementCounter(isolate()->counters()->fast_new_closure_total(), 1);

  // Bump the closure counter encoded in the {feedback_cell}'s map.
  {
    const TNode<Map> feedback_cell_map = LoadMap(feedback_cell);
    Label no_closures(this), one_closure(this), cell_done(this);

    GotoIf(IsNoClosuresCellMap(feedback_cell_map), &no_closures);
    GotoIf(IsOneClosureCellMap(feedback_cell_map), &one_closure);
    CSA_ASSERT(this, IsManyClosuresCellMap(feedback_cell_map),
               feedback_cell_map, feedback_cell);
    Goto(&cell_done);

    BIND(&no_closures);
    StoreMapNoWriteBarrier(feedback_cell, RootIndex::kOneClosureCellMap);
    Goto(&cell_done);

    BIND(&one_closure);
    StoreMapNoWriteBarrier(feedback_cell, RootIndex::kManyClosuresCellMap);
    Goto(&cell_done);

    BIND(&cell_done);
  }

  // The calculation of |function_map_index| must be in sync with
  // SharedFunctionInfo::function_map_index().
  TNode<Uint32T> flags = LoadObjectField<Uint32T>(
      shared_function_info, SharedFunctionInfo::kFlagsOffset);
  const TNode<IntPtrT> function_map_index = Signed(IntPtrAdd(
      DecodeWordFromWord32<SharedFunctionInfo::FunctionMapIndexBits>(flags),
      IntPtrConstant(Context::FIRST_FUNCTION_MAP_INDEX)));
  CSA_ASSERT(this, UintPtrLessThanOrEqual(
                       function_map_index,
                       IntPtrConstant(Context::LAST_FUNCTION_MAP_INDEX)));

  // Get the function map in the current native context and set that
  // as the map of the allocated object.
  const TNode<NativeContext> native_context = LoadNativeContext(context);
  const TNode<Map> function_map =
      CAST(LoadContextElement(native_context, function_map_index));

  // Create a new closure from the given function info in new space.
  TNode<IntPtrT> instance_size_in_bytes =
      TimesTaggedSize(LoadMapInstanceSizeInWords(function_map));
  TNode<HeapObject> result = Allocate(instance_size_in_bytes);
  StoreMapNoWriteBarrier(result, function_map);
  InitializeJSObjectBodyNoSlackTracking(result, function_map,
                                        instance_size_in_bytes,
                                        JSFunction::kSizeWithoutPrototype);

  // Initialize the rest of the function.
  StoreObjectFieldRoot(result, JSObject::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSObject::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  {
    // Set function prototype if necessary.
    Label done(this), init_prototype(this);
    Branch(IsFunctionWithPrototypeSlotMap(function_map), &init_prototype,
           &done);

    BIND(&init_prototype);
    StoreObjectFieldRoot(result, JSFunction::kPrototypeOrInitialMapOffset,
                         RootIndex::kTheHoleValue);
    Goto(&done);
    BIND(&done);
  }

  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackCellOffset,
                                 feedback_cell);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                 shared_function_info);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
  Handle<Code> lazy_builtin_handle =
      isolate()->builtins()->builtin_handle(Builtins::kCompileLazy);
  TNode<Code> lazy_builtin = HeapConstant(lazy_builtin_handle);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeOffset, lazy_builtin);
  Return(result);
}

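// FastNewObject: fast path for allocating the receiver of `new target(...)`;
// bails out to Runtime::kNewObject when the fast path in the helper below
// cannot be taken.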
TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto target = Parameter<JSFunction>(Descriptor::kTarget);
  auto new_target = Parameter<JSReceiver>(Descriptor::kNewTarget);

  Label call_runtime(this);

  TNode<JSObject> result =
      FastNewObject(context, target, new_target, &call_runtime);
  Return(result);

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kNewObject, context, target, new_target);
}

TNode<JSObject> ConstructorBuiltinsAssembler::FastNewObject(
    TNode<Context> context, TNode<JSFunction> target,
    TNode<JSReceiver> new_target) {
  TVARIABLE(JSObject, var_obj);
  Label call_runtime(this), end(this);

  var_obj = FastNewObject(context, target, new_target, &call_runtime);
  Goto(&end);

  BIND(&call_runtime);
  var_obj = CAST(CallRuntime(Runtime::kNewObject, context, target, new_target));
  Goto(&end);

  BIND(&end);
  return var_obj.value();
}

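// Variant of FastNewObject that takes an explicit bailout label instead of
// calling the runtime itself; returns the freshly allocated receiver on the
// fast path.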
TNode<JSObject> ConstructorBuiltinsAssembler::FastNewObject(
    TNode<Context> context, TNode<JSFunction> target,
    TNode<JSReceiver> new_target, Label* call_runtime) {
  // Verify that the new target is a JSFunction.
  Label end(this);
  TNode<JSFunction> new_target_func =
      HeapObjectToJSFunctionWithPrototypeSlot(new_target, call_runtime);
  // Fast path.

  // Load the initial map and verify that it's in fact a map.
  TNode<Object> initial_map_or_proto =
      LoadJSFunctionPrototypeOrInitialMap(new_target_func);
  GotoIf(TaggedIsSmi(initial_map_or_proto), call_runtime);
  GotoIf(DoesntHaveInstanceType(CAST(initial_map_or_proto), MAP_TYPE),
         call_runtime);
  TNode<Map> initial_map = CAST(initial_map_or_proto);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  TNode<Object> new_target_constructor = LoadObjectField(
      initial_map, Map::kConstructorOrBackPointerOrNativeContextOffset);
  GotoIf(TaggedNotEqual(target, new_target_constructor), call_runtime);

  TVARIABLE(HeapObject, properties);

  Label instantiate_map(this), allocate_properties(this);
  GotoIf(IsDictionaryMap(initial_map), &allocate_properties);
  {
    properties = EmptyFixedArrayConstant();
    Goto(&instantiate_map);
  }
  BIND(&allocate_properties);
  {
    properties = AllocateNameDictionary(NameDictionary::kInitialCapacity);
    Goto(&instantiate_map);
  }

  BIND(&instantiate_map);
  return AllocateJSObjectFromMap(initial_map, properties.value(), base::nullopt,
                                 kNone, kWithSlackTracking);
}

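// FastNewFunctionContext: allocates a FUNCTION_SCOPE or EVAL_SCOPE Context
// with room for {slots} additional slots in new space, wires up the scope
// info and previous context, and initializes the remaining slots to
// undefined.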
TNode<Context> ConstructorBuiltinsAssembler::FastNewFunctionContext(
    TNode<ScopeInfo> scope_info, TNode<Uint32T> slots, TNode<Context> context,
    ScopeType scope_type) {
  TNode<IntPtrT> slots_intptr = Signed(ChangeUint32ToWord(slots));
  TNode<IntPtrT> size = ElementOffsetFromIndex(slots_intptr, PACKED_ELEMENTS,
                                               Context::kTodoHeaderSize);

  // Allocate the new function context in new space.
  TNode<Context> function_context =
      UncheckedCast<Context>(AllocateInNewSpace(size));

  TNode<NativeContext> native_context = LoadNativeContext(context);
  Context::Field index;
  switch (scope_type) {
    case EVAL_SCOPE:
      index = Context::EVAL_CONTEXT_MAP_INDEX;
      break;
    case FUNCTION_SCOPE:
      index = Context::FUNCTION_CONTEXT_MAP_INDEX;
      break;
    default:
      UNREACHABLE();
  }
  TNode<Map> map = CAST(LoadContextElement(native_context, index));
  // Set up the header.
  StoreMapNoWriteBarrier(function_context, map);
  TNode<IntPtrT> min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
  // TODO(ishell): for now, length also includes MIN_CONTEXT_SLOTS.
  TNode<IntPtrT> length = IntPtrAdd(slots_intptr, min_context_slots);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
                                 SmiTag(length));
  StoreObjectFieldNoWriteBarrier(function_context, Context::kScopeInfoOffset,
                                 scope_info);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kPreviousOffset,
                                 context);

  // Initialize the rest of the slots to undefined.
  TNode<Oddball> undefined = UndefinedConstant();
  TNode<IntPtrT> start_offset = IntPtrConstant(Context::kTodoHeaderSize);
  CodeStubAssembler::VariableList vars(0, zone());
  BuildFastLoop<IntPtrT>(
      vars, start_offset, size,
      [=](TNode<IntPtrT> offset) {
        StoreObjectFieldNoWriteBarrier(function_context, offset, undefined);
      },
      kTaggedSize, IndexAdvanceMode::kPost);
  return function_context;
}

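// CreateRegExpLiteral: if the feedback slot already holds a boilerplate
// JSRegExp, clone it with a shallow field-by-field copy; otherwise create
// the literal (and the boilerplate) via Runtime::kCreateRegExpLiteral.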
TNode<JSRegExp> ConstructorBuiltinsAssembler::CreateRegExpLiteral(
    TNode<HeapObject> maybe_feedback_vector, TNode<TaggedIndex> slot,
    TNode<Object> pattern, TNode<Smi> flags, TNode<Context> context) {
  Label call_runtime(this, Label::kDeferred), end(this);

  GotoIf(IsUndefined(maybe_feedback_vector), &call_runtime);

  TVARIABLE(JSRegExp, result);
  TNode<FeedbackVector> feedback_vector = CAST(maybe_feedback_vector);
  TNode<Object> literal_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot));
  GotoIfNot(HasBoilerplate(literal_site), &call_runtime);
  {
    TNode<JSRegExp> boilerplate = CAST(literal_site);
    int size =
        JSRegExp::kHeaderSize + JSRegExp::kInObjectFieldCount * kTaggedSize;
    TNode<HeapObject> copy = Allocate(size);
    for (int offset = 0; offset < size; offset += kTaggedSize) {
      TNode<Object> value = LoadObjectField(boilerplate, offset);
      StoreObjectFieldNoWriteBarrier(copy, offset, value);
    }
    result = CAST(copy);
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    result = CAST(CallRuntime(Runtime::kCreateRegExpLiteral, context,
                              maybe_feedback_vector, slot, pattern, flags));
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}

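// CreateShallowArrayLiteral: requires a boilerplate in the feedback slot
// (otherwise jumps to {call_runtime}) and clones it with CloneFastJSArray,
// passing the AllocationSite along only when allocation-site tracking is
// requested.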
TNode<JSArray> ConstructorBuiltinsAssembler::CreateShallowArrayLiteral(
    TNode<FeedbackVector> feedback_vector, TNode<TaggedIndex> slot,
    TNode<Context> context, AllocationSiteMode allocation_site_mode,
    Label* call_runtime) {
  Label zero_capacity(this), cow_elements(this), fast_elements(this),
      return_result(this);

  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot));
  GotoIfNot(HasBoilerplate(maybe_allocation_site), call_runtime);

  TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
  TNode<JSArray> boilerplate = CAST(LoadBoilerplate(allocation_site));

  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
    return CloneFastJSArray(context, boilerplate, allocation_site);
  } else {
    return CloneFastJSArray(context, boilerplate);
  }
}

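// CreateEmptyArrayLiteral: `[]`. Ensures the feedback slot holds an
// AllocationSite (creating one if the slot still holds the uninitialized Smi
// sentinel) and then allocates a zero-length JSArray whose map matches the
// site's tracked elements kind.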
TNode<JSArray> ConstructorBuiltinsAssembler::CreateEmptyArrayLiteral(
    TNode<FeedbackVector> feedback_vector, TNode<TaggedIndex> slot,
    TNode<Context> context) {
  // Array literals always have a valid AllocationSite to properly track
  // elements transitions.
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot));
  TVARIABLE(AllocationSite, allocation_site);

  Label create_empty_array(this),
      initialize_allocation_site(this, Label::kDeferred), done(this);
  GotoIf(TaggedIsSmi(maybe_allocation_site), &initialize_allocation_site);
  {
    allocation_site = CAST(maybe_allocation_site);
    Goto(&create_empty_array);
  }
  // TODO(cbruni): create the AllocationSite in CSA.
  BIND(&initialize_allocation_site);
  {
    allocation_site = CreateAllocationSiteInFeedbackVector(
        feedback_vector,
        // TODO(v8:10047): pass slot as TaggedIndex here
        Unsigned(TaggedIndexToIntPtr(slot)));
    Goto(&create_empty_array);
  }

  BIND(&create_empty_array);
  TNode<Int32T> kind = LoadElementsKind(allocation_site.value());
  TNode<NativeContext> native_context = LoadNativeContext(context);
  Comment("LoadJSArrayElementsMap");
  TNode<Map> array_map = LoadJSArrayElementsMap(kind, native_context);
  TNode<IntPtrT> zero_intptr = IntPtrConstant(0);
  TNode<Smi> zero = SmiConstant(0);
  Comment("Allocate JSArray");
  TNode<JSArray> result =
      AllocateJSArray(GetInitialFastElementsKind(), array_map, zero_intptr,
                      zero, allocation_site.value());

  Goto(&done);
  BIND(&done);

  return result;
}

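// CreateShallowObjectLiteral: clones an object-literal boilerplate found in
// the feedback slot, bailing out to {call_runtime} if there is no
// boilerplate, the map is deprecated, or there are out-of-object fast
// properties. Properties, elements, and in-object fields are copied, an
// AllocationMemento is attached when allocation-site pretenuring is enabled,
// and mutable HeapNumber fields are re-boxed so the copy does not alias the
// boilerplate's numbers.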
TNode<HeapObject> ConstructorBuiltinsAssembler::CreateShallowObjectLiteral(
    TNode<FeedbackVector> feedback_vector, TNode<TaggedIndex> slot,
    Label* call_runtime) {
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot));
  GotoIfNot(HasBoilerplate(maybe_allocation_site), call_runtime);

  TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
  TNode<JSObject> boilerplate = LoadBoilerplate(allocation_site);
  TNode<Map> boilerplate_map = LoadMap(boilerplate);
  CSA_ASSERT(this, IsJSObjectMap(boilerplate_map));

  TVARIABLE(FixedArray, var_properties);
  {
    TNode<Uint32T> bit_field_3 = LoadMapBitField3(boilerplate_map);
    GotoIf(IsSetWord32<Map::Bits3::IsDeprecatedBit>(bit_field_3), call_runtime);
    // Directly copy over the property store for dict-mode boilerplates.
    Label if_dictionary(this), if_fast(this), done(this);
    Branch(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bit_field_3),
           &if_dictionary, &if_fast);
    BIND(&if_dictionary);
    {
      Comment("Copy dictionary properties");
      var_properties = CopyNameDictionary(CAST(LoadSlowProperties(boilerplate)),
                                          call_runtime);
      // Slow objects have no in-object properties.
      Goto(&done);
    }
    BIND(&if_fast);
    {
      // TODO(cbruni): support copying out-of-object properties.
      TNode<HeapObject> boilerplate_properties =
          LoadFastProperties(boilerplate);
      GotoIfNot(IsEmptyFixedArray(boilerplate_properties), call_runtime);
      var_properties = EmptyFixedArrayConstant();
      Goto(&done);
    }
    BIND(&done);
  }

  TVARIABLE(FixedArrayBase, var_elements);
  {
    // Copy the elements backing store, assuming that it's flat.
    Label if_empty_fixed_array(this), if_copy_elements(this), done(this);
    TNode<FixedArrayBase> boilerplate_elements = LoadElements(boilerplate);
    Branch(IsEmptyFixedArray(boilerplate_elements), &if_empty_fixed_array,
           &if_copy_elements);

    BIND(&if_empty_fixed_array);
    var_elements = boilerplate_elements;
    Goto(&done);

    BIND(&if_copy_elements);
    CSA_ASSERT(this, Word32BinaryNot(
                         IsFixedCOWArrayMap(LoadMap(boilerplate_elements))));
    ExtractFixedArrayFlags flags;
    flags |= ExtractFixedArrayFlag::kAllFixedArrays;
    flags |= ExtractFixedArrayFlag::kNewSpaceAllocationOnly;
    flags |= ExtractFixedArrayFlag::kDontCopyCOW;
    var_elements = CloneFixedArray(boilerplate_elements, flags);
    Goto(&done);
    BIND(&done);
  }

  // Ensure new-space allocation for a fresh JSObject so we can skip write
  // barriers when copying all object fields.
  STATIC_ASSERT(JSObject::kMaxInstanceSize < kMaxRegularHeapObjectSize);
  TNode<IntPtrT> instance_size =
      TimesTaggedSize(LoadMapInstanceSizeInWords(boilerplate_map));
  TNode<IntPtrT> allocation_size = instance_size;
  bool needs_allocation_memento = FLAG_allocation_site_pretenuring;
  if (needs_allocation_memento) {
    // Prepare for inner-allocating the AllocationMemento.
    allocation_size =
        IntPtrAdd(instance_size, IntPtrConstant(AllocationMemento::kSize));
  }

  TNode<HeapObject> copy =
      UncheckedCast<HeapObject>(AllocateInNewSpace(allocation_size));
  {
    Comment("Initialize Literal Copy");
    // Initialize Object fields.
    StoreMapNoWriteBarrier(copy, boilerplate_map);
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kPropertiesOrHashOffset,
                                   var_properties.value());
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kElementsOffset,
                                   var_elements.value());
  }

  // Initialize the AllocationMemento before potential GCs due to heap number
  // allocation when copying the in-object properties.
  if (needs_allocation_memento) {
    InitializeAllocationMemento(copy, instance_size, allocation_site);
  }

  {
    // Copy over in-object properties.
    Label continue_with_write_barrier(this), done_init(this);
    TVARIABLE(IntPtrT, offset, IntPtrConstant(JSObject::kHeaderSize));
    // Heap numbers are only mutable on 32-bit platforms.
    bool may_use_mutable_heap_numbers = !FLAG_unbox_double_fields;
    {
      Comment("Copy in-object properties fast");
      Label continue_fast(this, &offset);
      Branch(IntPtrEqual(offset.value(), instance_size), &done_init,
             &continue_fast);
      BIND(&continue_fast);
      if (may_use_mutable_heap_numbers) {
        TNode<Object> field = LoadObjectField(boilerplate, offset.value());
        Label store_field(this);
        GotoIf(TaggedIsSmi(field), &store_field);
        // TODO(leszeks): Read the field descriptor to decide if this heap
        // number is mutable or not.
        GotoIf(IsHeapNumber(CAST(field)), &continue_with_write_barrier);
        Goto(&store_field);
        BIND(&store_field);
        StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
      } else {
        // Copy fields as raw data.
        TNode<TaggedT> field =
            LoadObjectField<TaggedT>(boilerplate, offset.value());
        StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
      }
      offset = IntPtrAdd(offset.value(), IntPtrConstant(kTaggedSize));
      Branch(WordNotEqual(offset.value(), instance_size), &continue_fast,
             &done_init);
    }

    if (!may_use_mutable_heap_numbers) {
      BIND(&done_init);
      return copy;
    }
    // Continue initializing the literal after seeing the first sub-object
    // potentially causing allocation. In this case we prepare the new literal
    // by copying all pending fields over from the boilerplate and emit full
    // write barriers from here on.
    BIND(&continue_with_write_barrier);
    {
      Comment("Copy in-object properties slow");
      BuildFastLoop<IntPtrT>(
          offset.value(), instance_size,
          [=](TNode<IntPtrT> offset) {
            // TODO(ishell): value decompression is not necessary here.
            TNode<Object> field = LoadObjectField(boilerplate, offset);
            StoreObjectFieldNoWriteBarrier(copy, offset, field);
          },
          kTaggedSize, IndexAdvanceMode::kPost);
      CopyMutableHeapNumbersInObject(copy, offset.value(), instance_size);
      Goto(&done_init);
    }
    BIND(&done_init);
  }
  return copy;
}

// Used by the CreateEmptyObjectLiteral bytecode and the Object constructor.
TNode<JSObject> ConstructorBuiltinsAssembler::CreateEmptyObjectLiteral(
    TNode<Context> context) {
  TNode<NativeContext> native_context = LoadNativeContext(context);
  TNode<Map> map = LoadObjectFunctionInitialMap(native_context);
  // Ensure that slack tracking is disabled for the map.
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  CSA_ASSERT(this, IsClearWord32<Map::Bits3::ConstructionCounterBits>(
                       LoadMapBitField3(map)));
  TNode<FixedArray> empty_fixed_array = EmptyFixedArrayConstant();
  TNode<JSObject> result =
      AllocateJSObjectFromMap(map, empty_fixed_array, empty_fixed_array);
  return result;
}

void ConstructorBuiltinsAssembler::CopyMutableHeapNumbersInObject(
    TNode<HeapObject> copy, TNode<IntPtrT> start_offset,
    TNode<IntPtrT> end_offset) {
  // Iterate over all object properties of a freshly copied object and
  // duplicate mutable heap numbers.
  if (FLAG_unbox_double_fields) return;
  Comment("Copy mutable HeapNumber values");
  BuildFastLoop<IntPtrT>(
      start_offset, end_offset,
      [=](TNode<IntPtrT> offset) {
        TNode<Object> field = LoadObjectField(copy, offset);
        Label copy_heap_number(this, Label::kDeferred), continue_loop(this);
        // We only have to clone complex field values.
        GotoIf(TaggedIsSmi(field), &continue_loop);
        // TODO(leszeks): Read the field descriptor to decide if this heap
        // number is mutable or not.
        Branch(IsHeapNumber(CAST(field)), &copy_heap_number, &continue_loop);
        BIND(&copy_heap_number);
        {
          TNode<Float64T> double_value = LoadHeapNumberValue(CAST(field));
          TNode<HeapNumber> heap_number =
              AllocateHeapNumberWithValue(double_value);
          StoreObjectField(copy, offset, heap_number);
          Goto(&continue_loop);
        }
        BIND(&continue_loop);
      },
      kTaggedSize, IndexAdvanceMode::kPost);
}

}  // namespace internal
}  // namespace v8