// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-constructor-gen.h"

#include "src/ast/ast.h"
#include "src/builtins/builtins-call-gen.h"
#include "src/builtins/builtins-constructor.h"
#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/code-factory.h"
#include "src/code-stub-assembler.h"
#include "src/counters.h"
#include "src/interface-descriptors.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {

void Builtins::Generate_ConstructVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructVarargs(masm,
                                  BUILTIN_CODE(masm->isolate(), Construct));
}

void Builtins::Generate_ConstructForwardVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructForwardVarargs(
      masm, CallOrConstructMode::kConstruct,
      BUILTIN_CODE(masm->isolate(), Construct));
}

void Builtins::Generate_ConstructFunctionForwardVarargs(MacroAssembler* masm) {
  Generate_CallOrConstructForwardVarargs(
      masm, CallOrConstructMode::kConstruct,
      BUILTIN_CODE(masm->isolate(), ConstructFunction));
}

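// ConstructWithArrayLike handles construct sites that take an array-like
// arguments object, e.g. Reflect.construct(target, argumentsList), while
// ConstructWithSpread (below) handles spread construct sites such as
// `new Foo(...args)`.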
TF_BUILTIN(ConstructWithArrayLike, CallOrConstructBuiltinsAssembler) {
  TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
  SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
  TNode<Object> arguments_list = CAST(Parameter(Descriptor::kArgumentsList));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  CallOrConstructWithArrayLike(target, new_target, arguments_list, context);
}

TF_BUILTIN(ConstructWithSpread, CallOrConstructBuiltinsAssembler) {
  TNode<Object> target = CAST(Parameter(Descriptor::kTarget));
  SloppyTNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
  TNode<Object> spread = CAST(Parameter(Descriptor::kSpread));
  TNode<Int32T> args_count =
      UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  CallOrConstructWithSpread(target, new_target, spread, args_count, context);
}

typedef compiler::Node Node;

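// FastNewClosure allocates and initializes a JSFunction for the given
// SharedFunctionInfo and FeedbackCell without calling into the runtime. It
// runs on every closure creation, e.g. for the inner function in:
//
//   function outer() { return function inner() {}; }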
TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
  Node* shared_function_info = Parameter(Descriptor::kSharedFunctionInfo);
  Node* feedback_cell = Parameter(Descriptor::kFeedbackCell);
  Node* context = Parameter(Descriptor::kContext);

  CSA_ASSERT(this, IsFeedbackCell(feedback_cell));
  CSA_ASSERT(this, IsSharedFunctionInfo(shared_function_info));

  IncrementCounter(isolate()->counters()->fast_new_closure_total(), 1);

  // Bump the closure counter encoded in the {feedback_cell}'s map.
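  // The map transitions NoClosuresCellMap -> OneClosureCellMap ->
  // ManyClosuresCellMap, i.e. it records whether zero, one, or many closures
  // have been created for this cell (used, e.g., when deciding whether it is
  // safe to specialize code on a single closure).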
  {
    Node* const feedback_cell_map = LoadMap(feedback_cell);
    Label no_closures(this), one_closure(this), cell_done(this);

    GotoIf(IsNoClosuresCellMap(feedback_cell_map), &no_closures);
    GotoIf(IsOneClosureCellMap(feedback_cell_map), &one_closure);
    CSA_ASSERT(this, IsManyClosuresCellMap(feedback_cell_map),
               feedback_cell_map, feedback_cell);
    Goto(&cell_done);

    BIND(&no_closures);
    StoreMapNoWriteBarrier(feedback_cell, Heap::kOneClosureCellMapRootIndex);
    Goto(&cell_done);

    BIND(&one_closure);
    StoreMapNoWriteBarrier(feedback_cell, Heap::kManyClosuresCellMapRootIndex);
    Goto(&cell_done);

    BIND(&cell_done);
  }

  // The calculation of |function_map_index| must be in sync with
  // SharedFunctionInfo::function_map_index().
  Node* const flags =
      LoadObjectField(shared_function_info, SharedFunctionInfo::kFlagsOffset,
                      MachineType::Uint32());
  Node* const function_map_index = IntPtrAdd(
      DecodeWordFromWord32<SharedFunctionInfo::FunctionMapIndexBits>(flags),
      IntPtrConstant(Context::FIRST_FUNCTION_MAP_INDEX));
  CSA_ASSERT(this, UintPtrLessThanOrEqual(
                       function_map_index,
                       IntPtrConstant(Context::LAST_FUNCTION_MAP_INDEX)));

  // Get the function map in the current native context and set that
  // as the map of the allocated object.
  Node* const native_context = LoadNativeContext(context);
  Node* const function_map =
      LoadContextElement(native_context, function_map_index);

  // Create a new closure from the given function info in new space.
  Node* instance_size_in_bytes =
      TimesPointerSize(LoadMapInstanceSizeInWords(function_map));
  Node* const result = Allocate(instance_size_in_bytes);
  StoreMapNoWriteBarrier(result, function_map);
  InitializeJSObjectBodyNoSlackTracking(result, function_map,
                                        instance_size_in_bytes,
                                        JSFunction::kSizeWithoutPrototype);

  // Initialize the rest of the function.
  StoreObjectFieldRoot(result, JSObject::kPropertiesOrHashOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(result, JSObject::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  {
    // Set function prototype if necessary.
    Label done(this), init_prototype(this);
    Branch(IsFunctionWithPrototypeSlotMap(function_map), &init_prototype,
           &done);

    BIND(&init_prototype);
    StoreObjectFieldRoot(result, JSFunction::kPrototypeOrInitialMapOffset,
                         Heap::kTheHoleValueRootIndex);
    Goto(&done);
    BIND(&done);
  }

  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kPointerSize);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackCellOffset,
                                 feedback_cell);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                 shared_function_info);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
  Handle<Code> lazy_builtin_handle(
      isolate()->builtins()->builtin(Builtins::kCompileLazy), isolate());
  Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeOffset, lazy_builtin);
  Return(result);
}

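// FastNewObject implements the fast path for allocating the receiver of a
// [[Construct]] call: it creates a JSObject from new_target's initial map,
// falling back to Runtime::kNewObject when the fast path does not apply.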
TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
  Node* context = Parameter(Descriptor::kContext);
  Node* target = Parameter(Descriptor::kTarget);
  Node* new_target = Parameter(Descriptor::kNewTarget);

  Label call_runtime(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  Return(result);

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kNewObject, context, target, new_target);
}

Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
                                                      Node* target,
                                                      Node* new_target) {
  VARIABLE(var_obj, MachineRepresentation::kTagged);
  Label call_runtime(this), end(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  var_obj.Bind(result);
  Goto(&end);

  BIND(&call_runtime);
  var_obj.Bind(CallRuntime(Runtime::kNewObject, context, target, new_target));
  Goto(&end);

  BIND(&end);
  return var_obj.value();
}

Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
                                                      Node* target,
                                                      Node* new_target,
                                                      Label* call_runtime) {
  CSA_ASSERT(this, HasInstanceType(target, JS_FUNCTION_TYPE));
  CSA_ASSERT(this, IsJSReceiver(new_target));

  // Verify that the new target is a JSFunction.
  Label fast(this), end(this);
  GotoIf(HasInstanceType(new_target, JS_FUNCTION_TYPE), &fast);
  Goto(call_runtime);

  BIND(&fast);

  // Load the initial map and verify that it's in fact a map.
  Node* initial_map =
      LoadObjectField(new_target, JSFunction::kPrototypeOrInitialMapOffset);
  GotoIf(TaggedIsSmi(initial_map), call_runtime);
  GotoIf(DoesntHaveInstanceType(initial_map, MAP_TYPE), call_runtime);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  Node* new_target_constructor =
      LoadObjectField(initial_map, Map::kConstructorOrBackPointerOffset);
  GotoIf(WordNotEqual(target, new_target_constructor), call_runtime);

  VARIABLE(properties, MachineRepresentation::kTagged);

  Label instantiate_map(this), allocate_properties(this);
  GotoIf(IsDictionaryMap(initial_map), &allocate_properties);
  {
    properties.Bind(EmptyFixedArrayConstant());
    Goto(&instantiate_map);
  }
  BIND(&allocate_properties);
  {
    properties.Bind(AllocateNameDictionary(NameDictionary::kInitialCapacity));
    Goto(&instantiate_map);
  }

  BIND(&instantiate_map);
  return AllocateJSObjectFromMap(initial_map, properties.value(), nullptr,
                                 kNone, kWithSlackTracking);
}

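// A function context is laid out like a FixedArray:
//   [ map | length | scope_info | previous | extension | native_context |
//     slot 0 | ... | slot N-1 ]
// where the first Context::MIN_CONTEXT_SLOTS entries are the fixed slots
// initialized below and the remaining {slots} entries start out undefined.
// E.g. `function f() { let x; return () => x; }` allocates a function context
// with one user slot (for the captured x) each time f is called.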
Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
    Node* scope_info, Node* slots, Node* context, ScopeType scope_type) {
  slots = ChangeUint32ToWord(slots);

  // TODO(ishell): Use CSA::OptimalParameterMode() here.
  ParameterMode mode = INTPTR_PARAMETERS;
  Node* min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
  Node* length = IntPtrAdd(slots, min_context_slots);
  Node* size = GetFixedArrayAllocationSize(length, PACKED_ELEMENTS, mode);

  // Allocate the new function context in new space.
  TNode<Context> function_context =
      UncheckedCast<Context>(AllocateInNewSpace(size));

  Heap::RootListIndex context_type;
  switch (scope_type) {
    case EVAL_SCOPE:
      context_type = Heap::kEvalContextMapRootIndex;
      break;
    case FUNCTION_SCOPE:
      context_type = Heap::kFunctionContextMapRootIndex;
      break;
    default:
      UNREACHABLE();
  }
  StoreMapNoWriteBarrier(function_context, context_type);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
                                 SmiTag(length));

  // Set up the fixed slots.
  StoreFixedArrayElement(function_context, Context::SCOPE_INFO_INDEX,
                         scope_info, SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(function_context, Context::PREVIOUS_INDEX, context,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(function_context, Context::EXTENSION_INDEX,
                         TheHoleConstant(), SKIP_WRITE_BARRIER);

  // Copy the native context from the previous context.
  Node* native_context = LoadNativeContext(context);
  StoreFixedArrayElement(function_context, Context::NATIVE_CONTEXT_INDEX,
                         native_context, SKIP_WRITE_BARRIER);

  // Initialize the rest of the slots to undefined.
  Node* undefined = UndefinedConstant();
  BuildFastFixedArrayForEach(
      function_context, PACKED_ELEMENTS, min_context_slots, length,
      [this, undefined](Node* context, Node* offset) {
        StoreNoWriteBarrier(MachineRepresentation::kTagged, context, offset,
                            undefined);
      },
      mode);

  return function_context;
}

TF_BUILTIN(FastNewFunctionContextEval, ConstructorBuiltinsAssembler) {
  Node* scope_info = Parameter(Descriptor::kScopeInfo);
  Node* slots = Parameter(Descriptor::kSlots);
  Node* context = Parameter(Descriptor::kContext);
  Return(EmitFastNewFunctionContext(scope_info, slots, context,
                                    ScopeType::EVAL_SCOPE));
}

TF_BUILTIN(FastNewFunctionContextFunction, ConstructorBuiltinsAssembler) {
  Node* scope_info = Parameter(Descriptor::kScopeInfo);
  Node* slots = Parameter(Descriptor::kSlots);
  Node* context = Parameter(Descriptor::kContext);
  Return(EmitFastNewFunctionContext(scope_info, slots, context,
                                    ScopeType::FUNCTION_SCOPE));
}

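// Creates a JSRegExp for a literal such as /ab+c/gi. On the fast path the
// boilerplate regexp cached in the feedback vector slot is copied field by
// field; if no boilerplate exists yet, the runtime creates and caches one.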
Node* ConstructorBuiltinsAssembler::EmitCreateRegExpLiteral(
    Node* feedback_vector, Node* slot, Node* pattern, Node* flags,
    Node* context) {
  Label call_runtime(this, Label::kDeferred), end(this);

  VARIABLE(result, MachineRepresentation::kTagged);
  TNode<Object> literal_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  GotoIf(NotHasBoilerplate(literal_site), &call_runtime);
  {
    Node* boilerplate = literal_site;
    CSA_ASSERT(this, IsJSRegExp(boilerplate));
    int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    Node* copy = Allocate(size);
    for (int offset = 0; offset < size; offset += kPointerSize) {
      Node* value = LoadObjectField(boilerplate, offset);
      StoreObjectFieldNoWriteBarrier(copy, offset, value);
    }
    result.Bind(copy);
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context,
                            feedback_vector, SmiTag(slot), pattern, flags));
    Goto(&end);
  }

  BIND(&end);
  return result.value();
}

TF_BUILTIN(CreateRegExpLiteral, ConstructorBuiltinsAssembler) {
  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
  Node* pattern = Parameter(Descriptor::kPattern);
  Node* flags = Parameter(Descriptor::kFlags);
  Node* context = Parameter(Descriptor::kContext);
  Node* result =
      EmitCreateRegExpLiteral(feedback_vector, slot, pattern, flags, context);
  Return(result);
}

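// Creates a JSArray for a shallow literal such as [1, 2, 3] by cloning the
// boilerplate array tracked by the AllocationSite in the feedback vector.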
Node* ConstructorBuiltinsAssembler::EmitCreateShallowArrayLiteral(
    Node* feedback_vector, Node* slot, Node* context, Label* call_runtime,
    AllocationSiteMode allocation_site_mode) {
  Label zero_capacity(this), cow_elements(this), fast_elements(this),
      return_result(this);
  VARIABLE(result, MachineRepresentation::kTagged);

  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);

  TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
  TNode<JSArray> boilerplate = CAST(LoadBoilerplate(allocation_site));

  ParameterMode mode = OptimalParameterMode();
  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
    return CloneFastJSArray(context, boilerplate, mode, allocation_site);
  } else {
    return CloneFastJSArray(context, boilerplate, mode);
  }
}

TF_BUILTIN(CreateShallowArrayLiteral, ConstructorBuiltinsAssembler) {
  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
  Node* constant_elements = Parameter(Descriptor::kConstantElements);
  Node* context = Parameter(Descriptor::kContext);
  Label call_runtime(this, Label::kDeferred);
  Return(EmitCreateShallowArrayLiteral(feedback_vector, slot, context,
                                       &call_runtime,
                                       DONT_TRACK_ALLOCATION_SITE));

  BIND(&call_runtime);
  {
    Comment("call runtime");
    int const flags =
        AggregateLiteral::kDisableMementos | AggregateLiteral::kIsShallow;
    Return(CallRuntime(Runtime::kCreateArrayLiteral, context, feedback_vector,
                       SmiTag(slot), constant_elements, SmiConstant(flags)));
  }
}

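// Creates a JSArray for the empty literal []. Even the empty literal needs an
// AllocationSite, so that later stores (e.g. a[0] = 1.5) can record
// elements-kind transitions for this site.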
Node* ConstructorBuiltinsAssembler::EmitCreateEmptyArrayLiteral(
    Node* feedback_vector, Node* slot, Node* context) {
  // Array literals always have a valid AllocationSite to properly track
  // elements transitions.
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  TVARIABLE(AllocationSite, allocation_site);

  Label create_empty_array(this),
      initialize_allocation_site(this, Label::kDeferred), done(this);
  GotoIf(TaggedIsSmi(maybe_allocation_site), &initialize_allocation_site);
  {
    allocation_site = CAST(maybe_allocation_site);
    Goto(&create_empty_array);
  }
  // TODO(cbruni): create the AllocationSite in CSA.
  BIND(&initialize_allocation_site);
  {
    allocation_site =
        CreateAllocationSiteInFeedbackVector(feedback_vector, SmiTag(slot));
    Goto(&create_empty_array);
  }

  BIND(&create_empty_array);
  TNode<Int32T> kind = LoadElementsKind(allocation_site.value());
  TNode<Context> native_context = LoadNativeContext(context);
  Comment("LoadJSArrayElementsMap");
  Node* array_map = LoadJSArrayElementsMap(kind, native_context);
  Node* zero = SmiConstant(0);
  Comment("Allocate JSArray");
  Node* result =
      AllocateJSArray(GetInitialFastElementsKind(), array_map, zero, zero,
                      allocation_site.value(), ParameterMode::SMI_PARAMETERS);

  Goto(&done);
  BIND(&done);

  return result;
}

TF_BUILTIN(CreateEmptyArrayLiteral, ConstructorBuiltinsAssembler) {
  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
  Node* context = Parameter(Descriptor::kContext);
  Node* result = EmitCreateEmptyArrayLiteral(feedback_vector, slot, context);
  Return(result);
}

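// Creates a JSObject for a shallow literal such as {a: 1, b: 2} by copying
// the boilerplate object's map, properties, elements and in-object fields.
// Bails out to the runtime for deprecated maps and for boilerplates with
// out-of-object fast properties.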
Node* ConstructorBuiltinsAssembler::EmitCreateShallowObjectLiteral(
    Node* feedback_vector, Node* slot, Label* call_runtime) {
  TNode<Object> maybe_allocation_site =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot, 0, INTPTR_PARAMETERS));
  GotoIf(NotHasBoilerplate(maybe_allocation_site), call_runtime);

  TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
  TNode<JSObject> boilerplate = LoadBoilerplate(allocation_site);
  TNode<Map> boilerplate_map = LoadMap(boilerplate);
  CSA_ASSERT(this, IsJSObjectMap(boilerplate_map));

  VARIABLE(var_properties, MachineRepresentation::kTagged);
  {
    Node* bit_field_3 = LoadMapBitField3(boilerplate_map);
    GotoIf(IsSetWord32<Map::IsDeprecatedBit>(bit_field_3), call_runtime);
    // Directly copy over the property store for dict-mode boilerplates.
    Label if_dictionary(this), if_fast(this), done(this);
    Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field_3), &if_dictionary,
           &if_fast);
    BIND(&if_dictionary);
    {
      Comment("Copy dictionary properties");
      var_properties.Bind(CopyNameDictionary(
          CAST(LoadSlowProperties(boilerplate)), call_runtime));
      // Slow objects have no in-object properties.
      Goto(&done);
    }
    BIND(&if_fast);
    {
      // TODO(cbruni): support copying out-of-object properties.
      Node* boilerplate_properties = LoadFastProperties(boilerplate);
      GotoIfNot(IsEmptyFixedArray(boilerplate_properties), call_runtime);
      var_properties.Bind(EmptyFixedArrayConstant());
      Goto(&done);
    }
    BIND(&done);
  }

  VARIABLE(var_elements, MachineRepresentation::kTagged);
  {
    // Copy the elements backing store, assuming that it's flat.
    Label if_empty_fixed_array(this), if_copy_elements(this), done(this);
    Node* boilerplate_elements = LoadElements(boilerplate);
    Branch(IsEmptyFixedArray(boilerplate_elements), &if_empty_fixed_array,
           &if_copy_elements);

    BIND(&if_empty_fixed_array);
    var_elements.Bind(boilerplate_elements);
    Goto(&done);

    BIND(&if_copy_elements);
    CSA_ASSERT(this, Word32BinaryNot(
                         IsFixedCOWArrayMap(LoadMap(boilerplate_elements))));
    ExtractFixedArrayFlags flags;
    flags |= ExtractFixedArrayFlag::kAllFixedArrays;
    flags |= ExtractFixedArrayFlag::kNewSpaceAllocationOnly;
    flags |= ExtractFixedArrayFlag::kDontCopyCOW;
    var_elements.Bind(CloneFixedArray(boilerplate_elements, flags));
    Goto(&done);
    BIND(&done);
  }

  // Ensure new-space allocation for a fresh JSObject so we can skip write
  // barriers when copying all object fields.
  STATIC_ASSERT(JSObject::kMaxInstanceSize < kMaxRegularHeapObjectSize);
  Node* instance_size =
      TimesPointerSize(LoadMapInstanceSizeInWords(boilerplate_map));
  Node* allocation_size = instance_size;
  bool needs_allocation_memento = FLAG_allocation_site_pretenuring;
  if (needs_allocation_memento) {
    // Prepare for inner-allocating the AllocationMemento.
    allocation_size =
        IntPtrAdd(instance_size, IntPtrConstant(AllocationMemento::kSize));
  }

  Node* copy = AllocateInNewSpace(allocation_size);
  {
    Comment("Initialize Literal Copy");
    // Initialize Object fields.
    StoreMapNoWriteBarrier(copy, boilerplate_map);
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kPropertiesOrHashOffset,
                                   var_properties.value());
    StoreObjectFieldNoWriteBarrier(copy, JSObject::kElementsOffset,
                                   var_elements.value());
  }

  // Initialize the AllocationMemento before potential GCs due to heap number
  // allocation when copying the in-object properties.
  if (needs_allocation_memento) {
    InitializeAllocationMemento(copy, instance_size, allocation_site);
  }

  {
    // Copy over in-object properties.
    Label continue_with_write_barrier(this), done_init(this);
    VARIABLE(offset, MachineType::PointerRepresentation(),
             IntPtrConstant(JSObject::kHeaderSize));
    // Mutable heap numbers only occur on 32-bit platforms.
    bool may_use_mutable_heap_numbers =
        FLAG_track_double_fields && !FLAG_unbox_double_fields;
    {
      Comment("Copy in-object properties fast");
      Label continue_fast(this, &offset);
      Branch(WordEqual(offset.value(), instance_size), &done_init,
             &continue_fast);
      BIND(&continue_fast);
      Node* field = LoadObjectField(boilerplate, offset.value());
      if (may_use_mutable_heap_numbers) {
        Label store_field(this);
        GotoIf(TaggedIsSmi(field), &store_field);
        GotoIf(IsMutableHeapNumber(field), &continue_with_write_barrier);
        Goto(&store_field);
        BIND(&store_field);
      }
      StoreObjectFieldNoWriteBarrier(copy, offset.value(), field);
      offset.Bind(IntPtrAdd(offset.value(), IntPtrConstant(kPointerSize)));
      Branch(WordNotEqual(offset.value(), instance_size), &continue_fast,
             &done_init);
    }

    if (!may_use_mutable_heap_numbers) {
      BIND(&done_init);
      return copy;
    }
    // Continue initializing the literal after seeing the first sub-object
    // potentially causing allocation. In this case we prepare the new literal
    // by copying all pending fields over from the boilerplate and emit full
    // write barriers from here on.
    BIND(&continue_with_write_barrier);
    {
      Comment("Copy in-object properties slow");
      BuildFastLoop(offset.value(), instance_size,
                    [=](Node* offset) {
                      Node* field = LoadObjectField(boilerplate, offset);
                      StoreObjectFieldNoWriteBarrier(copy, offset, field);
                    },
                    kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
      Comment("Copy mutable HeapNumber values");
      BuildFastLoop(offset.value(), instance_size,
                    [=](Node* offset) {
                      Node* field = LoadObjectField(copy, offset);
                      Label copy_mutable_heap_number(this, Label::kDeferred),
                          continue_loop(this);
                      // We only have to clone complex field values.
                      GotoIf(TaggedIsSmi(field), &continue_loop);
                      Branch(IsMutableHeapNumber(field),
                             &copy_mutable_heap_number, &continue_loop);
                      BIND(&copy_mutable_heap_number);
                      {
                        Node* double_value = LoadHeapNumberValue(field);
                        Node* mutable_heap_number =
                            AllocateMutableHeapNumberWithValue(double_value);
                        StoreObjectField(copy, offset, mutable_heap_number);
                        Goto(&continue_loop);
                      }
                      BIND(&continue_loop);
                    },
                    kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
      Goto(&done_init);
    }
    BIND(&done_init);
  }
  return copy;
}

TF_BUILTIN(CreateShallowObjectLiteral, ConstructorBuiltinsAssembler) {
  Label call_runtime(this);
  Node* feedback_vector = Parameter(Descriptor::kFeedbackVector);
  Node* slot = SmiUntag(Parameter(Descriptor::kSlot));
  Node* copy =
      EmitCreateShallowObjectLiteral(feedback_vector, slot, &call_runtime);
  Return(copy);

  BIND(&call_runtime);
  Node* object_boilerplate_description =
      Parameter(Descriptor::kObjectBoilerplateDescription);
  Node* flags = Parameter(Descriptor::kFlags);
  Node* context = Parameter(Descriptor::kContext);
  TailCallRuntime(Runtime::kCreateObjectLiteral, context, feedback_vector,
                  SmiTag(slot), object_boilerplate_description, flags);
}

// Used by the CreateEmptyObjectLiteral bytecode and the Object constructor.
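// I.e. this is the shared allocation path for both `{}` and `new Object()`.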
Node* ConstructorBuiltinsAssembler::EmitCreateEmptyObjectLiteral(
    Node* context) {
  Node* native_context = LoadNativeContext(context);
  Node* object_function =
      LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX);
  Node* map = LoadObjectField(object_function,
                              JSFunction::kPrototypeOrInitialMapOffset);
  CSA_ASSERT(this, IsMap(map));
  // Ensure that slack tracking is disabled for the map.
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  CSA_ASSERT(
      this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
  Node* empty_fixed_array = EmptyFixedArrayConstant();
  Node* result =
      AllocateJSObjectFromMap(map, empty_fixed_array, empty_fixed_array);
  return result;
}

// ES #sec-object-constructor
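// Object() / new Object() / Object(undefined) / Object(null) allocate a fresh
// empty object; Object(value) and new Object(value) otherwise return
// ToObject(value); a subclass construct site (new.target != Object) allocates
// the instance via FastNewObject.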
TF_BUILTIN(ObjectConstructor, ConstructorBuiltinsAssembler) {
  int const kValueArg = 0;
  Node* argc =
      ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
  CodeStubArguments args(this, argc);
  Node* context = Parameter(Descriptor::kContext);
  Node* new_target = Parameter(Descriptor::kJSNewTarget);

  VARIABLE(var_result, MachineRepresentation::kTagged);
  Label if_subclass(this, Label::kDeferred), if_notsubclass(this),
      return_result(this);
  GotoIf(IsUndefined(new_target), &if_notsubclass);
  TNode<JSFunction> target = CAST(Parameter(Descriptor::kJSTarget));
  Branch(WordEqual(new_target, target), &if_notsubclass, &if_subclass);

  BIND(&if_subclass);
  {
    Node* result =
        CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
    var_result.Bind(result);
    Goto(&return_result);
  }

  BIND(&if_notsubclass);
  {
    Label if_newobject(this, Label::kDeferred), if_toobject(this);

    Node* value_index = IntPtrConstant(kValueArg);
    GotoIf(UintPtrGreaterThanOrEqual(value_index, argc), &if_newobject);
    Node* value = args.AtIndex(value_index);
    GotoIf(IsNull(value), &if_newobject);
    Branch(IsUndefined(value), &if_newobject, &if_toobject);

    BIND(&if_newobject);
    {
      Node* result = EmitCreateEmptyObjectLiteral(context);
      var_result.Bind(result);
      Goto(&return_result);
    }

    BIND(&if_toobject);
    {
      Node* result = CallBuiltin(Builtins::kToObject, context, value);
      var_result.Bind(result);
      Goto(&return_result);
    }
  }

  BIND(&return_result);
  args.PopAndReturn(var_result.value());
}

// ES #sec-number-constructor
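// Number(value) returns a primitive number, e.g. Number("42.5") == 42.5,
// while `new Number(value)` allocates a JSValue wrapper object holding the
// coerced value.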
TF_BUILTIN(NumberConstructor, ConstructorBuiltinsAssembler) {
  Node* context = Parameter(Descriptor::kContext);
  Node* argc =
      ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
  CodeStubArguments args(this, argc);

  // 1. If no arguments were passed to this function invocation, let n be +0.
  VARIABLE(var_n, MachineRepresentation::kTagged, SmiConstant(0));
  Label if_nloaded(this, &var_n);
  GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_nloaded);

  // 2. Else,
  //    a. Let prim be ? ToNumeric(value).
  //    b. If Type(prim) is BigInt, let n be the Number value for prim.
  //    c. Otherwise, let n be prim.
  Node* value = args.AtIndex(0);
  var_n.Bind(ToNumber(context, value, BigIntHandling::kConvertToNumber));
  Goto(&if_nloaded);

  BIND(&if_nloaded);
  {
    // 3. If NewTarget is undefined, return n.
    Node* n_value = var_n.value();
    Node* new_target = Parameter(Descriptor::kJSNewTarget);
    Label return_n(this), constructnumber(this, Label::kDeferred);
    Branch(IsUndefined(new_target), &return_n, &constructnumber);

    BIND(&return_n);
    { args.PopAndReturn(n_value); }

    BIND(&constructnumber);
    {
      // 4. Let O be ? OrdinaryCreateFromConstructor(NewTarget,
      //    "%NumberPrototype%", « [[NumberData]] »).
      // 5. Set O.[[NumberData]] to n.
      // 6. Return O.

      // Instead of using Parameter(Descriptor::kJSTarget), we load the target
      // from the current frame here in order to reduce register pressure on
      // the fast path.
      TNode<JSFunction> target = LoadTargetFromFrame();
      Node* result =
          CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
      StoreObjectField(result, JSValue::kValueOffset, n_value);
      args.PopAndReturn(result);
    }
  }
}

TF_BUILTIN(GenericConstructorLazyDeoptContinuation,
           ConstructorBuiltinsAssembler) {
  Node* result = Parameter(Descriptor::kResult);
  Return(result);
}

// https://tc39.github.io/ecma262/#sec-string-constructor
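// String(value) returns a primitive string, with the special case that
// String(someSymbol) returns the symbol's descriptive string (e.g.
// "Symbol(foo)"); `new String(value)` allocates a JSValue wrapper instead.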
TF_BUILTIN(StringConstructor, ConstructorBuiltinsAssembler) {
  Node* context = Parameter(Descriptor::kContext);
  Node* argc =
      ChangeInt32ToIntPtr(Parameter(Descriptor::kJSActualArgumentsCount));
  CodeStubArguments args(this, argc);

  TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));

  // 1. If no arguments were passed to this function invocation, let s be "".
  VARIABLE(var_s, MachineRepresentation::kTagged, EmptyStringConstant());
  Label if_sloaded(this, &var_s);
  GotoIf(WordEqual(argc, IntPtrConstant(0)), &if_sloaded);

  // 2. Else,
  //    a. If NewTarget is undefined [...]
  Node* value = args.AtIndex(0);
  Label if_tostring(this, &var_s);
  GotoIfNot(IsUndefined(new_target), &if_tostring);

  // 2a. [...] and Type(value) is Symbol, return SymbolDescriptiveString(value).
  GotoIf(TaggedIsSmi(value), &if_tostring);
  GotoIfNot(IsSymbol(value), &if_tostring);
  {
    Node* result =
        CallRuntime(Runtime::kSymbolDescriptiveString, context, value);
    args.PopAndReturn(result);
  }

  // 2b. Let s be ? ToString(value).
  BIND(&if_tostring);
  {
    var_s.Bind(CallBuiltin(Builtins::kToString, context, value));
    Goto(&if_sloaded);
  }

  // 3. If NewTarget is undefined, return s.
  BIND(&if_sloaded);
  {
    Node* s_value = var_s.value();
    Label return_s(this), constructstring(this, Label::kDeferred);
    Branch(IsUndefined(new_target), &return_s, &constructstring);

    BIND(&return_s);
    { args.PopAndReturn(s_value); }

    BIND(&constructstring);
    {
      // Instead of using Parameter(Descriptor::kJSTarget), we load the target
      // from the current frame here in order to reduce register pressure on
      // the fast path.
      TNode<JSFunction> target = LoadTargetFromFrame();

      Node* result =
          CallBuiltin(Builtins::kFastNewObject, context, target, new_target);
      StoreObjectField(result, JSValue::kValueOffset, s_value);
      args.PopAndReturn(result);
    }
  }
}

}  // namespace internal
}  // namespace v8