// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/api/api.h"
#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/codegen/code-stub-assembler.h"
#include "src/codegen/macro-assembler.h"
#include "src/execution/frame-constants.h"
#include "src/heap/memory-chunk.h"
#include "src/ic/accessor-assembler.h"
#include "src/ic/keyed-store-generic.h"
#include "src/logging/counters.h"
#include "src/objects/debug-objects.h"
#include "src/objects/shared-function-info.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// Stack checks.

void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard);
}

// -----------------------------------------------------------------------------
// TurboFan support builtins.

TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
  auto js_object = Parameter<JSObject>(Descriptor::kObject);

  // Load the {object}'s elements.
  TNode<FixedArrayBase> source =
      CAST(LoadObjectField(js_object, JSObject::kElementsOffset));
  TNode<FixedArrayBase> target =
      CloneFixedArray(source, ExtractFixedArrayFlag::kFixedArrays);
  StoreObjectField(js_object, JSObject::kElementsOffset, target);
  Return(target);
}

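// The GrowFast*Elements builtins try to grow the elements backing store of
// {object} in generated code so that {key} fits; if that fails they tail call
// Runtime::kGrowArrayElements.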
TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
  auto object = Parameter<JSObject>(Descriptor::kObject);
  auto key = Parameter<Smi>(Descriptor::kKey);

  Label runtime(this, Label::kDeferred);
  TNode<FixedArrayBase> elements = LoadElements(object);
  elements = TryGrowElementsCapacity(object, elements, PACKED_DOUBLE_ELEMENTS,
                                     key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), object,
                  key);
}

TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
  auto object = Parameter<JSObject>(Descriptor::kObject);
  auto key = Parameter<Smi>(Descriptor::kKey);

  Label runtime(this, Label::kDeferred);
  TNode<FixedArrayBase> elements = LoadElements(object);
  elements =
      TryGrowElementsCapacity(object, elements, PACKED_ELEMENTS, key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), object,
                  key);
}

TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
  auto receiver = Parameter<Object>(Descriptor::kReceiver);
  Return(receiver);
}

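// DebugBreakTrampoline checks the function's DebugInfo for the kBreakAtEntry
// flag, calls Runtime::kDebugBreakAtEntry if it is set, and then tail calls
// the code held by the SharedFunctionInfo.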
TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
  Label tailcall_to_shared(this);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto new_target = Parameter<Object>(Descriptor::kJSNewTarget);
  auto arg_count =
      UncheckedParameter<Int32T>(Descriptor::kJSActualArgumentsCount);
  auto function = Parameter<JSFunction>(Descriptor::kJSTarget);

  // Check break-at-entry flag on the debug info.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  TNode<Object> maybe_heap_object_or_smi =
      LoadObjectField(shared, SharedFunctionInfo::kScriptOrDebugInfoOffset);
  TNode<HeapObject> maybe_debug_info =
      TaggedToHeapObject(maybe_heap_object_or_smi, &tailcall_to_shared);
  GotoIfNot(HasInstanceType(maybe_debug_info, InstanceType::DEBUG_INFO_TYPE),
            &tailcall_to_shared);

  {
    TNode<DebugInfo> debug_info = CAST(maybe_debug_info);
    TNode<Smi> flags =
        CAST(LoadObjectField(debug_info, DebugInfo::kFlagsOffset));
    GotoIfNot(SmiToInt32(SmiAnd(flags, SmiConstant(DebugInfo::kBreakAtEntry))),
              &tailcall_to_shared);

    CallRuntime(Runtime::kDebugBreakAtEntry, context, function);
    Goto(&tailcall_to_shared);
  }

  BIND(&tailcall_to_shared);
  // Tail call into code object on the SharedFunctionInfo.
  TNode<Code> code = GetSharedFunctionInfoCode(shared);
  TailCallJSCode(code, context, function, new_target, arg_count);
}

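// Assembler with the helpers shared by the RecordWrite and EphemeronKeyBarrier
// builtins below: marking/page-flag queries, mark-bit lookup, and remembered
// set insertion with an optional slow path into C++.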
class RecordWriteCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit RecordWriteCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  TNode<BoolT> IsMarking() {
    TNode<ExternalReference> is_marking_addr = ExternalConstant(
        ExternalReference::heap_is_marking_flag_address(this->isolate()));
    return Word32NotEqual(Load<Uint8T>(is_marking_addr), Int32Constant(0));
  }

  TNode<BoolT> IsPageFlagSet(TNode<IntPtrT> object, int mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<IntPtrT> flags = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), page,
             IntPtrConstant(BasicMemoryChunk::kFlagsOffset)));
    return WordNotEqual(WordAnd(flags, IntPtrConstant(mask)),
                        IntPtrConstant(0));
  }

  TNode<BoolT> IsWhite(TNode<IntPtrT> object) {
    DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
    TNode<IntPtrT> cell;
    TNode<IntPtrT> mask;
    GetMarkBit(object, &cell, &mask);
    TNode<Int32T> mask32 = TruncateIntPtrToInt32(mask);
    // Non-white has 1 for the first bit, so we only need to check for the
    // first bit.
    return Word32Equal(Word32And(Load<Int32T>(cell), mask32),
                       Int32Constant(0));
  }

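  // GetMarkBit maps a tagged object address onto the page's marking bitmap:
  // the address is split into the bitmap cell holding the object's mark bit
  // and a mask selecting the bit within that cell. Illustrative sketch only
  // (the constants depend on the build): with kTaggedSizeLog2 == 3,
  // Bitmap::kBitsPerCellLog2 == 5 and Bitmap::kBytesPerCellLog2 == 2, an
  // object 0x1230 bytes into its page has mark bit index 0x1230 >> 3 = 0x246,
  // i.e. cell 0x246 >> 5 = 0x12 and bit 0x246 & 31 = 6 within that cell.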
  void GetMarkBit(TNode<IntPtrT> object, TNode<IntPtrT>* cell,
                  TNode<IntPtrT>* mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<IntPtrT> bitmap =
        IntPtrAdd(page, IntPtrConstant(MemoryChunk::kMarkingBitmapOffset));

    {
      // Temp variable to calculate cell offset in bitmap.
      TNode<WordT> r0;
      int shift = Bitmap::kBitsPerCellLog2 + kTaggedSizeLog2 -
                  Bitmap::kBytesPerCellLog2;
      r0 = WordShr(object, IntPtrConstant(shift));
      r0 = WordAnd(r0, IntPtrConstant((kPageAlignmentMask >> shift) &
                                      ~(Bitmap::kBytesPerCell - 1)));
      *cell = IntPtrAdd(bitmap, Signed(r0));
    }
    {
      // Temp variable to calculate bit offset in cell.
      TNode<WordT> r1;
      r1 = WordShr(object, IntPtrConstant(kTaggedSizeLog2));
      r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
      // It seems that the LSB (e.g. cl) is used automatically, so no manual
      // masking is needed. Uncomment the following line otherwise.
      // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
      *mask = WordShl(IntPtrConstant(1), r1);
    }
  }

  TNode<BoolT> ShouldSkipFPRegs(TNode<Smi> mode) {
    return TaggedEqual(mode, SmiConstant(kDontSaveFPRegs));
  }

  TNode<BoolT> ShouldEmitRememberSet(TNode<Smi> remembered_set) {
    return TaggedEqual(remembered_set, SmiConstant(EMIT_REMEMBERED_SET));
  }

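  // The CallCFunction{2,3}WithCallerSavedRegistersMode helpers branch on the
  // fp_mode Smi and emit the same C call twice, once with kDontSaveFPRegs and
  // once with kSaveFPRegs, so that the set of caller-saved registers preserved
  // across the call matches what the write-barrier call site requested.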
  template <typename Ret, typename Arg0, typename Arg1>
  void CallCFunction2WithCallerSavedRegistersMode(
      TNode<ExternalReference> function, TNode<Arg0> arg0, TNode<Arg1> arg1,
      TNode<Smi> mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Ret>::value, kDontSaveFPRegs,
          std::make_pair(MachineTypeOf<Arg0>::value, arg0),
          std::make_pair(MachineTypeOf<Arg1>::value, arg1));
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Ret>::value, kSaveFPRegs,
          std::make_pair(MachineTypeOf<Arg0>::value, arg0),
          std::make_pair(MachineTypeOf<Arg1>::value, arg1));
      Goto(next);
    }
  }

  template <typename Ret, typename Arg0, typename Arg1, typename Arg2>
  void CallCFunction3WithCallerSavedRegistersMode(
      TNode<ExternalReference> function, TNode<Arg0> arg0, TNode<Arg1> arg1,
      TNode<Arg2> arg2, TNode<Smi> mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Ret>::value, kDontSaveFPRegs,
          std::make_pair(MachineTypeOf<Arg0>::value, arg0),
          std::make_pair(MachineTypeOf<Arg1>::value, arg1),
          std::make_pair(MachineTypeOf<Arg2>::value, arg2));
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Ret>::value, kSaveFPRegs,
          std::make_pair(MachineTypeOf<Arg0>::value, arg0),
          std::make_pair(MachineTypeOf<Arg1>::value, arg1),
          std::make_pair(MachineTypeOf<Arg2>::value, arg2));
      Goto(next);
    }
  }

  void InsertIntoRememberedSetAndGotoSlow(TNode<IntPtrT> object,
                                          TNode<IntPtrT> slot, TNode<Smi> mode,
                                          Label* next) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<ExternalReference> function =
        ExternalConstant(ExternalReference::insert_remembered_set_function());
    CallCFunction2WithCallerSavedRegistersMode<Int32T, IntPtrT, IntPtrT>(
        function, page, slot, mode, next);
  }

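  // Fast path for recording an old-to-new slot: try to set the bit directly
  // in the page's OLD_TO_NEW SlotSet. If the SlotSet or its bucket has not
  // been allocated yet, fall back to the C++ insert_remembered_set function.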
  void InsertIntoRememberedSetAndGoto(TNode<IntPtrT> object,
                                      TNode<IntPtrT> slot, TNode<Smi> mode,
                                      Label* next) {
    Label slow_path(this);
    TNode<IntPtrT> page = PageFromAddress(object);

    // Load address of SlotSet
    TNode<IntPtrT> slot_set = LoadSlotSet(page, &slow_path);
    TNode<IntPtrT> slot_offset = IntPtrSub(slot, page);

    // Load bucket
    TNode<IntPtrT> bucket = LoadBucket(slot_set, slot_offset, &slow_path);

    // Update cell
    SetBitInCell(bucket, slot_offset);

    Goto(next);

    BIND(&slow_path);
    InsertIntoRememberedSetAndGotoSlow(object, slot, mode, next);
  }

  TNode<IntPtrT> LoadSlotSet(TNode<IntPtrT> page, Label* slow_path) {
    TNode<IntPtrT> slot_set = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), page,
             IntPtrConstant(MemoryChunk::kOldToNewSlotSetOffset)));
    GotoIf(WordEqual(slot_set, IntPtrConstant(0)), slow_path);

    return slot_set;
  }

  TNode<IntPtrT> LoadBucket(TNode<IntPtrT> slot_set, TNode<WordT> slot_offset,
                            Label* slow_path) {
    TNode<WordT> bucket_index =
        WordShr(slot_offset, SlotSet::kBitsPerBucketLog2 + kTaggedSizeLog2);
    TNode<IntPtrT> bucket = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), slot_set,
             WordShl(bucket_index, kSystemPointerSizeLog2)));
    GotoIf(WordEqual(bucket, IntPtrConstant(0)), slow_path);
    return bucket;
  }

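  // SetBitInCell selects the 32-bit cell within the bucket and the bit within
  // that cell for a given slot offset. Illustrative sketch only (the constants
  // depend on the build): with kTaggedSizeLog2 == 3, SlotSet::kBitsPerCellLog2
  // == 5 and SlotSet::kCellSizeBytesLog2 == 2, a slot 0x1230 bytes into its
  // page has slot index 0x1230 >> 3 = 0x246, cell byte offset
  // (0x246 >> 5) << 2 = 0x48 within the bucket, and bit 0x246 & 31 = 6.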
  void SetBitInCell(TNode<IntPtrT> bucket, TNode<WordT> slot_offset) {
    // Load cell value
    TNode<WordT> cell_offset = WordAnd(
        WordShr(slot_offset, SlotSet::kBitsPerCellLog2 + kTaggedSizeLog2 -
                                 SlotSet::kCellSizeBytesLog2),
        IntPtrConstant((SlotSet::kCellsPerBucket - 1)
                       << SlotSet::kCellSizeBytesLog2));
    TNode<IntPtrT> cell_address =
        UncheckedCast<IntPtrT>(IntPtrAdd(bucket, cell_offset));
    TNode<IntPtrT> old_cell_value =
        ChangeInt32ToIntPtr(Load<Int32T>(cell_address));

    // Calculate new cell value
    TNode<WordT> bit_index = WordAnd(WordShr(slot_offset, kTaggedSizeLog2),
                                     IntPtrConstant(SlotSet::kBitsPerCell - 1));
    TNode<IntPtrT> new_cell_value = UncheckedCast<IntPtrT>(
        WordOr(old_cell_value, WordShl(IntPtrConstant(1), bit_index)));

    // Update cell value
    StoreNoWriteBarrier(MachineRepresentation::kWord32, cell_address,
                        TruncateIntPtrToInt32(new_cell_value));
  }
};

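// RecordWrite is the out-of-line part of the write barrier emitted by
// generated code. Depending on the remembered-set mode it inserts old-to-new
// slots into the page's remembered set, and while incremental marking is on
// it notifies the marker (write_barrier_marking_from_code) about unmarked
// values and about slots pointing into evacuation candidates.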
TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
  Label generational_wb(this);
  Label incremental_wb(this);
  Label exit(this);

  auto remembered_set = UncheckedParameter<Smi>(Descriptor::kRememberedSet);
  Branch(ShouldEmitRememberSet(remembered_set), &generational_wb,
         &incremental_wb);

  BIND(&generational_wb);
  {
    Label test_old_to_young_flags(this);
    Label store_buffer_exit(this), store_buffer_incremental_wb(this);

    // When incremental marking is not on, we skip cross generation pointer
    // checking here, because there are checks for
    // `kPointersFromHereAreInterestingMask` and
    // `kPointersToHereAreInterestingMask` in
    // `src/compiler/<arch>/code-generator-<arch>.cc` before calling this stub,
    // which serve as the cross generation check.
    auto slot = UncheckedParameter<IntPtrT>(Descriptor::kSlot);
    Branch(IsMarking(), &test_old_to_young_flags, &store_buffer_exit);

    BIND(&test_old_to_young_flags);
    {
      // TODO(ishell): do a new-space range check instead.
      TNode<IntPtrT> value =
          BitcastTaggedToWord(Load(MachineType::TaggedPointer(), slot));

      // TODO(albertnetymk): Try to cache the page flag for value and object,
      // instead of calling IsPageFlagSet each time.
      TNode<BoolT> value_is_young =
          IsPageFlagSet(value, MemoryChunk::kIsInYoungGenerationMask);
      GotoIfNot(value_is_young, &incremental_wb);

      TNode<IntPtrT> object =
          BitcastTaggedToWord(UntypedParameter(Descriptor::kObject));
      TNode<BoolT> object_is_young =
          IsPageFlagSet(object, MemoryChunk::kIsInYoungGenerationMask);
      Branch(object_is_young, &incremental_wb, &store_buffer_incremental_wb);
    }

    BIND(&store_buffer_exit);
    {
      auto fp_mode = UncheckedParameter<Smi>(Descriptor::kFPMode);
      TNode<IntPtrT> object =
          BitcastTaggedToWord(UntypedParameter(Descriptor::kObject));
      InsertIntoRememberedSetAndGoto(object, slot, fp_mode, &exit);
    }

    BIND(&store_buffer_incremental_wb);
    {
      auto fp_mode = UncheckedParameter<Smi>(Descriptor::kFPMode);
      TNode<IntPtrT> object =
          BitcastTaggedToWord(UntypedParameter(Descriptor::kObject));
      InsertIntoRememberedSetAndGoto(object, slot, fp_mode, &incremental_wb);
    }
  }

  BIND(&incremental_wb);
  {
    Label call_incremental_wb(this);

    auto slot = UncheckedParameter<IntPtrT>(Descriptor::kSlot);
    TNode<IntPtrT> value =
        BitcastTaggedToWord(Load(MachineType::TaggedPointer(), slot));

    // There are two cases in which we need to call the incremental write
    // barrier.
    // 1) value_is_white
    GotoIf(IsWhite(value), &call_incremental_wb);

    // 2) is_compacting && value_in_EC && obj_isnt_skip
    // is_compacting = true when is_marking = true
    GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
              &exit);

    TNode<IntPtrT> object =
        BitcastTaggedToWord(UntypedParameter(Descriptor::kObject));
    Branch(
        IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
        &exit, &call_incremental_wb);

    BIND(&call_incremental_wb);
    {
      TNode<ExternalReference> function = ExternalConstant(
          ExternalReference::write_barrier_marking_from_code_function());
      auto fp_mode = UncheckedParameter<Smi>(Descriptor::kFPMode);
      TNode<IntPtrT> object =
          BitcastTaggedToWord(UntypedParameter(Descriptor::kObject));
      CallCFunction2WithCallerSavedRegistersMode<Int32T, IntPtrT, IntPtrT>(
          function, object, slot, fp_mode, &exit);
    }
  }

  BIND(&exit);
  IncrementCounter(isolate()->counters()->write_barriers(), 1);
  Return(TrueConstant());
}

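// EphemeronKeyBarrier simply forwards to the C++ ephemeron key write barrier,
// preserving caller-saved registers according to the requested FP mode.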
TF_BUILTIN(EphemeronKeyBarrier, RecordWriteCodeStubAssembler) {
  Label exit(this);

  TNode<ExternalReference> function = ExternalConstant(
      ExternalReference::ephemeron_key_write_barrier_function());
  TNode<ExternalReference> isolate_constant =
      ExternalConstant(ExternalReference::isolate_address(isolate()));
  auto address = UncheckedParameter<IntPtrT>(Descriptor::kSlotAddress);
  TNode<IntPtrT> object =
      BitcastTaggedToWord(UntypedParameter(Descriptor::kObject));
  TNode<Smi> fp_mode = UncheckedParameter<Smi>(Descriptor::kFPMode);
  CallCFunction3WithCallerSavedRegistersMode<Int32T, IntPtrT, IntPtrT,
                                             ExternalReference>(
      function, object, address, isolate_constant, fp_mode, &exit);

  BIND(&exit);
  IncrementCounter(isolate()->counters()->write_barriers(), 1);
  Return(TrueConstant());
}

class DeletePropertyBaseAssembler : public AccessorAssembler {
 public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  void DeleteDictionaryProperty(TNode<Object> receiver,
                                TNode<NameDictionary> properties,
                                TNode<Name> name, TNode<Context> context,
                                Label* dont_delete, Label* notfound) {
    TVARIABLE(IntPtrT, var_name_index);
    Label dictionary_found(this, &var_name_index);
    NameDictionaryLookup<NameDictionary>(properties, name, &dictionary_found,
                                         &var_name_index, notfound);

    BIND(&dictionary_found);
    TNode<IntPtrT> key_index = var_name_index.value();
    TNode<Uint32T> details = LoadDetailsByKeyIndex(properties, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);
    // Overwrite the entry itself (see NameDictionary::SetEntry).
    TNode<Oddball> filler = TheHoleConstant();
    DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kTheHoleValue));
    StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
    StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                         SKIP_WRITE_BARRIER);
    StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
                                           SmiConstant(0));

    // Update bookkeeping information (see NameDictionary::ElementRemoved).
    TNode<Smi> nof = GetNumberOfElements<NameDictionary>(properties);
    TNode<Smi> new_nof = SmiSub(nof, SmiConstant(1));
    SetNumberOfElements<NameDictionary>(properties, new_nof);
    TNode<Smi> num_deleted =
        GetNumberOfDeletedElements<NameDictionary>(properties);
    TNode<Smi> new_deleted = SmiAdd(num_deleted, SmiConstant(1));
    SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);

    // Shrink the dictionary if necessary (see NameDictionary::Shrink).
    Label shrinking_done(this);
    TNode<Smi> capacity = GetCapacity<NameDictionary>(properties);
    GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
    GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);
    CallRuntime(Runtime::kShrinkPropertyDictionary, context, receiver);
    Goto(&shrinking_done);
    BIND(&shrinking_done);

    Return(TrueConstant());
  }
};

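// Implements the `delete` operator. The fast path covers dictionary-mode
// receivers with unique-name keys; proxies go to ProxyDeleteProperty, and
// everything else (integer indices, fast-mode objects, custom-elements
// receivers) falls through to Runtime::kDeleteProperty.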
TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
  auto receiver = Parameter<Object>(Descriptor::kObject);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto language_mode = Parameter<Smi>(Descriptor::kLanguageMode);
  auto context = Parameter<Context>(Descriptor::kContext);

  TVARIABLE(IntPtrT, var_index);
  TVARIABLE(Name, var_unique);
  Label if_index(this, &var_index), if_unique_name(this), if_notunique(this),
      if_notfound(this), slow(this), if_proxy(this);

  GotoIf(TaggedIsSmi(receiver), &slow);
  TNode<Map> receiver_map = LoadMap(CAST(receiver));
  TNode<Uint16T> instance_type = LoadMapInstanceType(receiver_map);
  GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), &if_proxy);
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), &slow);
  TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
            &if_notunique);

  BIND(&if_index);
  {
    Comment("integer index");
    Goto(&slow);  // TODO(jkummerow): Implement more smarts here.
  }

  BIND(&if_unique_name);
  {
    Comment("key is unique name");
    CheckForAssociatedProtector(var_unique.value(), &slow);

    Label dictionary(this), dont_delete(this);
    GotoIf(IsDictionaryMap(receiver_map), &dictionary);

    // Fast properties need to clear recorded slots, which can only be done
    // in C++.
    Goto(&slow);

    BIND(&dictionary);
    {
      InvalidateValidityCellIfPrototype(receiver_map);

      TNode<NameDictionary> properties =
          CAST(LoadSlowProperties(CAST(receiver)));
      DeleteDictionaryProperty(receiver, properties, var_unique.value(),
                               context, &dont_delete, &if_notfound);
    }

    BIND(&dont_delete);
    {
      STATIC_ASSERT(LanguageModeSize == 2);
      GotoIf(SmiNotEqual(language_mode, SmiConstant(LanguageMode::kSloppy)),
             &slow);
      Return(FalseConstant());
    }
  }

  BIND(&if_notunique);
  {
    // If the string was not found in the string table, then no object can
    // have a property with that name.
    TryInternalizeString(CAST(key), &if_index, &var_index, &if_unique_name,
                         &var_unique, &if_notfound, &slow);
  }

  BIND(&if_notfound);
  Return(TrueConstant());

  BIND(&if_proxy);
  {
    TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
    GotoIf(IsPrivateSymbol(name), &slow);
    TailCallBuiltin(Builtins::kProxyDeleteProperty, context, receiver, name,
                    language_mode);
  }

  BIND(&slow);
  {
    TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
                    language_mode);
  }
}

namespace {

class SetOrCopyDataPropertiesAssembler : public CodeStubAssembler {
 public:
  explicit SetOrCopyDataPropertiesAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

 protected:
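  // Shared body of the SetDataProperties builtin (use_set == true: stores go
  // through [[Set]] semantics via KeyedStoreGenericGenerator::SetProperty)
  // and the CopyDataProperties builtin (use_set == false: properties are
  // defined as in an object literal via SetPropertyInLiteral).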
  TNode<Object> SetOrCopyDataProperties(TNode<Context> context,
                                        TNode<JSReceiver> target,
                                        TNode<Object> source, Label* if_runtime,
                                        bool use_set = true) {
    Label if_done(this), if_noelements(this),
        if_sourcenotjsobject(this, Label::kDeferred);

    // JSPrimitiveWrapper wrappers for numbers don't have any enumerable own
    // properties, so we can immediately skip the whole operation if {source}
    // is a Smi.
    GotoIf(TaggedIsSmi(source), &if_done);

    // Otherwise check if {source} is a proper JSObject, and if not, defer
    // to testing for non-empty strings below.
    TNode<Map> source_map = LoadMap(CAST(source));
    TNode<Uint16T> source_instance_type = LoadMapInstanceType(source_map);
    GotoIfNot(IsJSObjectInstanceType(source_instance_type),
              &if_sourcenotjsobject);

    TNode<FixedArrayBase> source_elements = LoadElements(CAST(source));
    GotoIf(IsEmptyFixedArray(source_elements), &if_noelements);
    Branch(IsEmptySlowElementDictionary(source_elements), &if_noelements,
           if_runtime);

    BIND(&if_noelements);
    {
      // If the target is deprecated, the object will be updated on first
      // store. If the source for that store equals the target, this will
      // invalidate the cached representation of the source. Handle this case
      // in runtime.
      TNode<Map> target_map = LoadMap(target);
      GotoIf(IsDeprecatedMap(target_map), if_runtime);

      if (use_set) {
        TNode<BoolT> target_is_simple_receiver = IsSimpleObjectMap(target_map);
        ForEachEnumerableOwnProperty(
            context, source_map, CAST(source), kEnumerationOrder,
            [=](TNode<Name> key, TNode<Object> value) {
              KeyedStoreGenericGenerator::SetProperty(
                  state(), context, target, target_is_simple_receiver, key,
                  value, LanguageMode::kStrict);
            },
            if_runtime);
      } else {
        ForEachEnumerableOwnProperty(
            context, source_map, CAST(source), kEnumerationOrder,
            [=](TNode<Name> key, TNode<Object> value) {
              CallBuiltin(Builtins::kSetPropertyInLiteral, context, target, key,
                          value);
            },
            if_runtime);
      }
      Goto(&if_done);
    }

    BIND(&if_sourcenotjsobject);
    {
      // Handle other JSReceivers in the runtime.
      GotoIf(IsJSReceiverInstanceType(source_instance_type), if_runtime);

      // Non-empty strings are the only non-JSReceivers that need to be
      // handled explicitly by Object.assign() and CopyDataProperties.
      GotoIfNot(IsStringInstanceType(source_instance_type), &if_done);
      TNode<IntPtrT> source_length = LoadStringLengthAsWord(CAST(source));
      Branch(IntPtrEqual(source_length, IntPtrConstant(0)), &if_done,
             if_runtime);
    }

    BIND(&if_done);
    return UndefinedConstant();
  }
};

}  // namespace

// ES #sec-copydataproperties
TF_BUILTIN(CopyDataProperties, SetOrCopyDataPropertiesAssembler) {
  auto target = Parameter<JSObject>(Descriptor::kTarget);
  auto source = Parameter<Object>(Descriptor::kSource);
  auto context = Parameter<Context>(Descriptor::kContext);

  CSA_ASSERT(this, TaggedNotEqual(target, source));

  Label if_runtime(this, Label::kDeferred);
  Return(SetOrCopyDataProperties(context, target, source, &if_runtime, false));

  BIND(&if_runtime);
  TailCallRuntime(Runtime::kCopyDataProperties, context, target, source);
}

TF_BUILTIN(SetDataProperties, SetOrCopyDataPropertiesAssembler) {
  auto target = Parameter<JSReceiver>(Descriptor::kTarget);
  auto source = Parameter<Object>(Descriptor::kSource);
  auto context = Parameter<Context>(Descriptor::kContext);

  Label if_runtime(this, Label::kDeferred);
  Return(SetOrCopyDataProperties(context, target, source, &if_runtime, true));

  BIND(&if_runtime);
  TailCallRuntime(Runtime::kSetDataProperties, context, target, source);
}

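// ForInEnumerate produces the for-in state for {receiver}: the receiver map
// if the enum cache is usable, the empty fixed array if there is nothing to
// enumerate, and otherwise the result of Runtime::kForInEnumerate.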
TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
  auto receiver = Parameter<JSReceiver>(Descriptor::kReceiver);
  auto context = Parameter<Context>(Descriptor::kContext);

  Label if_empty(this), if_runtime(this, Label::kDeferred);
  TNode<Map> receiver_map = CheckEnumCache(receiver, &if_empty, &if_runtime);
  Return(receiver_map);

  BIND(&if_empty);
  Return(EmptyFixedArrayConstant());

  BIND(&if_runtime);
  TailCallRuntime(Runtime::kForInEnumerate, context, receiver);
}

TF_BUILTIN(ForInFilter, CodeStubAssembler) {
  auto key = Parameter<String>(Descriptor::kKey);
  auto object = Parameter<HeapObject>(Descriptor::kObject);
  auto context = Parameter<Context>(Descriptor::kContext);

  Label if_true(this), if_false(this);
  TNode<Oddball> result = HasProperty(context, object, key, kForInHasProperty);
  Branch(IsTrue(result), &if_true, &if_false);

  BIND(&if_true);
  Return(key);

  BIND(&if_false);
  Return(UndefinedConstant());
}

TF_BUILTIN(SameValue, CodeStubAssembler) {
  auto lhs = Parameter<Object>(Descriptor::kLeft);
  auto rhs = Parameter<Object>(Descriptor::kRight);

  Label if_true(this), if_false(this);
  BranchIfSameValue(lhs, rhs, &if_true, &if_false);

  BIND(&if_true);
  Return(TrueConstant());

  BIND(&if_false);
  Return(FalseConstant());
}

TF_BUILTIN(SameValueNumbersOnly, CodeStubAssembler) {
  auto lhs = Parameter<Object>(Descriptor::kLeft);
  auto rhs = Parameter<Object>(Descriptor::kRight);

  Label if_true(this), if_false(this);
  BranchIfSameValue(lhs, rhs, &if_true, &if_false, SameValueMode::kNumbersOnly);

  BIND(&if_true);
  Return(TrueConstant());

  BIND(&if_false);
  Return(FalseConstant());
}

TF_BUILTIN(AdaptorWithBuiltinExitFrame, CodeStubAssembler) {
  auto target = Parameter<JSFunction>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto c_function = UncheckedParameter<WordT>(Descriptor::kCFunction);

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  TNode<Context> context = LoadJSFunctionContext(target);

  auto actual_argc =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);

  TVARIABLE(Int32T, pushed_argc, actual_argc);

#ifdef V8_NO_ARGUMENTS_ADAPTOR
  TNode<SharedFunctionInfo> shared = LoadJSFunctionSharedFunctionInfo(target);

  TNode<Int32T> formal_count =
      UncheckedCast<Int32T>(LoadSharedFunctionInfoFormalParameterCount(shared));

  // The number of arguments pushed is the maximum of the actual arguments
  // count and the formal parameters count, except when the formal parameters
  // count is the kDontAdaptArgumentsSentinel.
  Label check_argc(this), update_argc(this), done_argc(this);

  Branch(Word32Equal(formal_count, Int32Constant(kDontAdaptArgumentsSentinel)),
         &done_argc, &check_argc);
  BIND(&check_argc);
  Branch(Int32GreaterThan(formal_count, pushed_argc.value()), &update_argc,
         &done_argc);
  BIND(&update_argc);
  pushed_argc = formal_count;
  Goto(&done_argc);
  BIND(&done_argc);
#endif

  // Update arguments count for CEntry to contain the number of arguments
  // including the receiver and the extra arguments.
  TNode<Int32T> argc = Int32Add(
      pushed_argc.value(),
      Int32Constant(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));

  const bool builtin_exit_frame = true;
  TNode<Code> code = HeapConstant(CodeFactory::CEntry(
      isolate(), 1, kDontSaveFPRegs, kArgvOnStack, builtin_exit_frame));

  // Unconditionally push argc, target and new target as extra stack arguments.
  // They will be used by stack frame iterators when constructing a stack
  // trace.
  TailCallStub(CEntry1ArgvOnStackDescriptor{},  // descriptor
               code, context,       // standard arguments for TailCallStub
               argc, c_function,    // register arguments
               TheHoleConstant(),   // additional stack argument 1 (padding)
               SmiFromInt32(argc),  // additional stack argument 2
               target,              // additional stack argument 3
               new_target);         // additional stack argument 4
}

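// The Allocate* builtins below tail call into the runtime to perform an
// allocation of the requested size. The flags encode whether double alignment
// is required (always false here) and whether large object space may be used;
// the "Regular" variants disallow large object allocation.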
TF_BUILTIN(AllocateInYoungGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  TNode<Smi> allocation_flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                               AllowLargeObjectAllocationFlag::encode(true)));
  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), allocation_flags);
}

TF_BUILTIN(AllocateRegularInYoungGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  TNode<Smi> allocation_flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                               AllowLargeObjectAllocationFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), allocation_flags);
}

TF_BUILTIN(AllocateInOldGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  TNode<Smi> runtime_flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                               AllowLargeObjectAllocationFlag::encode(true)));
  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), runtime_flags);
}

TF_BUILTIN(AllocateRegularInOldGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  TNode<Smi> runtime_flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                               AllowLargeObjectAllocationFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), runtime_flags);
}

TF_BUILTIN(Abort, CodeStubAssembler) {
  auto message_id = Parameter<Smi>(Descriptor::kMessageOrMessageId);
  TailCallRuntime(Runtime::kAbort, NoContextConstant(), message_id);
}

TF_BUILTIN(AbortCSAAssert, CodeStubAssembler) {
  auto message = Parameter<String>(Descriptor::kMessageOrMessageId);
  TailCallRuntime(Runtime::kAbortCSAAssert, NoContextConstant(), message);
}

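// The Generate_CEntry_* variants below simply instantiate Generate_CEntry for
// each supported combination of result size, FP-register saving, argv
// location, and builtin exit frame.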
void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return1_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, true);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return2_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, true);
}

#if !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)
void Builtins::Generate_MemCopyUint8Uint8(MacroAssembler* masm) {
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
#endif  // !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)

#ifndef V8_TARGET_ARCH_IA32
void Builtins::Generate_MemMove(MacroAssembler* masm) {
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
#endif  // V8_TARGET_ARCH_IA32

// ES6 [[Get]] operation.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
  auto object = Parameter<Object>(Descriptor::kObject);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto context = Parameter<Context>(Descriptor::kContext);
  // TODO(duongn): consider tailcalling to GetPropertyWithReceiver(object,
  // object, key, OnNonExistent::kReturnUndefined).
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
        TVARIABLE(Object, var_value);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  TryPrototypeChainLookup(object, object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Return(UndefinedConstant());

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetProperty, context, object, key);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    TNode<Object> name = CallBuiltin(Builtins::kToName, context, key);

    // The {object} is a JSProxy instance; look up the {name} on it, passing
    // {object} both as receiver and holder. If {name} is absent we can safely
    // return undefined from here.
    TailCallBuiltin(Builtins::kProxyGetProperty, context, object, name, object,
                    SmiConstant(OnNonExistent::kReturnUndefined));
  }
}

// ES6 [[Get]] operation with Receiver.
TF_BUILTIN(GetPropertyWithReceiver, CodeStubAssembler) {
  auto object = Parameter<Object>(Descriptor::kObject);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto receiver = Parameter<Object>(Descriptor::kReceiver);
  auto on_non_existent = Parameter<Object>(Descriptor::kOnNonExistent);
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
        TVARIABLE(Object, var_value);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  TryPrototypeChainLookup(receiver, object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Label throw_reference_error(this);
  GotoIf(TaggedEqual(on_non_existent,
                     SmiConstant(OnNonExistent::kThrowReferenceError)),
         &throw_reference_error);
  CSA_ASSERT(this, TaggedEqual(on_non_existent,
                               SmiConstant(OnNonExistent::kReturnUndefined)));
  Return(UndefinedConstant());

  BIND(&throw_reference_error);
  Return(CallRuntime(Runtime::kThrowReferenceError, context, key));

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetPropertyWithReceiver, context, object, key,
                  receiver, on_non_existent);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));

    // The proxy cannot handle private symbols, so bail out.
    GotoIf(IsPrivateSymbol(name), &if_slow);

    // The {object} is a JSProxy instance; look up the {name} on it, passing
    // {object} both as receiver and holder. If {name} is absent we can safely
    // return undefined from here.
    TailCallBuiltin(Builtins::kProxyGetProperty, context, object, name,
                    receiver, on_non_existent);
  }
}

// ES6 [[Set]] operation.
TF_BUILTIN(SetProperty, CodeStubAssembler) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto receiver = Parameter<Object>(Descriptor::kReceiver);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto value = Parameter<Object>(Descriptor::kValue);

  KeyedStoreGenericGenerator::SetProperty(state(), context, receiver, key,
                                          value, LanguageMode::kStrict);
}

// ES6 CreateDataProperty(), specialized for the case where objects are still
// being initialized, and have not yet been made accessible to the user. Thus,
// any operation here should be unobservable until after the object has been
// returned.
TF_BUILTIN(SetPropertyInLiteral, CodeStubAssembler) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto receiver = Parameter<JSObject>(Descriptor::kReceiver);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto value = Parameter<Object>(Descriptor::kValue);

  KeyedStoreGenericGenerator::SetPropertyInLiteral(state(), context, receiver,
                                                   key, value);
}

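// InstantiateAsmJs attempts to instantiate the asm.js module belonging to
// {function} via Runtime::kInstantiateAsmJs, passing along the (stdlib,
// foreign, heap) arguments provided by the caller. On failure it tail calls
// {function} again, whose code has been reset to the compile-lazy builtin.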
TF_BUILTIN(InstantiateAsmJs, CodeStubAssembler) {
  Label tailcall_to_function(this);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto arg_count =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto function = Parameter<JSFunction>(Descriptor::kTarget);

  // Retrieve arguments from caller (stdlib, foreign, heap).
  CodeStubArguments args(this, arg_count);
  TNode<Object> stdlib = args.GetOptionalArgumentValue(0);
  TNode<Object> foreign = args.GetOptionalArgumentValue(1);
  TNode<Object> heap = args.GetOptionalArgumentValue(2);

  // Call into the runtime; on success just pass the result to the caller and
  // pop all arguments. A Smi 0 is returned on failure, an object on success.
  TNode<Object> maybe_result_or_smi_zero = CallRuntime(
      Runtime::kInstantiateAsmJs, context, function, stdlib, foreign, heap);
  GotoIf(TaggedIsSmi(maybe_result_or_smi_zero), &tailcall_to_function);

#ifdef V8_NO_ARGUMENTS_ADAPTOR
  TNode<SharedFunctionInfo> shared = LoadJSFunctionSharedFunctionInfo(function);
  TNode<Int32T> parameter_count =
      UncheckedCast<Int32T>(LoadSharedFunctionInfoFormalParameterCount(shared));
  // This builtin intercepts a call to {function}, where the number of
  // arguments pushed is the maximum of the actual arguments count and the
  // formal parameters count.
  Label argc_lt_param_count(this), argc_ge_param_count(this);
  Branch(Int32LessThan(arg_count, parameter_count), &argc_lt_param_count,
         &argc_ge_param_count);
  BIND(&argc_lt_param_count);
  PopAndReturn(Int32Add(parameter_count, Int32Constant(1)),
               maybe_result_or_smi_zero);
  BIND(&argc_ge_param_count);
#endif
  args.PopAndReturn(maybe_result_or_smi_zero);

  BIND(&tailcall_to_function);
  // On failure, tail call back to regular JavaScript by re-calling the given
  // function, which has been reset to the compile-lazy builtin.
  TNode<Code> code = CAST(LoadObjectField(function, JSFunction::kCodeOffset));
  TailCallJSCode(code, context, function, new_target, arg_count);
}

}  // namespace internal
}  // namespace v8