1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/api/api.h"
6 #include "src/baseline/baseline.h"
7 #include "src/builtins/builtins-utils-gen.h"
8 #include "src/builtins/builtins.h"
9 #include "src/codegen/code-stub-assembler.h"
10 #include "src/codegen/interface-descriptors-inl.h"
11 #include "src/codegen/macro-assembler.h"
12 #include "src/common/globals.h"
13 #include "src/execution/frame-constants.h"
14 #include "src/heap/memory-chunk.h"
15 #include "src/ic/accessor-assembler.h"
16 #include "src/ic/keyed-store-generic.h"
17 #include "src/logging/counters.h"
18 #include "src/objects/debug-objects.h"
19 #include "src/objects/shared-function-info.h"
20 #include "src/runtime/runtime.h"
21
22 namespace v8 {
23 namespace internal {
24
25 // -----------------------------------------------------------------------------
26 // Stack checks.
27
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  // Delegate the stack-limit / interrupt check entirely to the runtime.
  masm->TailCallRuntime(Runtime::kStackGuard);
}
31
32 // -----------------------------------------------------------------------------
33 // TurboFan support builtins.
34
TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
  // Replaces the receiver's elements backing store with a fresh copy of
  // itself and returns that copy.
  auto object = Parameter<JSObject>(Descriptor::kObject);

  TNode<FixedArrayBase> old_elements =
      CAST(LoadObjectField(object, JSObject::kElementsOffset));
  TNode<FixedArrayBase> new_elements =
      CloneFixedArray(old_elements, ExtractFixedArrayFlag::kFixedArrays);
  StoreObjectField(object, JSObject::kElementsOffset, new_elements);
  Return(new_elements);
}
46
TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
  // Tries to grow the receiver's PACKED_DOUBLE_ELEMENTS backing store in
  // generated code; falls back to the GrowArrayElements runtime function.
  auto object = Parameter<JSObject>(Descriptor::kObject);
  auto key = Parameter<Smi>(Descriptor::kKey);

  Label call_runtime(this, Label::kDeferred);
  TNode<FixedArrayBase> current_elements = LoadElements(object);
  TNode<FixedArrayBase> grown_elements = TryGrowElementsCapacity(
      object, current_elements, PACKED_DOUBLE_ELEMENTS, key, &call_runtime);
  Return(grown_elements);

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), object,
                  key);
}
61
TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
  // Tries to grow the receiver's PACKED_ELEMENTS backing store in generated
  // code; falls back to the GrowArrayElements runtime function.
  auto object = Parameter<JSObject>(Descriptor::kObject);
  auto key = Parameter<Smi>(Descriptor::kKey);

  Label call_runtime(this, Label::kDeferred);
  TNode<FixedArrayBase> current_elements = LoadElements(object);
  TNode<FixedArrayBase> grown_elements = TryGrowElementsCapacity(
      object, current_elements, PACKED_ELEMENTS, key, &call_runtime);
  Return(grown_elements);

  BIND(&call_runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), object,
                  key);
}
76
TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
  // Identity builtin: hands the receiver straight back to the caller.
  Return(Parameter<Object>(Descriptor::kReceiver));
}
81
TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
  // Trampoline installed on functions while "break at entry" debugging is
  // active: triggers the debug break if the function's DebugInfo requests it,
  // then tail-calls the function's real code.
  Label tailcall_to_shared(this);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto new_target = Parameter<Object>(Descriptor::kJSNewTarget);
  auto arg_count =
      UncheckedParameter<Int32T>(Descriptor::kJSActualArgumentsCount);
  auto function = Parameter<JSFunction>(Descriptor::kJSTarget);

  // Check break-at-entry flag on the debug info.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  TNode<Object> maybe_heap_object_or_smi =
      LoadObjectField(shared, SharedFunctionInfo::kScriptOrDebugInfoOffset);
  // The field holds either a script/Smi or a DebugInfo; only the DebugInfo
  // case is interesting here, everything else goes straight to the tail call.
  TNode<HeapObject> maybe_debug_info =
      TaggedToHeapObject(maybe_heap_object_or_smi, &tailcall_to_shared);
  GotoIfNot(HasInstanceType(maybe_debug_info, InstanceType::DEBUG_INFO_TYPE),
            &tailcall_to_shared);

  {
    TNode<DebugInfo> debug_info = CAST(maybe_debug_info);
    TNode<Smi> flags =
        CAST(LoadObjectField(debug_info, DebugInfo::kFlagsOffset));
    GotoIfNot(SmiToInt32(SmiAnd(flags, SmiConstant(DebugInfo::kBreakAtEntry))),
              &tailcall_to_shared);

    // Flag set: enter the debugger before running the function.
    CallRuntime(Runtime::kDebugBreakAtEntry, context, function);
    Goto(&tailcall_to_shared);
  }

  BIND(&tailcall_to_shared);
  // Tail call into code object on the SharedFunctionInfo.
  TNode<CodeT> code = GetSharedFunctionInfoCode(shared);
  TailCallJSCode(code, context, function, new_target, arg_count);
}
116
// Assembler for the RecordWrite / EphemeronKeyBarrier stubs. Implements the
// generational write barrier (remembered-set insertion for old-to-new
// pointers) and the incremental-marking write barrier in generated code,
// falling back to C functions for the slow paths.
class WriteBarrierCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit WriteBarrierCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  // True iff the heap's is-marking flag (read via external reference) is
  // non-zero, i.e. incremental marking is currently active.
  TNode<BoolT> IsMarking() {
    TNode<ExternalReference> is_marking_addr = ExternalConstant(
        ExternalReference::heap_is_marking_flag_address(this->isolate()));
    return Word32NotEqual(Load<Uint8T>(is_marking_addr), Int32Constant(0));
  }

  // Tests `mask` against the flags word of the memory chunk (page) that
  // contains `object`.
  TNode<BoolT> IsPageFlagSet(TNode<IntPtrT> object, int mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<IntPtrT> flags = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), page,
             IntPtrConstant(BasicMemoryChunk::kFlagsOffset)));
    return WordNotEqual(WordAnd(flags, IntPtrConstant(mask)),
                        IntPtrConstant(0));
  }

  // True iff `object` is still unmarked ("white") in its page's marking
  // bitmap.
  TNode<BoolT> IsWhite(TNode<IntPtrT> object) {
    DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
    TNode<IntPtrT> cell;
    TNode<IntPtrT> mask;
    GetMarkBit(object, &cell, &mask);
    TNode<Int32T> mask32 = TruncateIntPtrToInt32(mask);
    // Non-white has 1 for the first bit, so we only need to check for the first
    // bit.
    return Word32Equal(Word32And(Load<Int32T>(cell), mask32), Int32Constant(0));
  }

  // Computes, for `object`, the address of its marking-bitmap cell (*cell)
  // and the bit mask within that cell (*mask).
  void GetMarkBit(TNode<IntPtrT> object, TNode<IntPtrT>* cell,
                  TNode<IntPtrT>* mask) {
    TNode<IntPtrT> page = PageFromAddress(object);
    TNode<IntPtrT> bitmap =
        IntPtrAdd(page, IntPtrConstant(MemoryChunk::kMarkingBitmapOffset));

    {
      // Temp variable to calculate cell offset in bitmap.
      TNode<WordT> r0;
      int shift = Bitmap::kBitsPerCellLog2 + kTaggedSizeLog2 -
                  Bitmap::kBytesPerCellLog2;
      r0 = WordShr(object, IntPtrConstant(shift));
      r0 = WordAnd(r0, IntPtrConstant((kPageAlignmentMask >> shift) &
                                      ~(Bitmap::kBytesPerCell - 1)));
      *cell = IntPtrAdd(bitmap, Signed(r0));
    }
    {
      // Temp variable to calculate bit offset in cell.
      TNode<WordT> r1;
      r1 = WordShr(object, IntPtrConstant(kTaggedSizeLog2));
      r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
      // It seems that LSB(e.g. cl) is automatically used, so no manual masking
      // is needed. Uncomment the following line otherwise.
      // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
      *mask = WordShl(IntPtrConstant(1), r1);
    }
  }

  // Records `slot` (a slot inside `object`) in the page's old-to-new
  // remembered set, calling out to C (insert_remembered_set_function) when
  // the slot set or bucket has not been allocated yet.
  void InsertIntoRememberedSet(TNode<IntPtrT> object, TNode<IntPtrT> slot,
                               SaveFPRegsMode fp_mode) {
    Label slow_path(this), next(this);
    TNode<IntPtrT> page = PageFromAddress(object);

    // Load address of SlotSet
    TNode<IntPtrT> slot_set = LoadSlotSet(page, &slow_path);
    TNode<IntPtrT> slot_offset = IntPtrSub(slot, page);

    // Load bucket
    TNode<IntPtrT> bucket = LoadBucket(slot_set, slot_offset, &slow_path);

    // Update cell
    SetBitInCell(bucket, slot_offset);
    Goto(&next);

    BIND(&slow_path);
    {
      TNode<ExternalReference> function =
          ExternalConstant(ExternalReference::insert_remembered_set_function());
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Int32T>::value, fp_mode,
          std::make_pair(MachineTypeOf<IntPtrT>::value, page),
          std::make_pair(MachineTypeOf<IntPtrT>::value, slot));
      Goto(&next);
    }

    BIND(&next);
  }

  // Loads the page's old-to-new SlotSet pointer; jumps to `slow_path` if it
  // is null (not allocated yet).
  TNode<IntPtrT> LoadSlotSet(TNode<IntPtrT> page, Label* slow_path) {
    TNode<IntPtrT> slot_set = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), page,
             IntPtrConstant(MemoryChunk::kOldToNewSlotSetOffset)));
    GotoIf(WordEqual(slot_set, IntPtrConstant(0)), slow_path);
    return slot_set;
  }

  // Loads the bucket for `slot_offset` from the slot set; jumps to
  // `slow_path` if the bucket is null.
  TNode<IntPtrT> LoadBucket(TNode<IntPtrT> slot_set, TNode<WordT> slot_offset,
                            Label* slow_path) {
    TNode<WordT> bucket_index =
        WordShr(slot_offset, SlotSet::kBitsPerBucketLog2 + kTaggedSizeLog2);
    TNode<IntPtrT> bucket = UncheckedCast<IntPtrT>(
        Load(MachineType::Pointer(), slot_set,
             WordShl(bucket_index, kSystemPointerSizeLog2)));
    GotoIf(WordEqual(bucket, IntPtrConstant(0)), slow_path);
    return bucket;
  }

  // Sets the bit for `slot_offset` inside the bucket's cell. Note: this is a
  // plain read-modify-write (not atomic) 32-bit store.
  void SetBitInCell(TNode<IntPtrT> bucket, TNode<WordT> slot_offset) {
    // Load cell value
    TNode<WordT> cell_offset = WordAnd(
        WordShr(slot_offset, SlotSet::kBitsPerCellLog2 + kTaggedSizeLog2 -
                                 SlotSet::kCellSizeBytesLog2),
        IntPtrConstant((SlotSet::kCellsPerBucket - 1)
                       << SlotSet::kCellSizeBytesLog2));
    TNode<IntPtrT> cell_address =
        UncheckedCast<IntPtrT>(IntPtrAdd(bucket, cell_offset));
    TNode<IntPtrT> old_cell_value =
        ChangeInt32ToIntPtr(Load<Int32T>(cell_address));

    // Calculate new cell value
    TNode<WordT> bit_index = WordAnd(WordShr(slot_offset, kTaggedSizeLog2),
                                     IntPtrConstant(SlotSet::kBitsPerCell - 1));
    TNode<IntPtrT> new_cell_value = UncheckedCast<IntPtrT>(
        WordOr(old_cell_value, WordShl(IntPtrConstant(1), bit_index)));

    // Update cell value
    StoreNoWriteBarrier(MachineRepresentation::kWord32, cell_address,
                        TruncateIntPtrToInt32(new_cell_value));
  }

  // Full generational barrier: records old-to-new slots in the remembered
  // set and, while marking is on, additionally runs the incremental write
  // barrier.
  void GenerationalWriteBarrier(SaveFPRegsMode fp_mode) {
    Label incremental_wb(this), test_old_to_young_flags(this),
        remembered_set_only(this), remembered_set_and_incremental_wb(this),
        next(this);

    // When incremental marking is not on, we skip cross generation pointer
    // checking here, because there are checks for
    // `kPointersFromHereAreInterestingMask` and
    // `kPointersToHereAreInterestingMask` in
    // `src/compiler/<arch>/code-generator-<arch>.cc` before calling this
    // stub, which serves as the cross generation checking.
    auto slot =
        UncheckedParameter<IntPtrT>(WriteBarrierDescriptor::kSlotAddress);
    Branch(IsMarking(), &test_old_to_young_flags, &remembered_set_only);

    BIND(&test_old_to_young_flags);
    {
      // TODO(ishell): do a new-space range check instead.
      TNode<IntPtrT> value = BitcastTaggedToWord(Load<HeapObject>(slot));

      // TODO(albertnetymk): Try to cache the page flag for value and
      // object, instead of calling IsPageFlagSet each time.
      TNode<BoolT> value_is_young =
          IsPageFlagSet(value, MemoryChunk::kIsInYoungGenerationMask);
      GotoIfNot(value_is_young, &incremental_wb);

      // Only old-to-young pointers need a remembered-set entry.
      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      TNode<BoolT> object_is_young =
          IsPageFlagSet(object, MemoryChunk::kIsInYoungGenerationMask);
      Branch(object_is_young, &incremental_wb,
             &remembered_set_and_incremental_wb);
    }

    BIND(&remembered_set_only);
    {
      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      InsertIntoRememberedSet(object, slot, fp_mode);
      Goto(&next);
    }

    BIND(&remembered_set_and_incremental_wb);
    {
      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      InsertIntoRememberedSet(object, slot, fp_mode);
      Goto(&incremental_wb);
    }

    BIND(&incremental_wb);
    {
      TNode<IntPtrT> value = BitcastTaggedToWord(Load<HeapObject>(slot));
      IncrementalWriteBarrier(slot, value, fp_mode);
      Goto(&next);
    }

    BIND(&next);
  }

  // Incremental-marking barrier entry point that fetches the slot's current
  // value itself before delegating to the three-argument overload below.
  void IncrementalWriteBarrier(SaveFPRegsMode fp_mode) {
    auto slot =
        UncheckedParameter<IntPtrT>(WriteBarrierDescriptor::kSlotAddress);
    TNode<IntPtrT> value = BitcastTaggedToWord(Load<HeapObject>(slot));
    IncrementalWriteBarrier(slot, value, fp_mode);
  }

  // Marks `value` (via the C marking function) when it is still white, or
  // when it sits on an evacuation-candidate page and the holder's page does
  // not skip slot recording.
  void IncrementalWriteBarrier(TNode<IntPtrT> slot, TNode<IntPtrT> value,
                               SaveFPRegsMode fp_mode) {
    Label call_incremental_wb(this), next(this);

    // There are two cases we need to call incremental write barrier.
    // 1) value_is_white
    GotoIf(IsWhite(value), &call_incremental_wb);

    // 2) is_compacting && value_in_EC && obj_isnt_skip
    // is_compacting = true when is_marking = true
    GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
              &next);

    {
      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      Branch(
          IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
          &next, &call_incremental_wb);
    }
    BIND(&call_incremental_wb);
    {
      TNode<ExternalReference> function = ExternalConstant(
          ExternalReference::write_barrier_marking_from_code_function());
      TNode<IntPtrT> object = BitcastTaggedToWord(
          UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
      CallCFunctionWithCallerSavedRegisters(
          function, MachineTypeOf<Int32T>::value, fp_mode,
          std::make_pair(MachineTypeOf<IntPtrT>::value, object),
          std::make_pair(MachineTypeOf<IntPtrT>::value, slot));
      Goto(&next);
    }
    BIND(&next);
  }

  // Top-level body of the RecordWrite* builtins. Returns TrueConstant()
  // (callers only need *a* value; see callers of the RecordWrite stubs).
  void GenerateRecordWrite(RememberedSetAction rs_mode,
                           SaveFPRegsMode fp_mode) {
    if (V8_DISABLE_WRITE_BARRIERS_BOOL) {
      // Write barriers compiled out entirely: nothing to record.
      Return(TrueConstant());
      return;
    }
    switch (rs_mode) {
      case RememberedSetAction::kEmit:
        GenerationalWriteBarrier(fp_mode);
        break;
      case RememberedSetAction::kOmit:
        IncrementalWriteBarrier(fp_mode);
        break;
    }
    IncrementCounter(isolate()->counters()->write_barriers(), 1);
    Return(TrueConstant());
  }

  // Body of the EphemeronKeyBarrier* builtins: unconditionally calls the C
  // ephemeron-key write barrier with (object, slot address, isolate).
  void GenerateEphemeronKeyBarrier(SaveFPRegsMode fp_mode) {
    TNode<ExternalReference> function = ExternalConstant(
        ExternalReference::ephemeron_key_write_barrier_function());
    TNode<ExternalReference> isolate_constant =
        ExternalConstant(ExternalReference::isolate_address(isolate()));
    // In this method we limit the allocatable registers so we have to use
    // UncheckedParameter. Parameter does not work because the checked cast
    // needs more registers.
    auto address =
        UncheckedParameter<IntPtrT>(WriteBarrierDescriptor::kSlotAddress);
    TNode<IntPtrT> object = BitcastTaggedToWord(
        UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));

    CallCFunctionWithCallerSavedRegisters(
        function, MachineTypeOf<Int32T>::value, fp_mode,
        std::make_pair(MachineTypeOf<IntPtrT>::value, object),
        std::make_pair(MachineTypeOf<IntPtrT>::value, address),
        std::make_pair(MachineTypeOf<ExternalReference>::value,
                       isolate_constant));

    IncrementCounter(isolate()->counters()->write_barriers(), 1);
    Return(TrueConstant());
  }
};
392
TF_BUILTIN(RecordWriteEmitRememberedSetSaveFP, WriteBarrierCodeStubAssembler) {
  // Write barrier that updates the remembered set; FP registers are saved
  // across the slow-path C call.
  GenerateRecordWrite(RememberedSetAction::kEmit, SaveFPRegsMode::kSave);
}
396
TF_BUILTIN(RecordWriteOmitRememberedSetSaveFP, WriteBarrierCodeStubAssembler) {
  // Write barrier without remembered-set update (incremental marking only);
  // FP registers are saved across the slow-path C call.
  GenerateRecordWrite(RememberedSetAction::kOmit, SaveFPRegsMode::kSave);
}
400
TF_BUILTIN(RecordWriteEmitRememberedSetIgnoreFP,
           WriteBarrierCodeStubAssembler) {
  // Write barrier that updates the remembered set; FP registers are not
  // preserved across the slow-path C call.
  GenerateRecordWrite(RememberedSetAction::kEmit, SaveFPRegsMode::kIgnore);
}
405
TF_BUILTIN(RecordWriteOmitRememberedSetIgnoreFP,
           WriteBarrierCodeStubAssembler) {
  // Write barrier without remembered-set update (incremental marking only);
  // FP registers are not preserved across the slow-path C call.
  GenerateRecordWrite(RememberedSetAction::kOmit, SaveFPRegsMode::kIgnore);
}
410
TF_BUILTIN(EphemeronKeyBarrierSaveFP, WriteBarrierCodeStubAssembler) {
  // Ephemeron-key write barrier, preserving FP registers across the C call.
  GenerateEphemeronKeyBarrier(SaveFPRegsMode::kSave);
}
414
TF_BUILTIN(EphemeronKeyBarrierIgnoreFP, WriteBarrierCodeStubAssembler) {
  // Ephemeron-key write barrier, not preserving FP registers.
  GenerateEphemeronKeyBarrier(SaveFPRegsMode::kIgnore);
}
418
419 #ifdef V8_IS_TSAN
420 class TSANRelaxedStoreCodeStubAssembler : public CodeStubAssembler {
421 public:
TSANRelaxedStoreCodeStubAssembler(compiler::CodeAssemblerState * state)422 explicit TSANRelaxedStoreCodeStubAssembler(
423 compiler::CodeAssemblerState* state)
424 : CodeStubAssembler(state) {}
425
GetExternalReference(int size)426 TNode<ExternalReference> GetExternalReference(int size) {
427 if (size == kInt8Size) {
428 return ExternalConstant(
429 ExternalReference::tsan_relaxed_store_function_8_bits());
430 } else if (size == kInt16Size) {
431 return ExternalConstant(
432 ExternalReference::tsan_relaxed_store_function_16_bits());
433 } else if (size == kInt32Size) {
434 return ExternalConstant(
435 ExternalReference::tsan_relaxed_store_function_32_bits());
436 } else {
437 CHECK_EQ(size, kInt64Size);
438 return ExternalConstant(
439 ExternalReference::tsan_relaxed_store_function_64_bits());
440 }
441 }
442
GenerateTSANRelaxedStore(SaveFPRegsMode fp_mode,int size)443 void GenerateTSANRelaxedStore(SaveFPRegsMode fp_mode, int size) {
444 TNode<ExternalReference> function = GetExternalReference(size);
445 auto address = UncheckedParameter<IntPtrT>(TSANStoreDescriptor::kAddress);
446 TNode<IntPtrT> value = BitcastTaggedToWord(
447 UncheckedParameter<Object>(TSANStoreDescriptor::kValue));
448 CallCFunctionWithCallerSavedRegisters(
449 function, MachineType::Int32(), fp_mode,
450 std::make_pair(MachineType::IntPtr(), address),
451 std::make_pair(MachineType::IntPtr(), value));
452 Return(UndefinedConstant());
453 }
454 };
455
TF_BUILTIN(TSANRelaxedStore8IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  // 8-bit relaxed store notification; FP registers not preserved.
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt8Size);
}
459
TF_BUILTIN(TSANRelaxedStore8SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  // 8-bit relaxed store notification; FP registers preserved.
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt8Size);
}
463
TF_BUILTIN(TSANRelaxedStore16IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  // 16-bit relaxed store notification; FP registers not preserved.
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt16Size);
}
467
TF_BUILTIN(TSANRelaxedStore16SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  // 16-bit relaxed store notification; FP registers preserved.
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt16Size);
}
471
TF_BUILTIN(TSANRelaxedStore32IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  // 32-bit relaxed store notification; FP registers not preserved.
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt32Size);
}
475
TF_BUILTIN(TSANRelaxedStore32SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  // 32-bit relaxed store notification; FP registers preserved.
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt32Size);
}
479
TF_BUILTIN(TSANRelaxedStore64IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  // 64-bit relaxed store notification; FP registers not preserved.
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt64Size);
}
483
TF_BUILTIN(TSANRelaxedStore64SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  // 64-bit relaxed store notification; FP registers preserved.
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt64Size);
}
487
488 class TSANSeqCstStoreCodeStubAssembler : public CodeStubAssembler {
489 public:
TSANSeqCstStoreCodeStubAssembler(compiler::CodeAssemblerState * state)490 explicit TSANSeqCstStoreCodeStubAssembler(compiler::CodeAssemblerState* state)
491 : CodeStubAssembler(state) {}
492
GetExternalReference(int size)493 TNode<ExternalReference> GetExternalReference(int size) {
494 if (size == kInt8Size) {
495 return ExternalConstant(
496 ExternalReference::tsan_seq_cst_store_function_8_bits());
497 } else if (size == kInt16Size) {
498 return ExternalConstant(
499 ExternalReference::tsan_seq_cst_store_function_16_bits());
500 } else if (size == kInt32Size) {
501 return ExternalConstant(
502 ExternalReference::tsan_seq_cst_store_function_32_bits());
503 } else {
504 CHECK_EQ(size, kInt64Size);
505 return ExternalConstant(
506 ExternalReference::tsan_seq_cst_store_function_64_bits());
507 }
508 }
509
GenerateTSANSeqCstStore(SaveFPRegsMode fp_mode,int size)510 void GenerateTSANSeqCstStore(SaveFPRegsMode fp_mode, int size) {
511 TNode<ExternalReference> function = GetExternalReference(size);
512 auto address = UncheckedParameter<IntPtrT>(TSANStoreDescriptor::kAddress);
513 TNode<IntPtrT> value = BitcastTaggedToWord(
514 UncheckedParameter<Object>(TSANStoreDescriptor::kValue));
515 CallCFunctionWithCallerSavedRegisters(
516 function, MachineType::Int32(), fp_mode,
517 std::make_pair(MachineType::IntPtr(), address),
518 std::make_pair(MachineType::IntPtr(), value));
519 Return(UndefinedConstant());
520 }
521 };
522
TF_BUILTIN(TSANSeqCstStore8IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  // 8-bit seq-cst store notification; FP registers not preserved.
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt8Size);
}
526
TF_BUILTIN(TSANSeqCstStore8SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  // 8-bit seq-cst store notification; FP registers preserved.
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt8Size);
}
530
TF_BUILTIN(TSANSeqCstStore16IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  // 16-bit seq-cst store notification; FP registers not preserved.
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt16Size);
}
534
TF_BUILTIN(TSANSeqCstStore16SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  // 16-bit seq-cst store notification; FP registers preserved.
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt16Size);
}
538
TF_BUILTIN(TSANSeqCstStore32IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  // 32-bit seq-cst store notification; FP registers not preserved.
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt32Size);
}
542
TF_BUILTIN(TSANSeqCstStore32SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  // 32-bit seq-cst store notification; FP registers preserved.
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt32Size);
}
546
TF_BUILTIN(TSANSeqCstStore64IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  // 64-bit seq-cst store notification; FP registers not preserved.
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt64Size);
}
550
TF_BUILTIN(TSANSeqCstStore64SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  // 64-bit seq-cst store notification; FP registers preserved.
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt64Size);
}
554
555 class TSANRelaxedLoadCodeStubAssembler : public CodeStubAssembler {
556 public:
TSANRelaxedLoadCodeStubAssembler(compiler::CodeAssemblerState * state)557 explicit TSANRelaxedLoadCodeStubAssembler(compiler::CodeAssemblerState* state)
558 : CodeStubAssembler(state) {}
559
GetExternalReference(int size)560 TNode<ExternalReference> GetExternalReference(int size) {
561 if (size == kInt32Size) {
562 return ExternalConstant(
563 ExternalReference::tsan_relaxed_load_function_32_bits());
564 } else {
565 CHECK_EQ(size, kInt64Size);
566 return ExternalConstant(
567 ExternalReference::tsan_relaxed_load_function_64_bits());
568 }
569 }
570
GenerateTSANRelaxedLoad(SaveFPRegsMode fp_mode,int size)571 void GenerateTSANRelaxedLoad(SaveFPRegsMode fp_mode, int size) {
572 TNode<ExternalReference> function = GetExternalReference(size);
573 auto address = UncheckedParameter<IntPtrT>(TSANLoadDescriptor::kAddress);
574 CallCFunctionWithCallerSavedRegisters(
575 function, MachineType::Int32(), fp_mode,
576 std::make_pair(MachineType::IntPtr(), address));
577 Return(UndefinedConstant());
578 }
579 };
580
TF_BUILTIN(TSANRelaxedLoad32IgnoreFP, TSANRelaxedLoadCodeStubAssembler) {
  // 32-bit relaxed load notification; FP registers not preserved.
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kIgnore, kInt32Size);
}
584
TF_BUILTIN(TSANRelaxedLoad32SaveFP, TSANRelaxedLoadCodeStubAssembler) {
  // 32-bit relaxed load notification; FP registers preserved.
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kSave, kInt32Size);
}
588
TF_BUILTIN(TSANRelaxedLoad64IgnoreFP, TSANRelaxedLoadCodeStubAssembler) {
  // 64-bit relaxed load notification; FP registers not preserved.
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kIgnore, kInt64Size);
}
592
TF_BUILTIN(TSANRelaxedLoad64SaveFP, TSANRelaxedLoadCodeStubAssembler) {
  // 64-bit relaxed load notification; FP registers preserved.
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kSave, kInt64Size);
}
596 #endif // V8_IS_TSAN
597
// Shared logic for the DeleteProperty builtin: removes an entry from a
// receiver's slow-mode property dictionary (NameDictionary or
// SwissNameDictionary, depending on build configuration).
class DeletePropertyBaseAssembler : public AccessorAssembler {
 public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  // NameDictionary flavor: clears the entry, updates element counts, and
  // shrinks the dictionary through the runtime when it becomes sparse.
  void DictionarySpecificDelete(TNode<JSReceiver> receiver,
                                TNode<NameDictionary> properties,
                                TNode<IntPtrT> key_index,
                                TNode<Context> context) {
    // Overwrite the entry itself (see NameDictionary::SetEntry).
    TNode<Oddball> filler = TheHoleConstant();
    DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kTheHoleValue));
    // The-hole is immortal/immovable, so skipping the write barrier is safe.
    StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
    StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                         SKIP_WRITE_BARRIER);
    StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
                                           SmiConstant(0));

    // Update bookkeeping information (see NameDictionary::ElementRemoved).
    TNode<Smi> nof = GetNumberOfElements<NameDictionary>(properties);
    TNode<Smi> new_nof = SmiSub(nof, SmiConstant(1));
    SetNumberOfElements<NameDictionary>(properties, new_nof);
    TNode<Smi> num_deleted =
        GetNumberOfDeletedElements<NameDictionary>(properties);
    TNode<Smi> new_deleted = SmiAdd(num_deleted, SmiConstant(1));
    SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);

    // Shrink the dictionary if necessary (see NameDictionary::Shrink).
    Label shrinking_done(this);
    TNode<Smi> capacity = GetCapacity<NameDictionary>(properties);
    // Only shrink when occupancy drops to <= capacity/4 and the dictionary
    // is not already small (>= 16 elements remain).
    GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
    GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);

    TNode<NameDictionary> new_properties =
        CAST(CallRuntime(Runtime::kShrinkNameDictionary, context, properties));

    StoreJSReceiverPropertiesOrHash(receiver, new_properties);

    Goto(&shrinking_done);
    BIND(&shrinking_done);
  }

  // SwissNameDictionary flavor: delegates deletion (and possible shrinking)
  // to SwissNameDictionaryDelete, installing the shrunk table if one was
  // produced.
  void DictionarySpecificDelete(TNode<JSReceiver> receiver,
                                TNode<SwissNameDictionary> properties,
                                TNode<IntPtrT> key_index,
                                TNode<Context> context) {
    Label shrunk(this), done(this);
    TVARIABLE(SwissNameDictionary, shrunk_table);

    SwissNameDictionaryDelete(properties, key_index, &shrunk, &shrunk_table);
    Goto(&done);
    BIND(&shrunk);
    StoreJSReceiverPropertiesOrHash(receiver, shrunk_table.value());
    Goto(&done);

    BIND(&done);
  }

  // Looks `name` up in `properties` and deletes it unless it is marked
  // DontDelete. Jumps to `notfound` if absent, `dont_delete` if
  // non-configurable; otherwise deletes and returns true from the builtin.
  template <typename Dictionary>
  void DeleteDictionaryProperty(TNode<JSReceiver> receiver,
                                TNode<Dictionary> properties, TNode<Name> name,
                                TNode<Context> context, Label* dont_delete,
                                Label* notfound) {
    TVARIABLE(IntPtrT, var_name_index);
    Label dictionary_found(this, &var_name_index);
    NameDictionaryLookup<Dictionary>(properties, name, &dictionary_found,
                                     &var_name_index, notfound);

    BIND(&dictionary_found);
    TNode<IntPtrT> key_index = var_name_index.value();
    TNode<Uint32T> details = LoadDetailsByKeyIndex(properties, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);

    DictionarySpecificDelete(receiver, properties, key_index, context);

    Return(TrueConstant());
  }
};
677
TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
  // Implements the `delete obj[key]` operation for the fast cases handled in
  // generated code; everything else defers to Runtime::kDeleteProperty.
  auto receiver = Parameter<Object>(Descriptor::kObject);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto language_mode = Parameter<Smi>(Descriptor::kLanguageMode);
  auto context = Parameter<Context>(Descriptor::kContext);

  TVARIABLE(IntPtrT, var_index);
  TVARIABLE(Name, var_unique);
  Label if_index(this, &var_index), if_unique_name(this), if_notunique(this),
      if_notfound(this), slow(this), if_proxy(this);

  // Smis have no properties; proxies and exotic receivers need special
  // handling outside this fast path.
  GotoIf(TaggedIsSmi(receiver), &slow);
  TNode<Map> receiver_map = LoadMap(CAST(receiver));
  TNode<Uint16T> instance_type = LoadMapInstanceType(receiver_map);
  GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), &if_proxy);
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), &slow);
  TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
            &if_notunique);

  BIND(&if_index);
  {
    Comment("integer index");
    Goto(&slow);  // TODO(jkummerow): Implement more smarts here.
  }

  BIND(&if_unique_name);
  {
    Comment("key is unique name");
    CheckForAssociatedProtector(var_unique.value(), &slow);

    Label dictionary(this), dont_delete(this);
    GotoIf(IsDictionaryMap(receiver_map), &dictionary);

    // Fast properties need to clear recorded slots and mark the deleted
    // property as mutable, which can only be done in C++.
    Goto(&slow);

    BIND(&dictionary);
    {
      InvalidateValidityCellIfPrototype(receiver_map);

      TNode<PropertyDictionary> properties =
          CAST(LoadSlowProperties(CAST(receiver)));
      DeleteDictionaryProperty(CAST(receiver), properties, var_unique.value(),
                               context, &dont_delete, &if_notfound);
    }

    BIND(&dont_delete);
    {
      // Deleting a non-configurable property: false in sloppy mode, a
      // TypeError (raised by the runtime) in strict mode.
      STATIC_ASSERT(LanguageModeSize == 2);
      GotoIf(SmiNotEqual(language_mode, SmiConstant(LanguageMode::kSloppy)),
             &slow);
      Return(FalseConstant());
    }
  }

  BIND(&if_notunique);
  {
    // If the string was not found in the string table, then no object can
    // have a property with that name.
    TryInternalizeString(CAST(key), &if_index, &var_index, &if_unique_name,
                         &var_unique, &if_notfound, &slow);
  }

  BIND(&if_notfound);
  // Deleting a missing property succeeds.
  Return(TrueConstant());

  BIND(&if_proxy);
  {
    TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));
    // Private symbols on proxies still go through the runtime.
    GotoIf(IsPrivateSymbol(name), &slow);
    TailCallBuiltin(Builtin::kProxyDeleteProperty, context, receiver, name,
                    language_mode);
  }

  BIND(&slow);
  {
    TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
                    language_mode);
  }
}
759
760 namespace {
761
// Shared assembler for the Object.assign-style builtins below
// (SetDataProperties, CopyDataProperties and the excluded-properties
// variants).
class SetOrCopyDataPropertiesAssembler : public CodeStubAssembler {
 public:
  explicit SetOrCopyDataPropertiesAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

 protected:
  // Allocates a fresh, empty JSObject from the initial map of the current
  // native context's Object function (i.e. the map of a plain `{}`).
  TNode<JSObject> AllocateJsObjectTarget(TNode<Context> context) {
    const TNode<NativeContext> native_context = LoadNativeContext(context);
    const TNode<JSFunction> object_function = Cast(
        LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX));
    const TNode<Map> map =
        Cast(LoadJSFunctionPrototypeOrInitialMap(object_function));
    const TNode<JSObject> target = AllocateJSObjectFromMap(map);
    return target;
  }
  // Copies/sets all enumerable own properties of {source} onto {target} and
  // returns {target}.
  //
  // With {use_set} == true, properties are stored via the generic
  // strict-mode KeyedStoreGeneric [[Set]]; otherwise they are defined via
  // the CreateDataProperty builtin. When {excluded_property_count} /
  // {excluded_property_base} are provided, they describe an array of
  // property keys (base address plus count, entries at decreasing
  // addresses) that must be skipped. Jumps to {if_runtime} for any
  // configuration this fast path cannot handle.
  TNode<Object> SetOrCopyDataProperties(
      TNode<Context> context, TNode<JSReceiver> target, TNode<Object> source,
      Label* if_runtime,
      base::Optional<TNode<IntPtrT>> excluded_property_count = base::nullopt,
      base::Optional<TNode<IntPtrT>> excluded_property_base = base::nullopt,
      bool use_set = true) {
    Label if_done(this), if_noelements(this),
        if_sourcenotjsobject(this, Label::kDeferred);

    // JSPrimitiveWrapper wrappers for numbers don't have any enumerable own
    // properties, so we can immediately skip the whole operation if {source} is
    // a Smi.
    GotoIf(TaggedIsSmi(source), &if_done);

    // Otherwise check if {source} is a proper JSObject, and if not, defer
    // to testing for non-empty strings below.
    TNode<Map> source_map = LoadMap(CAST(source));
    TNode<Uint16T> source_instance_type = LoadMapInstanceType(source_map);
    GotoIfNot(IsJSObjectInstanceType(source_instance_type),
              &if_sourcenotjsobject);

    // Sources with elements (other than the canonical empty fixed array /
    // empty slow element dictionary) are handed off to the runtime.
    TNode<FixedArrayBase> source_elements = LoadElements(CAST(source));
    GotoIf(IsEmptyFixedArray(source_elements), &if_noelements);
    Branch(IsEmptySlowElementDictionary(source_elements), &if_noelements,
           if_runtime);

    BIND(&if_noelements);
    {
      // If the target is deprecated, the object will be updated on first
      // store. If the source for that store equals the target, this will
      // invalidate the cached representation of the source. Handle this case
      // in runtime.
      TNode<Map> target_map = LoadMap(target);
      GotoIf(IsDeprecatedMap(target_map), if_runtime);
      if (use_set) {
        TNode<BoolT> target_is_simple_receiver = IsSimpleObjectMap(target_map);
        ForEachEnumerableOwnProperty(
            context, source_map, CAST(source), kEnumerationOrder,
            [=](TNode<Name> key, TNode<Object> value) {
              KeyedStoreGenericGenerator::SetProperty(
                  state(), context, target, target_is_simple_receiver, key,
                  value, LanguageMode::kStrict);
            },
            if_runtime);
      } else {
        ForEachEnumerableOwnProperty(
            context, source_map, CAST(source), kEnumerationOrder,
            [=](TNode<Name> key, TNode<Object> value) {
              Label skip(this);
              if (excluded_property_count.has_value()) {
                // Linear scan over the excluded keys; jump to {skip} when
                // {key} SameValue-matches any of them. Entry i lives at
                // excluded_property_base - i * kSystemPointerSize.
                BuildFastLoop<IntPtrT>(
                    IntPtrConstant(0), excluded_property_count.value(),
                    [&](TNode<IntPtrT> index) {
                      auto offset = Signed(TimesSystemPointerSize(index));
                      TNode<IntPtrT> location = Signed(
                          IntPtrSub(excluded_property_base.value(), offset));
                      auto property = LoadFullTagged(location);

                      Label continue_label(this);
                      BranchIfSameValue(key, property, &skip, &continue_label);
                      Bind(&continue_label);
                    },
                    1, IndexAdvanceMode::kPost);
              }

              CallBuiltin(Builtin::kCreateDataProperty, context, target, key,
                          value);
              Goto(&skip);
              Bind(&skip);
            },
            if_runtime);
      }
      Goto(&if_done);
    }

    BIND(&if_sourcenotjsobject);
    {
      // Handle other JSReceivers in the runtime.
      GotoIf(IsJSReceiverInstanceType(source_instance_type), if_runtime);

      // Non-empty strings are the only non-JSReceivers that need to be
      // handled explicitly by Object.assign() and CopyDataProperties.
      GotoIfNot(IsStringInstanceType(source_instance_type), &if_done);
      TNode<IntPtrT> source_length = LoadStringLengthAsWord(CAST(source));
      Branch(IntPtrEqual(source_length, IntPtrConstant(0)), &if_done,
             if_runtime);
    }

    BIND(&if_done);
    return target;
  }
};
869
870 } // namespace
871
TF_BUILTIN(CopyDataPropertiesWithExcludedPropertiesOnStack,
           SetOrCopyDataPropertiesAssembler) {
  auto source = UncheckedParameter<Object>(Descriptor::kSource);
  auto excluded_property_count =
      UncheckedParameter<IntPtrT>(Descriptor::kExcludedPropertyCount);
  auto excluded_properties =
      UncheckedParameter<IntPtrT>(Descriptor::kExcludedPropertyBase);
  auto context = Parameter<Context>(Descriptor::kContext);

  // Bail out to the runtime first if {source} is undefined or null.
  Label if_runtime(this, Label::kDeferred);
  GotoIf(IsNullOrUndefined(source), &if_runtime);

  // Define the own enumerable properties of {source} (minus the excluded
  // keys) on a freshly allocated plain object and return it (use_set=false,
  // i.e. CreateDataProperty semantics).
  TNode<JSReceiver> target = AllocateJsObjectTarget(context);
  Return(SetOrCopyDataProperties(context, target, source, &if_runtime,
                                 excluded_property_count, excluded_properties,
                                 false));

  BIND(&if_runtime);
  // The excluded_property_base is passed as a raw stack pointer, but is
  // bitcast to a Smi. This is safe because the stack pointer is aligned, so
  // it looks like a Smi to the GC.
  CSA_DCHECK(this, IntPtrEqual(WordAnd(excluded_properties,
                                       IntPtrConstant(kSmiTagMask)),
                               IntPtrConstant(kSmiTag)));
  TailCallRuntime(Runtime::kCopyDataPropertiesWithExcludedPropertiesOnStack,
                  context, source, SmiTag(excluded_property_count),
                  BitcastWordToTaggedSigned(excluded_properties));
}
901
// Variant receiving the excluded property keys as regular (stack) arguments:
// computes the base address of the key array and dispatches to the
// ...OnStack builtin above.
TF_BUILTIN(CopyDataPropertiesWithExcludedProperties,
           SetOrCopyDataPropertiesAssembler) {
  auto source = UncheckedParameter<Object>(Descriptor::kSource);

  auto excluded_property_count_smi =
      UncheckedParameter<Smi>(Descriptor::kExcludedPropertyCount);
  auto context = Parameter<Context>(Descriptor::kContext);

  auto excluded_property_count = SmiToIntPtr(excluded_property_count_smi);
  CodeStubArguments arguments(this, excluded_property_count);

  // Raw address of the stack slot used as the excluded-key base pointer.
  // NOTE(review): the `count - 2` index mirrors the caller's argument
  // layout — confirm against the call descriptor if that layout changes.
  TNode<IntPtrT> excluded_properties =
      ReinterpretCast<IntPtrT>(arguments.AtIndexPtr(
          IntPtrSub(excluded_property_count, IntPtrConstant(2))));

  arguments.PopAndReturn(CallBuiltin(
      Builtin::kCopyDataPropertiesWithExcludedPropertiesOnStack, context,
      source, excluded_property_count, excluded_properties));
}
921
922 // ES #sec-copydataproperties
TF_BUILTIN(CopyDataProperties, SetOrCopyDataPropertiesAssembler) {
  auto target = Parameter<JSObject>(Descriptor::kTarget);
  auto source = Parameter<Object>(Descriptor::kSource);
  auto context = Parameter<Context>(Descriptor::kContext);

  // Copying an object onto itself is not supported by this fast path.
  CSA_DCHECK(this, TaggedNotEqual(target, source));

  Label fallback(this, Label::kDeferred);

  // Define the source's own enumerable properties on {target} with
  // CreateDataProperty semantics (use_set == false); anything the fast path
  // cannot handle lands in {fallback}.
  SetOrCopyDataProperties(context, target, source, &fallback, base::nullopt,
                          base::nullopt, false);
  Return(UndefinedConstant());

  BIND(&fallback);
  TailCallRuntime(Runtime::kCopyDataProperties, context, target, source);
}
938
TF_BUILTIN(SetDataProperties, SetOrCopyDataPropertiesAssembler) {
  auto target = Parameter<JSReceiver>(Descriptor::kTarget);
  auto source = Parameter<Object>(Descriptor::kSource);
  auto context = Parameter<Context>(Descriptor::kContext);

  Label fallback(this, Label::kDeferred);
  GotoIfForceSlowPath(&fallback);

  // Store the source's own enumerable properties onto {target} using the
  // generic strict-mode [[Set]] path (use_set == true); bail out to the
  // runtime for anything the fast path cannot handle.
  SetOrCopyDataProperties(context, target, source, &fallback, base::nullopt,
                          base::nullopt, true);
  Return(UndefinedConstant());

  BIND(&fallback);
  TailCallRuntime(Runtime::kSetDataProperties, context, target, source);
}
953
TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto receiver = Parameter<JSReceiver>(Descriptor::kReceiver);

  // Fast case: the receiver's enum cache checks out — return its map.
  Label no_enumerable_properties(this), bail_to_runtime(this, Label::kDeferred);
  TNode<Map> cached_map =
      CheckEnumCache(receiver, &no_enumerable_properties, &bail_to_runtime);
  Return(cached_map);

  BIND(&bail_to_runtime);
  TailCallRuntime(Runtime::kForInEnumerate, context, receiver);

  BIND(&no_enumerable_properties);
  Return(EmptyFixedArrayConstant());
}
968
TF_BUILTIN(ForInPrepare, CodeStubAssembler) {
  // The {enumerator} is either a Map or a FixedArray.
  auto enumerator = Parameter<HeapObject>(Descriptor::kEnumerator);
  auto index = Parameter<TaggedIndex>(Descriptor::kVectorIndex);
  auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
  TNode<UintPtrT> vector_index = Unsigned(TaggedIndexToIntPtr(index));

  // Out-parameters filled in by the CSA ForInPrepare helper; feedback is
  // always recorded (kGuaranteedFeedback).
  TNode<FixedArray> cache_array;
  TNode<Smi> cache_length;
  ForInPrepare(enumerator, vector_index, feedback_vector, &cache_array,
               &cache_length, UpdateFeedbackMode::kGuaranteedFeedback);
  // Two-value return: the cache array and its length.
  Return(cache_array, cache_length);
}
982
TF_BUILTIN(ForInFilter, CodeStubAssembler) {
  auto key = Parameter<String>(Descriptor::kKey);
  auto object = Parameter<HeapObject>(Descriptor::kObject);
  auto context = Parameter<Context>(Descriptor::kContext);

  // Keep {key} iff the object still has the property; otherwise yield
  // undefined so the for-in loop skips it.
  Label return_key(this), return_undefined(this);
  TNode<Oddball> has_property =
      HasProperty(context, object, key, kForInHasProperty);
  Branch(IsTrue(has_property), &return_key, &return_undefined);

  BIND(&return_undefined);
  Return(UndefinedConstant());

  BIND(&return_key);
  Return(key);
}
998
TF_BUILTIN(SameValue, CodeStubAssembler) {
  auto left = Parameter<Object>(Descriptor::kLeft);
  auto right = Parameter<Object>(Descriptor::kRight);

  // Full SameValue comparison; the branch helper does all the work.
  Label return_true(this), return_false(this);
  BranchIfSameValue(left, right, &return_true, &return_false);

  BIND(&return_false);
  Return(FalseConstant());

  BIND(&return_true);
  Return(TrueConstant());
}
1012
TF_BUILTIN(SameValueNumbersOnly, CodeStubAssembler) {
  auto left = Parameter<Object>(Descriptor::kLeft);
  auto right = Parameter<Object>(Descriptor::kRight);

  // Same as SameValue, restricted to the numbers-only comparison mode.
  Label return_true(this), return_false(this);
  BranchIfSameValue(left, right, &return_true, &return_false,
                    SameValueMode::kNumbersOnly);

  BIND(&return_false);
  Return(FalseConstant());

  BIND(&return_true);
  Return(TrueConstant());
}
1026
// Adaptor for calling a C++ builtin through CEntry with a BuiltinExitFrame,
// so stack-frame iterators can attribute the frame to the builtin.
TF_BUILTIN(AdaptorWithBuiltinExitFrame, CodeStubAssembler) {
  auto target = Parameter<JSFunction>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto c_function = UncheckedParameter<WordT>(Descriptor::kCFunction);

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  TNode<Context> context = LoadJSFunctionContext(target);

  auto actual_argc =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  CodeStubArguments args(this, actual_argc);

  // Starts as the actual argument count (incl. receiver); possibly raised to
  // the formal parameter count below.
  TVARIABLE(Int32T, pushed_argc,
            TruncateIntPtrToInt32(args.GetLengthWithReceiver()));

  TNode<SharedFunctionInfo> shared = LoadJSFunctionSharedFunctionInfo(target);

  TNode<Int32T> formal_count = UncheckedCast<Int32T>(
      LoadSharedFunctionInfoFormalParameterCountWithReceiver(shared));

  // The number of arguments pushed is the maximum of actual arguments count
  // and formal parameters count. Except when the formal parameters count is
  // the sentinel.
  Label check_argc(this), update_argc(this), done_argc(this);

  Branch(IsSharedFunctionInfoDontAdaptArguments(shared), &done_argc,
         &check_argc);
  BIND(&check_argc);
  Branch(Int32GreaterThan(formal_count, pushed_argc.value()), &update_argc,
         &done_argc);
  BIND(&update_argc);
  pushed_argc = formal_count;
  Goto(&done_argc);
  BIND(&done_argc);

  // Update arguments count for CEntry to contain the number of arguments
  // including the receiver and the extra arguments.
  TNode<Int32T> argc = Int32Add(
      pushed_argc.value(),
      Int32Constant(BuiltinExitFrameConstants::kNumExtraArgsWithoutReceiver));

  const bool builtin_exit_frame = true;
  TNode<CodeT> code =
      HeapConstant(CodeFactory::CEntry(isolate(), 1, SaveFPRegsMode::kIgnore,
                                       ArgvMode::kStack, builtin_exit_frame));

  // Unconditionally push argc, target and new target as extra stack arguments.
  // They will be used by stack frame iterators when constructing stack trace.
  TailCallStub(CEntry1ArgvOnStackDescriptor{},  // descriptor
               code, context,       // standard arguments for TailCallStub
               argc, c_function,    // register arguments
               TheHoleConstant(),   // additional stack argument 1 (padding)
               SmiFromInt32(argc),  // additional stack argument 2
               target,              // additional stack argument 3
               new_target);         // additional stack argument 4
}
1089
TF_BUILTIN(AllocateInYoungGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // No double alignment; large object allocation is permitted.
  TNode<Smi> flags =
      SmiConstant(Smi::FromInt(AllowLargeObjectAllocationFlag::encode(true) |
                               AllocateDoubleAlignFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), flags);
}
1100
TF_BUILTIN(AllocateRegularInYoungGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // "Regular": neither double alignment nor large object allocation.
  TNode<Smi> flags =
      SmiConstant(Smi::FromInt(AllowLargeObjectAllocationFlag::encode(false) |
                               AllocateDoubleAlignFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), flags);
}
1111
TF_BUILTIN(AllocateInOldGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // No double alignment; large object allocation is permitted.
  TNode<Smi> flags =
      SmiConstant(Smi::FromInt(AllowLargeObjectAllocationFlag::encode(true) |
                               AllocateDoubleAlignFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), flags);
}
1122
TF_BUILTIN(AllocateRegularInOldGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // "Regular": neither double alignment nor large object allocation.
  TNode<Smi> flags =
      SmiConstant(Smi::FromInt(AllowLargeObjectAllocationFlag::encode(false) |
                               AllocateDoubleAlignFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), flags);
}
1133
// Tail-calls Runtime::kAbort with the Smi-encoded message id (no context).
TF_BUILTIN(Abort, CodeStubAssembler) {
  auto message_id = Parameter<Smi>(Descriptor::kMessageOrMessageId);
  TailCallRuntime(Runtime::kAbort, NoContextConstant(), message_id);
}
1138
// Tail-calls Runtime::kAbortCSADcheck with the failure message string.
TF_BUILTIN(AbortCSADcheck, CodeStubAssembler) {
  auto message = Parameter<String>(Descriptor::kMessageOrMessageId);
  TailCallRuntime(Runtime::kAbortCSADcheck, NoContextConstant(), message);
}
1143
// CEntry variants. Each name encodes the configuration forwarded to
// Generate_CEntry: number of result registers (Return1/Return2), whether FP
// registers are saved across the call, how argv is passed (on the stack or
// in a register), and whether a builtin exit frame is constructed.
void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, SaveFPRegsMode::kIgnore, ArgvMode::kStack, false);
}

void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, SaveFPRegsMode::kIgnore, ArgvMode::kStack, true);
}

void Builtins::
    Generate_CEntry_Return1_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 1, SaveFPRegsMode::kIgnore, ArgvMode::kRegister, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, SaveFPRegsMode::kSave, ArgvMode::kStack, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, SaveFPRegsMode::kSave, ArgvMode::kStack, true);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, SaveFPRegsMode::kIgnore, ArgvMode::kStack, false);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, SaveFPRegsMode::kIgnore, ArgvMode::kStack, true);
}

void Builtins::
    Generate_CEntry_Return2_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 2, SaveFPRegsMode::kIgnore, ArgvMode::kRegister, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, SaveFPRegsMode::kSave, ArgvMode::kStack, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, SaveFPRegsMode::kSave, ArgvMode::kStack, true);
}
1195
1196 #if !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)
void Builtins::Generate_MemCopyUint8Uint8(MacroAssembler* masm) {
  // No hand-written MemCopy on this architecture: route to the Illegal
  // builtin. Presumably this is never reached at runtime — confirm before
  // relying on it.
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
1200 #endif // !defined(V8_TARGET_ARCH_ARM) && !defined(V8_TARGET_ARCH_MIPS)
1201
1202 #ifndef V8_TARGET_ARCH_IA32
void Builtins::Generate_MemMove(MacroAssembler* masm) {
  // No hand-written MemMove outside ia32: route to the Illegal builtin.
  // Presumably this is never reached at runtime — confirm before relying on
  // it.
  masm->Call(BUILTIN_CODE(masm->isolate(), Illegal), RelocInfo::CODE_TARGET);
}
1206 #endif // V8_TARGET_ARCH_IA32
1207
1208 // TODO(v8:11421): Remove #if once baseline compiler is ported to other
1209 // architectures.
1210 #if ENABLE_SPARKPLUG
void Builtins::Generate_BaselineLeaveFrame(MacroAssembler* masm) {
  // Sparkplug is enabled: emit the arch-specific baseline return sequence.
  EmitReturnBaseline(masm);
}
1214 #else
1215 // Stub out implementations of arch-specific baseline builtins.
// Each stub emits a trap; these are not expected to be reached when the
// baseline (Sparkplug) compiler is not available on this architecture.
void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
  masm->Trap();
}
void Builtins::Generate_BaselineLeaveFrame(MacroAssembler* masm) {
  masm->Trap();
}
void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
  masm->Trap();
}
1225 #endif
1226
1227 // ES6 [[Get]] operation.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
  auto object = Parameter<Object>(Descriptor::kObject);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto context = Parameter<Context>(Descriptor::kContext);
  // TODO(duongn): consider tailcalling to GetPropertyWithReceiver(object,
  // object, key, OnNonExistent::kReturnUndefined).
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  // Named-property lookup on a holder: on success, returns the value
  // directly from the builtin.
  CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
        TVARIABLE(Object, var_value);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  // Element (integer-indexed) lookup is not implemented here; always bail
  // out to the slow path.
  CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  // Walk the prototype chain with {object} acting as both receiver and
  // lookup start.
  TryPrototypeChainLookup(object, object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Return(UndefinedConstant());

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetProperty, context, object, key);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    TNode<Object> name = CallBuiltin(Builtin::kToName, context, key);

    // The {object} is a JSProxy instance, look up the {name} on it, passing
    // {object} both as receiver and holder. If {name} is absent we can safely
    // return undefined from here.
    TailCallBuiltin(Builtin::kProxyGetProperty, context, object, name, object,
                    SmiConstant(OnNonExistent::kReturnUndefined));
  }
}
1281
1282 // ES6 [[Get]] operation with Receiver.
TF_BUILTIN(GetPropertyWithReceiver, CodeStubAssembler) {
  auto object = Parameter<Object>(Descriptor::kObject);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto receiver = Parameter<Object>(Descriptor::kReceiver);
  // Smi-encoded OnNonExistent value selecting the not-found behavior.
  auto on_non_existent = Parameter<Object>(Descriptor::kOnNonExistent);
  Label if_notfound(this), if_proxy(this, Label::kDeferred),
      if_slow(this, Label::kDeferred);

  // Named-property lookup on a holder: on success, returns the value
  // directly from the builtin.
  CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
        TVARIABLE(Object, var_value);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        Return(var_value.value());
      };

  // Element (integer-indexed) lookup is not implemented here; always bail
  // out to the slow path.
  CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
      [=](TNode<HeapObject> receiver, TNode<HeapObject> holder,
          TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
          TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  // Walk the prototype chain starting at {object}, with {receiver} as the
  // lookup receiver.
  TryPrototypeChainLookup(receiver, object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &if_notfound, &if_slow,
                          &if_proxy);

  BIND(&if_notfound);
  Label throw_reference_error(this);
  GotoIf(TaggedEqual(on_non_existent,
                     SmiConstant(OnNonExistent::kThrowReferenceError)),
         &throw_reference_error);
  CSA_DCHECK(this, TaggedEqual(on_non_existent,
                               SmiConstant(OnNonExistent::kReturnUndefined)));
  Return(UndefinedConstant());

  BIND(&throw_reference_error);
  Return(CallRuntime(Runtime::kThrowReferenceError, context, key));

  BIND(&if_slow);
  TailCallRuntime(Runtime::kGetPropertyWithReceiver, context, object, key,
                  receiver, on_non_existent);

  BIND(&if_proxy);
  {
    // Convert the {key} to a Name first.
    TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));

    // Proxy cannot handle private symbol so bailout.
    GotoIf(IsPrivateSymbol(name), &if_slow);

    // The {object} is a JSProxy instance, look up the {name} on it, passing
    // {receiver} as the receiver and {object} as the holder. If {name} is
    // absent we can safely return undefined from here.
    TailCallBuiltin(Builtin::kProxyGetProperty, context, object, name, receiver,
                    on_non_existent);
  }
}
1349
1350 // ES6 [[Set]] operation.
TF_BUILTIN(SetProperty, CodeStubAssembler) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto receiver = Parameter<Object>(Descriptor::kReceiver);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto value = Parameter<Object>(Descriptor::kValue);

  // Defers to the generic keyed store with strict-mode semantics.
  KeyedStoreGenericGenerator::SetProperty(state(), context, receiver, key,
                                          value, LanguageMode::kStrict);
}
1360
1361 // ES6 CreateDataProperty(), specialized for the case where objects are still
1362 // being initialized, and have not yet been made accessible to the user. Thus,
1363 // any operation here should be unobservable until after the object has been
1364 // returned.
TF_BUILTIN(CreateDataProperty, CodeStubAssembler) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto receiver = Parameter<JSObject>(Descriptor::kReceiver);
  auto key = Parameter<Object>(Descriptor::kKey);
  auto value = Parameter<Object>(Descriptor::kValue);

  // Defers to the generic keyed-store CreateDataProperty implementation.
  KeyedStoreGenericGenerator::CreateDataProperty(state(), context, receiver,
                                                 key, value);
}
1374
// Attempts to instantiate an asm.js module via the runtime; on failure falls
// back to calling {function} as regular JavaScript.
TF_BUILTIN(InstantiateAsmJs, CodeStubAssembler) {
  Label tailcall_to_function(this);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto arg_count =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto function = Parameter<JSFunction>(Descriptor::kTarget);

  // Retrieve arguments from caller (stdlib, foreign, heap).
  CodeStubArguments args(this, arg_count);
  TNode<Object> stdlib = args.GetOptionalArgumentValue(0);
  TNode<Object> foreign = args.GetOptionalArgumentValue(1);
  TNode<Object> heap = args.GetOptionalArgumentValue(2);

  // Call runtime, on success just pass the result to the caller and pop all
  // arguments. A smi 0 is returned on failure, an object on success.
  TNode<Object> maybe_result_or_smi_zero = CallRuntime(
      Runtime::kInstantiateAsmJs, context, function, stdlib, foreign, heap);
  GotoIf(TaggedIsSmi(maybe_result_or_smi_zero), &tailcall_to_function);

  TNode<SharedFunctionInfo> shared = LoadJSFunctionSharedFunctionInfo(function);
  TNode<Int32T> parameter_count = UncheckedCast<Int32T>(
      LoadSharedFunctionInfoFormalParameterCountWithReceiver(shared));
  // This builtin intercepts a call to {function}, where the number of arguments
  // pushed is the maximum of actual arguments count and formal parameters
  // count.
  Label argc_lt_param_count(this), argc_ge_param_count(this);
  Branch(IntPtrLessThan(args.GetLengthWithReceiver(),
                        ChangeInt32ToIntPtr(parameter_count)),
         &argc_lt_param_count, &argc_ge_param_count);
  BIND(&argc_lt_param_count);
  // More formals than actuals were pushed: pop the formal parameter count.
  PopAndReturn(parameter_count, maybe_result_or_smi_zero);
  BIND(&argc_ge_param_count);
  args.PopAndReturn(maybe_result_or_smi_zero);

  BIND(&tailcall_to_function);
  // On failure, tail call back to regular JavaScript by re-calling the given
  // function which has been reset to the compile lazy builtin.

  TNode<CodeT> code = LoadJSFunctionCode(function);
  TailCallJSCode(code, context, function, new_target, arg_count);
}
1417
1418 } // namespace internal
1419 } // namespace v8
1420