// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/debug/debug.h"
#include "src/external-reference-table.h"
#include "src/frame-constants.h"
#include "src/frames-inl.h"
#include "src/instruction-stream.h"
#include "src/runtime/runtime.h"
#include "src/snapshot/snapshot.h"

#include "src/ia32/assembler-ia32-inl.h"
#include "src/ia32/macro-assembler-ia32.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.
MacroAssembler::MacroAssembler(Isolate* isolate,
                               const AssemblerOptions& options, void* buffer,
                               int size, CodeObjectRequired create_code_object)
    : TurboAssembler(isolate, options, buffer, size, create_code_object) {
  if (create_code_object == CodeObjectRequired::kYes) {
    // Unlike TurboAssembler, which can be used off the main thread and may not
    // allocate, macro assembler creates its own copy of the self-reference
    // marker in order to disambiguate between self-references during nested
    // code generation (e.g.: codegen of the current object triggers stub
    // compilation through CodeStub::GetCode()).
    code_object_ = Handle<HeapObject>::New(
        *isolate->factory()->NewSelfReferenceMarker(), isolate);
  }

#ifdef V8_EMBEDDED_BUILTINS
  // Fake it as long as we use indirections through an embedded external
  // reference. This will let us implement indirections without a real
  // root register.
  // TODO(jgruber, v8:6666): Remove once a real root register exists.
  if (FLAG_embedded_builtins) set_root_array_available(true);
#endif  // V8_EMBEDDED_BUILTINS
}

void TurboAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  // TODO(jgruber, v8:6666): Support loads through the root register once it
  // exists.
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> object = isolate()->heap()->root_handle(index);
    if (object->IsSmi()) {
      mov(destination, Immediate(Smi::cast(*object)));
      return;
    } else if (!options().isolate_independent_code) {
      DCHECK(object->IsHeapObject());
      mov(destination, Handle<HeapObject>::cast(object));
      return;
    }
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination,
      StaticArray(destination, times_pointer_size, roots_array_start));
}

void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, StaticArray(scratch, times_pointer_size, roots_array_start));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> object = isolate()->heap()->root_handle(index);
  if (object->IsHeapObject()) {
    cmp(with, Handle<HeapObject>::cast(object));
  } else {
    cmp(with, Immediate(Smi::cast(*object)));
  }
}

void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> object = isolate()->heap()->root_handle(index);
  if (object->IsHeapObject()) {
    cmp(with, Handle<HeapObject>::cast(object));
  } else {
    cmp(with, Immediate(Smi::cast(*object)));
  }
}

void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> object = isolate()->heap()->root_handle(index);
  if (object->IsHeapObject()) {
    Push(Handle<HeapObject>::cast(object));
  } else {
    Push(Smi::cast(*object));
  }
}

void TurboAssembler::LoadFromConstantsTable(Register destination,
                                            int constant_index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(
      Heap::kBuiltinsConstantsTableRootIndex));
  // TODO(jgruber): LoadRoot should be a register-relative load once we have
  // the kRootRegister.
  LoadRoot(destination, Heap::kBuiltinsConstantsTableRootIndex);
  mov(destination,
      FieldOperand(destination,
                   FixedArray::kHeaderSize + constant_index * kPointerSize));
}

void TurboAssembler::LoadRootRegisterOffset(Register destination,
                                            intptr_t offset) {
  DCHECK(is_int32(offset));
  // TODO(jgruber): Register-relative load once kRootRegister exists.
  mov(destination, Immediate(ExternalReference::roots_array_start(isolate())));
  if (offset != 0) {
    add(destination, Immediate(offset));
  }
}

void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
  // TODO(jgruber): Register-relative load once kRootRegister exists.
  LoadRootRegisterOffset(destination, offset);
  mov(destination, Operand(destination, 0));
}

void TurboAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (FLAG_embedded_builtins) {
    if (root_array_available_ && options().isolate_independent_code) {
      IndirectLoadExternalReference(destination, source);
      return;
    }
  }
  mov(destination, Immediate(source));
}

Operand TurboAssembler::StaticVariable(const ExternalReference& ext) {
  // TODO(jgruber,v8:6666): Root-relative operand once kRootRegister exists.
  return Operand(ext.address(), RelocInfo::EXTERNAL_REFERENCE);
}

Operand TurboAssembler::StaticArray(Register index, ScaleFactor scale,
                                    const ExternalReference& ext) {
  // TODO(jgruber,v8:6666): Root-relative operand once kRootRegister exists.
  return Operand(index, scale, ext.address(), RelocInfo::EXTERNAL_REFERENCE);
}

static constexpr Register saved_regs[] = {eax, ecx, edx};

static constexpr int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);

int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
                                                    Register exclusion1,
                                                    Register exclusion2,
                                                    Register exclusion3) const {
  int bytes = 0;
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
      bytes += kPointerSize;
    }
  }

  if (fp_mode == kSaveFPRegs) {
    // Count all XMM registers except XMM0.
    bytes += kDoubleSize * (XMMRegister::kNumRegisters - 1);
  }

  return bytes;
}

int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                    Register exclusion2, Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  int bytes = 0;
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
      push(reg);
      bytes += kPointerSize;
    }
  }

  if (fp_mode == kSaveFPRegs) {
    // Save all XMM registers except XMM0.
    int delta = kDoubleSize * (XMMRegister::kNumRegisters - 1);
    sub(esp, Immediate(delta));
    for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(esp, (i - 1) * kDoubleSize), reg);
    }
    bytes += delta;
  }

  return bytes;
}

int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                   Register exclusion2, Register exclusion3) {
  int bytes = 0;
  if (fp_mode == kSaveFPRegs) {
    // Restore all XMM registers except XMM0.
    int delta = kDoubleSize * (XMMRegister::kNumRegisters - 1);
    for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(esp, (i - 1) * kDoubleSize));
    }
    add(esp, Immediate(delta));
    bytes += delta;
  }

  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
      pop(reg);
      bytes += kPointerSize;
    }
  }

  return bytes;
}

void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch, Label* lost_precision,
                               Label* is_nan, Label::Distance dst) {
  DCHECK(input_reg != scratch);
  cvttsd2si(result_reg, Operand(input_reg));
  Cvtsi2sd(scratch, Operand(result_reg));
  ucomisd(scratch, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
}
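
// Worked example for DoubleToI above (illustrative): for input_reg = 1.5,
// cvttsd2si truncates to 1, converting back gives 1.0 != 1.5, and the
// not_equal branch reports lost precision. For a NaN input, ucomisd leaves
// the operands unordered and sets the parity flag, taken by parity_even.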

void MacroAssembler::RecordWriteField(Register object, int offset,
                                      Register value, Register dst,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, Immediate(kPointerSize - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}

void TurboAssembler::SaveRegisters(RegList registers) {
  DCHECK_GT(NumRegs(registers), 0);
  for (int i = 0; i < Register::kNumRegisters; ++i) {
    if ((registers >> i) & 1u) {
      push(Register::from_code(i));
    }
  }
}

void TurboAssembler::RestoreRegisters(RegList registers) {
  DCHECK_GT(NumRegs(registers), 0);
  for (int i = Register::kNumRegisters - 1; i >= 0; --i) {
    if ((registers >> i) & 1u) {
      pop(Register::from_code(i));
    }
  }
}

void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
  // TODO(albertnetymk): For now we ignore remembered_set_action and fp_mode,
  // i.e. we always emit the remembered set and save FP registers in
  // RecordWriteStub. If a large performance regression is observed, we should
  // use these values to avoid unnecessary work.

  Callable const callable =
      Builtins::CallableFor(isolate(), Builtins::kRecordWrite);
  RegList registers = callable.descriptor().allocatable_registers();

  SaveRegisters(registers);

  Register object_parameter(callable.descriptor().GetRegisterParameter(
      RecordWriteDescriptor::kObject));
  Register slot_parameter(
      callable.descriptor().GetRegisterParameter(RecordWriteDescriptor::kSlot));
  Register isolate_parameter(callable.descriptor().GetRegisterParameter(
      RecordWriteDescriptor::kIsolate));
  Register remembered_set_parameter(callable.descriptor().GetRegisterParameter(
      RecordWriteDescriptor::kRememberedSet));
  Register fp_mode_parameter(callable.descriptor().GetRegisterParameter(
      RecordWriteDescriptor::kFPMode));

  push(object);
  push(address);

  pop(slot_parameter);
  pop(object_parameter);

  mov(isolate_parameter,
      Immediate(ExternalReference::isolate_address(isolate())));
  Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
  Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
  Call(callable.code(), RelocInfo::CODE_TARGET);

  RestoreRegisters(registers);
}

void MacroAssembler::RecordWrite(Register object, Register address,
                                 Register value, SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  DCHECK(object != value);
  DCHECK(object != address);
  DCHECK(value != address);
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  CallRecordWriteStub(object, address, remembered_set_action, fp_mode);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}

void MacroAssembler::MaybeDropFrames() {
  // Check whether we need to drop frames to restart a function on the stack.
  ExternalReference restart_fp =
      ExternalReference::debug_restart_fp_address(isolate());
  mov(ebx, StaticVariable(restart_fp));
  test(ebx, ebx);
  j(not_zero, BUILTIN_CODE(isolate(), FrameDropperTrampoline),
    RelocInfo::CODE_TARGET);
}

void TurboAssembler::Cvtsi2ss(XMMRegister dst, Operand src) {
  xorps(dst, dst);
  cvtsi2ss(dst, src);
}

void TurboAssembler::Cvtsi2sd(XMMRegister dst, Operand src) {
  xorpd(dst, dst);
  cvtsi2sd(dst, src);
}

void TurboAssembler::Cvtui2ss(XMMRegister dst, Operand src, Register tmp) {
  Label done;
  Register src_reg = src.is_reg_only() ? src.reg() : tmp;
  if (src_reg == tmp) mov(tmp, src);
  cvtsi2ss(dst, src_reg);
  test(src_reg, src_reg);
  j(positive, &done, Label::kNear);

  // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
  if (src_reg != tmp) mov(tmp, src_reg);
  shr(tmp, 1);
  // The LSB is shifted into CF. If it is set, set the LSB in {tmp}.
  Label msb_not_set;
  j(not_carry, &msb_not_set, Label::kNear);
  or_(tmp, Immediate(1));
  bind(&msb_not_set);
  cvtsi2ss(dst, tmp);
  addss(dst, dst);
  bind(&done);
}
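
// Worked example for Cvtui2ss above (illustrative): cvtsi2ss treats its
// input as signed, so inputs with the MSB set take the slow path. For
// src = 0x80000081 (2^31 + 129): shr yields 0x40000040 with CF = 1, the OR
// restores the dropped LSB to give 0x40000041, cvtsi2ss rounds that to
// 2^30 + 128, and addss doubles it to 2^31 + 256, the correctly rounded
// result. Without the OR, 0x40000040 would lie exactly halfway between two
// representable floats and round-to-even would yield 2^31, off by one ULP.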

void TurboAssembler::Cvttss2ui(Register dst, Operand src, XMMRegister tmp) {
  Label done;
  cvttss2si(dst, src);
  test(dst, dst);
  j(positive, &done);
  Move(tmp, static_cast<float>(INT32_MIN));
  addss(tmp, src);
  cvttss2si(dst, tmp);
  or_(dst, Immediate(0x80000000));
  bind(&done);
}
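
// Worked example for Cvttss2ui above (illustrative): for src = 3e9f, which
// exceeds INT32_MAX, the first cvttss2si fails (its result is negative), so
// we convert tmp = 3e9f + INT32_MIN = 852516352.0f instead and OR in
// 0x80000000 to add the 2^31 bias back: 852516352 | 0x80000000 == 3000000000.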

void TurboAssembler::Cvtui2sd(XMMRegister dst, Operand src) {
  Label done;
  cmp(src, Immediate(0));
  ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
  Cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, StaticVariable(uint32_bias));
  bind(&done);
}
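
// Worked example for Cvtui2sd above (illustrative): cvtsi2sd treats src as
// signed, so src = 0xFFFFFFFF converts to -1.0; since the sign flag was set,
// adding uint32_bias (2^32) yields the intended 4294967295.0.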

void TurboAssembler::Cvttsd2ui(Register dst, Operand src, XMMRegister tmp) {
  Move(tmp, -2147483648.0);
  addsd(tmp, src);
  cvttsd2si(dst, tmp);
  add(dst, Immediate(0x80000000));
}
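
// Worked example for Cvttsd2ui above (illustrative): the input is biased by
// -2^31 so the whole uint32 range fits a signed conversion, and the add wraps
// the bias back out. For src = 3e9: tmp = 852516352.0, cvttsd2si gives
// 852516352, and adding 0x80000000 yields 3000000000. For src = 5.0:
// tmp = -2147483643.0 converts to -2147483643, and the wrapping add
// restores 5.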

void TurboAssembler::ShlPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    mov(high, low);
    shl(high, shift - 32);
    xor_(low, low);
  } else {
    shld(high, low, shift);
    shl(low, shift);
  }
}

void TurboAssembler::ShlPair_cl(Register high, Register low) {
  shld_cl(high, low);
  shl_cl(low);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(high, low);
  xor_(low, low);
  bind(&done);
}
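
// Note on ShlPair_cl above (illustrative): shld and shl only use cl mod 32,
// so for shift amounts >= 32 the hardware shifts both halves by (cl - 32)
// and the 0x20 test patches up the result. E.g. for cl = 40, shld/shl shift
// the pair by 8, then the taken branch moves low into high and clears low,
// which together equal a 64-bit left shift by 40. ShrPair_cl and SarPair_cl
// below apply the mirrored fix-up.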

void TurboAssembler::ShrPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    mov(low, high);
    shr(low, shift - 32);
    xor_(high, high);
  } else {
    shrd(high, low, shift);
    shr(high, shift);
  }
}

void TurboAssembler::ShrPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  shr_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  xor_(high, high);
  bind(&done);
}

void TurboAssembler::SarPair(Register high, Register low, uint8_t shift) {
  if (shift >= 32) {
    mov(low, high);
    sar(low, shift - 32);
    sar(high, 31);
  } else {
    shrd(high, low, shift);
    sar(high, shift);
  }
}

void TurboAssembler::SarPair_cl(Register high, Register low) {
  shrd_cl(low, high);
  sar_cl(high);
  Label done;
  test(ecx, Immediate(0x20));
  j(equal, &done, Label::kNear);
  mov(low, high);
  sar(high, 31);
  bind(&done);
}

void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
}

void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, AbortReason::kOperandIsNotASmi);
  }
}

void MacroAssembler::AssertConstructor(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmiAndNotAConstructor);
    Push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    test_b(FieldOperand(object, Map::kBitFieldOffset),
           Immediate(Map::IsConstructorBit::kMask));
    Pop(object);
    Check(not_zero, AbortReason::kOperandIsNotAConstructor);
  }
}

void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, AbortReason::kOperandIsNotAFunction);
  }
}


void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, AbortReason::kOperandIsNotABoundFunction);
  }
}

void MacroAssembler::AssertGeneratorObject(Register object) {
  if (!emit_debug_code()) return;

  test(object, Immediate(kSmiTagMask));
  Check(not_equal, AbortReason::kOperandIsASmiAndNotAGeneratorObject);

  {
    Push(object);
    Register map = object;

    // Load map
    mov(map, FieldOperand(object, HeapObject::kMapOffset));

    Label do_check;
    // Check if JSGeneratorObject
    CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
    j(equal, &do_check, Label::kNear);

    // Check if JSAsyncGeneratorObject
    CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);

    bind(&do_check);
    Pop(object);
  }

  Check(equal, AbortReason::kOperandIsNotAGeneratorObject);
}

void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, AbortReason::kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, AbortReason::kOperandIsASmi);
  }
}

void TurboAssembler::StubPrologue(StackFrame::Type type) {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(Immediate(StackFrame::TypeToMarker(type)));
}

void TurboAssembler::Prologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(edi);  // Callee's JS function.
}

void TurboAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(Immediate(StackFrame::TypeToMarker(type)));
}

void TurboAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
        Immediate(StackFrame::TypeToMarker(type)));
    Check(equal, AbortReason::kStackFrameTypesMustMatch);
  }
  leave();
}

#ifdef V8_OS_WIN
void TurboAssembler::AllocateStackFrame(Register bytes_scratch) {
  // On Windows, we cannot grow the stack by more than one page (the minimum
  // page size is 4KB) at a time without accessing at least one byte on the
  // new page. See:
  // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
  constexpr int kPageSize = 4 * 1024;
  Label check_offset;
  Label touch_next_page;
  jmp(&check_offset);
  bind(&touch_next_page);
  sub(esp, Immediate(kPageSize));
  // Just to touch the page, before we increment further.
  mov(Operand(esp, 0), Immediate(0));
  sub(bytes_scratch, Immediate(kPageSize));

  bind(&check_offset);
  cmp(bytes_scratch, kPageSize);
  j(greater, &touch_next_page);

  sub(esp, bytes_scratch);
}
#endif

void MacroAssembler::EnterBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Push(ebp);
  Move(ebp, esp);
  Push(context);
  Push(target);
  Push(argc);
}

void MacroAssembler::LeaveBuiltinFrame(Register context, Register target,
                                       Register argc) {
  Pop(argc);
  Pop(target);
  Pop(context);
  leave();
}

void MacroAssembler::EnterExitFramePrologue(StackFrame::Type frame_type) {
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT);

  // Set up the frame structure on the stack.
  DCHECK_EQ(+2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(+1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  push(Immediate(StackFrame::TypeToMarker(frame_type)));
  DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
  push(Immediate(0));  // Saved entry sp, patched before call.
  DCHECK_EQ(-3 * kPointerSize, ExitFrameConstants::kCodeOffset);
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address =
      ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
  ExternalReference context_address =
      ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
  ExternalReference c_function_address =
      ExternalReference::Create(IsolateAddressId::kCFunctionAddress, isolate());
  mov(StaticVariable(c_entry_fp_address), ebp);
  mov(StaticVariable(context_address), esi);
  mov(StaticVariable(c_function_address), edx);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}

void MacroAssembler::EnterExitFrame(int argc, bool save_doubles,
                                    StackFrame::Type frame_type) {
  EnterExitFramePrologue(frame_type);

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(argc, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue(StackFrame::EXIT);
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue();
}

void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address =
      ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
  mov(esi, StaticVariable(context_address));
#ifdef DEBUG
  mov(StaticVariable(context_address), Immediate(Context::kInvalidContext));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address =
      ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
  mov(StaticVariable(c_entry_fp_address), Immediate(0));
}

void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  push(Immediate(0));  // Padding.

  // Link the current handler as the next handler.
  ExternalReference handler_address =
      ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
  push(StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(StaticVariable(handler_address), esp);
}


void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address =
      ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
  pop(StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::CallStub(CodeStub* stub) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}

void TurboAssembler::CallStubDelayed(CodeStub* stub) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub);
}

void MacroAssembler::TailCallStub(CodeStub* stub) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}

bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame() || !stub->SometimesSetsUpAFrame();
}

void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(kRuntimeCallArgCountRegister, Immediate(num_arguments));
  Move(kRuntimeCallFunctionRegister, Immediate(ExternalReference::Create(f)));
  Handle<Code> code =
      CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
  Call(code, RelocInfo::CODE_TARGET);
}

void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
                                           Register centry) {
  const Runtime::Function* f = Runtime::FunctionForId(fid);
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(kRuntimeCallArgCountRegister, Immediate(f->nargs));
  Move(kRuntimeCallFunctionRegister, Immediate(ExternalReference::Create(f)));
  DCHECK(!AreAliased(centry, kRuntimeCallArgCountRegister,
                     kRuntimeCallFunctionRegister));
  add(centry, Immediate(Code::kHeaderSize - kHeapObjectTag));
  Call(centry);
}

void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[4]                 : argument num_arguments - 1
  //  ...
  //  -- esp[4 * num_arguments] : argument 0 (receiver)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    Move(kRuntimeCallArgCountRegister, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference::Create(fid));
}

void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             bool builtin_exit_frame) {
  // Set the entry point and jump to the C entry runtime stub.
  Move(kRuntimeCallFunctionRegister, Immediate(ext));
  Handle<Code> code = CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs,
                                          kArgvOnStack, builtin_exit_frame);
  Jump(code, RelocInfo::CODE_TARGET);
}

void MacroAssembler::JumpToInstructionStream(Address entry) {
  jmp(entry, RelocInfo::OFF_HEAP_TARGET);
}

void TurboAssembler::PrepareForTailCall(
    const ParameterCount& callee_args_count, Register caller_args_count_reg,
    Register scratch0, Register scratch1,
    int number_of_temp_values_after_return_address) {
#if DEBUG
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }
#endif

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch0;
  if (callee_args_count.is_reg()) {
    sub(caller_args_count_reg, callee_args_count.reg());
    lea(new_sp_reg,
        Operand(ebp, caller_args_count_reg, times_pointer_size,
                StandardFrameConstants::kCallerPCOffset -
                    number_of_temp_values_after_return_address * kPointerSize));
  } else {
    lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size,
                            StandardFrameConstants::kCallerPCOffset -
                                (callee_args_count.immediate() +
                                 number_of_temp_values_after_return_address) *
                                    kPointerSize));
  }

  if (FLAG_debug_code) {
    cmp(esp, new_sp_reg);
    Check(below, AbortReason::kStackAccessBelowStackPointer);
  }

  // Copy the return address from the caller's frame to the current frame's
  // return address slot to avoid trashing it, and let the following loop
  // copy it to the right place.
  Register tmp_reg = scratch1;
  mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
  mov(Operand(esp, number_of_temp_values_after_return_address * kPointerSize),
      tmp_reg);

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // +2 here is to copy both receiver and return address.
  Register count_reg = caller_args_count_reg;
  if (callee_args_count.is_reg()) {
    lea(count_reg, Operand(callee_args_count.reg(),
                           2 + number_of_temp_values_after_return_address));
  } else {
    mov(count_reg, Immediate(callee_args_count.immediate() + 2 +
                             number_of_temp_values_after_return_address));
    // TODO(ishell): Unroll copying loop for small immediate values.
  }

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  jmp(&entry, Label::kNear);
  bind(&loop);
  dec(count_reg);
  mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0));
  mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
  bind(&entry);
  cmp(count_reg, Immediate(0));
  j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  mov(esp, new_sp_reg);
}

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual, Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip the adaptation code by making it
        // look like we have a match between the expected and actual
        // number of arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg() == ebx);
    } else if (expected.reg() != actual.reg()) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg() == eax);
      DCHECK(expected.reg() == ebx);
    } else {
      definitely_matches = true;
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
    if (flag == CALL_FUNCTION) {
      Call(adaptor, RelocInfo::CODE_TARGET);
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}

void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual) {
  Label skip_hook;

  ExternalReference debug_hook_active =
      ExternalReference::debug_hook_on_function_call_address(isolate());
  cmpb(StaticVariable(debug_hook_active), Immediate(0));
  j(equal, &skip_hook);

  {
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
      SmiUntag(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    Push(fun);
    Push(fun);
    Operand receiver_op =
        actual.is_reg()
            ? Operand(ebp, actual.reg(), times_pointer_size, kPointerSize * 2)
            : Operand(ebp, actual.immediate() * times_pointer_size +
                               kPointerSize * 2);
    Push(receiver_op);
    CallRuntime(Runtime::kDebugOnFunctionCall);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_hook);
}

void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function == edi);
  DCHECK_IMPLIES(new_target.is_valid(), new_target == edx);

  // On function call, call into the debugger if necessary.
  CheckDebugHook(function, new_target, expected, actual);

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
    mov(ecx, FieldOperand(function, JSFunction::kCodeOffset));
    add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    if (flag == CALL_FUNCTION) {
      call(ecx);
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(ecx);
    }
    bind(&done);
  }
}

void MacroAssembler::InvokeFunction(Register fun, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun == edi);
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  movzx_w(ebx,
          FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag);
}

void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun == edi);
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag);
}

void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}

void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}

int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
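
// Illustrative example, assuming kNumSafepointRegisters is 8 on ia32: eax
// (code 0) is pushed first and thus sits deepest in the safepoint area, so
// it maps to stack index 8 - 0 - 1 = 7, while the highest register code ends
// up at index 0, next to the stack pointer.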

void TurboAssembler::Ret() { ret(0); }

void TurboAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}

void TurboAssembler::Move(Register dst, Register src) {
  if (dst != src) {
    mov(dst, src);
  }
}

void TurboAssembler::Move(Register dst, const Immediate& src) {
  if (!src.is_heap_object_request() && src.is_zero()) {
    xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
  } else if (src.is_external_reference()) {
    LoadAddress(dst, src.external_reference());
  } else {
    mov(dst, src);
  }
}

void TurboAssembler::Move(Operand dst, const Immediate& src) { mov(dst, src); }

void TurboAssembler::Move(Register dst, Handle<HeapObject> src) {
  if (root_array_available_ && options().isolate_independent_code) {
    IndirectLoadConstant(dst, src);
    return;
  }
  mov(dst, src);
}

void TurboAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    unsigned cnt = base::bits::CountPopulation(src);
    unsigned nlz = base::bits::CountLeadingZeros32(src);
    unsigned ntz = base::bits::CountTrailingZeros32(src);
    if (nlz + cnt + ntz == 32) {
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        psrld(dst, 32 - cnt);
      } else {
        pslld(dst, 32 - cnt);
        if (nlz != 0) psrld(dst, nlz);
      }
    } else {
      push(eax);
      mov(eax, Immediate(src));
      movd(dst, Operand(eax));
      pop(eax);
    }
  }
}
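
// Worked example for the contiguous-bit trick above (illustrative): for
// src = 0x00FFFF00 we get cnt = 16, nlz = 8, ntz = 8, so nlz + cnt + ntz
// == 32. pcmpeqd materializes 0xFFFFFFFF without a memory load, pslld by
// 32 - cnt = 16 gives 0xFFFF0000, and psrld by nlz = 8 gives 0x00FFFF00,
// avoiding both a constant pool entry and a scratch GPR.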

void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    uint32_t lower = static_cast<uint32_t>(src);
    uint32_t upper = static_cast<uint32_t>(src >> 32);
    unsigned cnt = base::bits::CountPopulation(src);
    unsigned nlz = base::bits::CountLeadingZeros64(src);
    unsigned ntz = base::bits::CountTrailingZeros64(src);
    if (nlz + cnt + ntz == 64) {
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        psrlq(dst, 64 - cnt);
      } else {
        psllq(dst, 64 - cnt);
        if (nlz != 0) psrlq(dst, nlz);
      }
    } else if (lower == 0) {
      Move(dst, upper);
      psllq(dst, 32);
    } else if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatureScope scope(this, SSE4_1);
      push(eax);
      Move(eax, Immediate(lower));
      movd(dst, Operand(eax));
      if (upper != lower) {
        Move(eax, Immediate(upper));
      }
      pinsrd(dst, Operand(eax), 1);
      pop(eax);
    } else {
      push(Immediate(upper));
      push(Immediate(lower));
      movsd(dst, Operand(esp, 0));
      add(esp, Immediate(kDoubleSize));
    }
  }
}

void TurboAssembler::Pshufhw(XMMRegister dst, Operand src, uint8_t shuffle) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpshufhw(dst, src, shuffle);
  } else {
    pshufhw(dst, src, shuffle);
  }
}

void TurboAssembler::Pshuflw(XMMRegister dst, Operand src, uint8_t shuffle) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpshuflw(dst, src, shuffle);
  } else {
    pshuflw(dst, src, shuffle);
  }
}

void TurboAssembler::Pshufd(XMMRegister dst, Operand src, uint8_t shuffle) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpshufd(dst, src, shuffle);
  } else {
    pshufd(dst, src, shuffle);
  }
}

void TurboAssembler::Psraw(XMMRegister dst, int8_t shift) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpsraw(dst, dst, shift);
  } else {
    psraw(dst, shift);
  }
}

void TurboAssembler::Psrlw(XMMRegister dst, int8_t shift) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpsrlw(dst, dst, shift);
  } else {
    psrlw(dst, shift);
  }
}

void TurboAssembler::Psignb(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpsignb(dst, dst, src);
    return;
  }
  if (CpuFeatures::IsSupported(SSSE3)) {
    CpuFeatureScope sse_scope(this, SSSE3);
    psignb(dst, src);
    return;
  }
  UNREACHABLE();
}

void TurboAssembler::Psignw(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpsignw(dst, dst, src);
    return;
  }
  if (CpuFeatures::IsSupported(SSSE3)) {
    CpuFeatureScope sse_scope(this, SSSE3);
    psignw(dst, src);
    return;
  }
  UNREACHABLE();
}

void TurboAssembler::Psignd(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpsignd(dst, dst, src);
    return;
  }
  if (CpuFeatures::IsSupported(SSSE3)) {
    CpuFeatureScope sse_scope(this, SSSE3);
    psignd(dst, src);
    return;
  }
  UNREACHABLE();
}

void TurboAssembler::Pshufb(XMMRegister dst, Operand src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpshufb(dst, dst, src);
    return;
  }
  if (CpuFeatures::IsSupported(SSSE3)) {
    CpuFeatureScope sse_scope(this, SSSE3);
    pshufb(dst, src);
    return;
  }
  UNREACHABLE();
}

void TurboAssembler::Pblendw(XMMRegister dst, Operand src, uint8_t imm8) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpblendw(dst, dst, src, imm8);
    return;
  }
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pblendw(dst, src, imm8);
    return;
  }
  UNREACHABLE();
}

void TurboAssembler::Palignr(XMMRegister dst, Operand src, uint8_t imm8) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpalignr(dst, dst, src, imm8);
    return;
  }
  if (CpuFeatures::IsSupported(SSSE3)) {
    CpuFeatureScope sse_scope(this, SSSE3);
    palignr(dst, src, imm8);
    return;
  }
  UNREACHABLE();
}

void TurboAssembler::Pextrb(Register dst, XMMRegister src, int8_t imm8) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpextrb(dst, src, imm8);
    return;
  }
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrb(dst, src, imm8);
    return;
  }
  UNREACHABLE();
}

void TurboAssembler::Pextrw(Register dst, XMMRegister src, int8_t imm8) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpextrw(dst, src, imm8);
    return;
  }
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrw(dst, src, imm8);
    return;
  }
  UNREACHABLE();
}

void TurboAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
  if (imm8 == 0) {
    Movd(dst, src);
    return;
  }
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope scope(this, AVX);
    vpextrd(dst, src, imm8);
    return;
  }
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrd(dst, src, imm8);
    return;
  }
  DCHECK_LT(imm8, 4);
  pshufd(xmm0, src, imm8);
  movd(dst, xmm0);
}
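
// Note on the SSE2 fallback in Pextrd above (illustrative): for imm8 < 4 the
// low two bits of the pshufd control byte select which source lane lands in
// lane 0 of xmm0, so pshufd followed by movd extracts lane imm8. xmm0 is
// clobbered as a scratch register.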

void TurboAssembler::Pinsrd(XMMRegister dst, Operand src, int8_t imm8,
                            bool is_64_bits) {
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pinsrd(dst, src, imm8);
    return;
  }
  if (is_64_bits) {
    movd(xmm0, src);
    if (imm8 == 1) {
      punpckldq(dst, xmm0);
    } else {
      DCHECK_EQ(0, imm8);
      psrlq(dst, 32);
      punpckldq(xmm0, dst);
      movaps(dst, xmm0);
    }
  } else {
    DCHECK_LT(imm8, 4);
    push(eax);
    mov(eax, src);
    pinsrw(dst, eax, imm8 * 2);
    shr(eax, 16);
    pinsrw(dst, eax, imm8 * 2 + 1);
    pop(eax);
  }
}

void TurboAssembler::Lzcnt(Register dst, Operand src) {
  if (CpuFeatures::IsSupported(LZCNT)) {
    CpuFeatureScope scope(this, LZCNT);
    lzcnt(dst, src);
    return;
  }
  Label not_zero_src;
  bsr(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(63));  // 63^31 == 32
  bind(&not_zero_src);
  xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
}
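
// Worked example for the Lzcnt fallback above (illustrative): bsr returns
// the index of the highest set bit, and 31 - index == 31 ^ index for index
// in [0, 31]. For src = 0x00008000, bsr gives 15 and 15 ^ 31 == 16, the
// leading-zero count. For src == 0, the preloaded 63 becomes 63 ^ 31 == 32.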

void TurboAssembler::Tzcnt(Register dst, Operand src) {
  if (CpuFeatures::IsSupported(BMI1)) {
    CpuFeatureScope scope(this, BMI1);
    tzcnt(dst, src);
    return;
  }
  Label not_zero_src;
  bsf(dst, src);
  j(not_zero, &not_zero_src, Label::kNear);
  Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
  bind(&not_zero_src);
}
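
// Note on the Tzcnt fallback above (illustrative): for nonzero src, bsf
// already returns the trailing-zero count (the index of the lowest set bit),
// so only the src == 0 case needs the explicit 32.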
1572
Popcnt(Register dst,Operand src)1573 void TurboAssembler::Popcnt(Register dst, Operand src) {
1574 if (CpuFeatures::IsSupported(POPCNT)) {
1575 CpuFeatureScope scope(this, POPCNT);
1576 popcnt(dst, src);
1577 return;
1578 }
1579 UNREACHABLE();
1580 }
1581
void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
  cmp(in_out, Immediate(kClearedWeakHeapObject));
  j(equal, target_if_cleared);

  and_(in_out, Immediate(~kWeakHeapObjectMask));
}

void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK_GT(value, 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = StaticVariable(ExternalReference::Create(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}

void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK_GT(value, 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = StaticVariable(ExternalReference::Create(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}

void TurboAssembler::Assert(Condition cc, AbortReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}

void TurboAssembler::AssertUnreachable(AbortReason reason) {
  if (emit_debug_code()) Abort(reason);
}

void TurboAssembler::Check(Condition cc, AbortReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}

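// Debug check that esp satisfies the platform's activation frame alignment;
// traps with int3 if it does not. Only meaningful when the required alignment
// exceeds the pointer size.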
void TurboAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}

void TurboAssembler::Abort(AbortReason reason) {
#ifdef DEBUG
  const char* msg = GetAbortReason(reason);
  RecordComment("Abort message: ");
  RecordComment(msg);
#endif

  // Avoid emitting call to builtin if requested.
  if (trap_on_abort()) {
    int3();
    return;
  }

  if (should_abort_hard()) {
    // We don't care if we constructed a frame. Just pretend we did.
    FrameScope assume_frame(this, StackFrame::NONE);
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0), Immediate(static_cast<int>(reason)));
    CallCFunction(ExternalReference::abort_with_reason(), 1);
    return;
  }

  Move(edx, Smi::FromInt(static_cast<int>(reason)));

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame()) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  } else {
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  }
  // will not return here
  int3();
}

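// Aligns esp for a C call: reserves num_arguments argument slots, rounds esp
// down to the activation frame alignment, and stashes the incoming esp in the
// extra slot just above the arguments so CallCFunction can restore it.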
void TurboAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}

void TurboAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}

void TurboAssembler::CallCFunction(Register function, int num_arguments) {
  DCHECK_LE(num_arguments, kMaxCParameters);
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}

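// When embedded builtins are enabled, calls and jumps to isolate-independent
// builtins bypass the on-heap code object: either indirectly through a
// constant loaded via the root array, or by targeting the off-heap
// instruction stream directly. All other targets use an ordinary
// code-object call.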
void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  if (FLAG_embedded_builtins) {
    // TODO(jgruber): Figure out which register we can clobber here.
    // TODO(jgruber): Pc-relative builtin-to-builtin calls.
    Register scratch = kOffHeapTrampolineRegister;
    if (root_array_available_ && options().isolate_independent_code) {
      IndirectLoadConstant(scratch, code_object);
      lea(scratch, FieldOperand(scratch, Code::kHeaderSize));
      call(scratch);
      return;
    } else if (options().inline_offheap_trampolines) {
      int builtin_index = Builtins::kNoBuiltinId;
      if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
          Builtins::IsIsolateIndependent(builtin_index)) {
        // Inline the trampoline.
        RecordCommentForOffHeapTrampoline(builtin_index);
        CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
        EmbeddedData d = EmbeddedData::FromBlob();
        Address entry = d.InstructionStartOfBuiltin(builtin_index);
        call(entry, RelocInfo::OFF_HEAP_TARGET);
        return;
      }
    }
  }
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
}

void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  if (FLAG_embedded_builtins) {
    // TODO(jgruber): Figure out which register we can clobber here.
    // TODO(jgruber): Pc-relative builtin-to-builtin calls.
    Register scratch = kOffHeapTrampolineRegister;
    if (root_array_available_ && options().isolate_independent_code) {
      IndirectLoadConstant(scratch, code_object);
      lea(scratch, FieldOperand(scratch, Code::kHeaderSize));
      jmp(scratch);
      return;
    } else if (options().inline_offheap_trampolines) {
      int builtin_index = Builtins::kNoBuiltinId;
      if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
          Builtins::IsIsolateIndependent(builtin_index)) {
        // Inline the trampoline.
        RecordCommentForOffHeapTrampoline(builtin_index);
        CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
        EmbeddedData d = EmbeddedData::FromBlob();
        Address entry = d.InstructionStartOfBuiltin(builtin_index);
        jmp(entry, RelocInfo::OFF_HEAP_TARGET);
        return;
      }
    }
  }
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  jmp(code_object, rmode);
}

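// The retpoline sequences below implement indirect calls and jumps that are
// hardened against Spectre v2 style branch-target injection: the indirect
// branch is replaced by a call whose return address on the stack is
// overwritten with the real target, while speculative execution of the
// original return path is trapped in the pause/jmp loop.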
void TurboAssembler::RetpolineCall(Register reg) {
  Label setup_return, setup_target, inner_indirect_branch, capture_spec;

  jmp(&setup_return);  // Jump past the entire retpoline below.

  bind(&inner_indirect_branch);
  call(&setup_target);

  bind(&capture_spec);
  pause();
  jmp(&capture_spec);

  bind(&setup_target);
  mov(Operand(esp, 0), reg);
  ret(0);

  bind(&setup_return);
  call(&inner_indirect_branch);  // Callee will return after this instruction.
}

void TurboAssembler::RetpolineCall(Address destination, RelocInfo::Mode rmode) {
  Label setup_return, setup_target, inner_indirect_branch, capture_spec;

  jmp(&setup_return);  // Jump past the entire retpoline below.

  bind(&inner_indirect_branch);
  call(&setup_target);

  bind(&capture_spec);
  pause();
  jmp(&capture_spec);

  bind(&setup_target);
  mov(Operand(esp, 0), destination, rmode);
  ret(0);

  bind(&setup_return);
  call(&inner_indirect_branch);  // Callee will return after this instruction.
}

void TurboAssembler::RetpolineJump(Register reg) {
  Label setup_target, capture_spec;

  call(&setup_target);

  bind(&capture_spec);
  pause();
  jmp(&capture_spec);

  bind(&setup_target);
  mov(Operand(esp, 0), reg);
  ret(0);
}

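// Masks the object's address down to the start of its page and tests the
// given mask against the MemoryChunk flags stored in the page header.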
void TurboAssembler::CheckPageFlag(Register object, Register scratch, int mask,
                                   Condition cc, Label* condition_met,
                                   Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  if (scratch == object) {
    and_(scratch, Immediate(~kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}

void TurboAssembler::ComputeCodeStartAddress(Register dst) {
  // In order to get the address of the current instruction, we first need
  // to use a call and then use a pop, thus pushing the return address to
  // the stack and then popping it into the register.
  Label current;
  call(&current);
  int pc = pc_offset();
  bind(&current);
  pop(dst);
  if (pc != 0) {
    sub(dst, Immediate(pc));
  }
}

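// All-ones is the neutral poison mask: and-ing a speculatively loaded value
// with it leaves the value unchanged.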
void TurboAssembler::ResetSpeculationPoisonRegister() {
  mov(kSpeculationPoisonRegister, Immediate(-1));
}

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32