1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
6 #define V8_X64_MACRO_ASSEMBLER_X64_H_
7 
8 #include "src/bailout-reason.h"
9 #include "src/base/flags.h"
10 #include "src/globals.h"
11 #include "src/turbo-assembler.h"
12 #include "src/x64/assembler-x64.h"
13 
14 namespace v8 {
15 namespace internal {
16 
17 // Give alias names to registers for calling conventions.
18 constexpr Register kReturnRegister0 = rax;
19 constexpr Register kReturnRegister1 = rdx;
20 constexpr Register kReturnRegister2 = r8;
21 constexpr Register kJSFunctionRegister = rdi;
22 constexpr Register kContextRegister = rsi;
23 constexpr Register kAllocateSizeRegister = rdx;
24 constexpr Register kSpeculationPoisonRegister = r12;
25 constexpr Register kInterpreterAccumulatorRegister = rax;
26 constexpr Register kInterpreterBytecodeOffsetRegister = r9;
27 constexpr Register kInterpreterBytecodeArrayRegister = r14;
28 constexpr Register kInterpreterDispatchTableRegister = r15;
29 
30 constexpr Register kJavaScriptCallArgCountRegister = rax;
31 constexpr Register kJavaScriptCallCodeStartRegister = rcx;
32 constexpr Register kJavaScriptCallTargetRegister = kJSFunctionRegister;
33 constexpr Register kJavaScriptCallNewTargetRegister = rdx;
34 constexpr Register kJavaScriptCallExtraArg1Register = rbx;
35 
36 constexpr Register kRuntimeCallFunctionRegister = rbx;
37 constexpr Register kRuntimeCallArgCountRegister = rax;
38 constexpr Register kRuntimeCallArgvRegister = r15;
39 constexpr Register kWasmInstanceRegister = rsi;
40 
41 // Default scratch register used by MacroAssembler (and other code that needs
42 // a spare register). The register isn't callee-saved and is not used by the
43 // function calling convention.
44 constexpr Register kScratchRegister = r10;
45 constexpr XMMRegister kScratchDoubleReg = xmm15;
46 constexpr Register kRootRegister = r13;  // callee save
47 
48 constexpr Register kOffHeapTrampolineRegister = kScratchRegister;
49 
50 // Convenience for platform-independent signatures.
51 typedef Operand MemOperand;
52 
53 enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
54 enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };
55 
56 struct SmiIndex {
57   SmiIndex(Register index_register, ScaleFactor scale)
58       : reg(index_register),
59         scale(scale) {}
60   Register reg;
61   ScaleFactor scale;
62 };
63 
64 enum StackArgumentsAccessorReceiverMode {
65   ARGUMENTS_CONTAIN_RECEIVER,
66   ARGUMENTS_DONT_CONTAIN_RECEIVER
67 };
68 
69 class StackArgumentsAccessor BASE_EMBEDDED {
70  public:
71   StackArgumentsAccessor(Register base_reg, int argument_count_immediate,
72                          StackArgumentsAccessorReceiverMode receiver_mode =
73                              ARGUMENTS_CONTAIN_RECEIVER,
74                          int extra_displacement_to_last_argument = 0)
75       : base_reg_(base_reg),
76         argument_count_reg_(no_reg),
77         argument_count_immediate_(argument_count_immediate),
78         receiver_mode_(receiver_mode),
79         extra_displacement_to_last_argument_(
80             extra_displacement_to_last_argument) {}
81 
82   StackArgumentsAccessor(Register base_reg, Register argument_count_reg,
83                          StackArgumentsAccessorReceiverMode receiver_mode =
84                              ARGUMENTS_CONTAIN_RECEIVER,
85                          int extra_displacement_to_last_argument = 0)
86       : base_reg_(base_reg),
87         argument_count_reg_(argument_count_reg),
88         argument_count_immediate_(0),
89         receiver_mode_(receiver_mode),
90         extra_displacement_to_last_argument_(
91             extra_displacement_to_last_argument) {}
92 
93   StackArgumentsAccessor(Register base_reg,
94                          const ParameterCount& parameter_count,
95                          StackArgumentsAccessorReceiverMode receiver_mode =
96                              ARGUMENTS_CONTAIN_RECEIVER,
97                          int extra_displacement_to_last_argument = 0);
98 
99   Operand GetArgumentOperand(int index);
100   Operand GetReceiverOperand() {
101     DCHECK(receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER);
102     return GetArgumentOperand(0);
103   }
104 
105  private:
106   const Register base_reg_;
107   const Register argument_count_reg_;
108   const int argument_count_immediate_;
109   const StackArgumentsAccessorReceiverMode receiver_mode_;
110   const int extra_displacement_to_last_argument_;
111 
112   DISALLOW_IMPLICIT_CONSTRUCTORS(StackArgumentsAccessor);
113 };
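// Illustrative usage sketch (not part of the original header): with the
// argument count in a register, the accessor above is typically constructed
// over rsp and then used to address individual arguments, roughly:
//
//   StackArgumentsAccessor args(rsp, rax);      // rax holds the argument count
//   __ movp(rdx, args.GetReceiverOperand());    // receiver
//   __ movp(rbx, args.GetArgumentOperand(1));   // an argument slot
//
// The registers chosen here are arbitrary; GetArgumentOperand(0) denotes the
// receiver only in the default ARGUMENTS_CONTAIN_RECEIVER mode.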
114 
115 class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
116  public:
117   TurboAssembler(Isolate* isolate, const AssemblerOptions& options,
118                  void* buffer, int buffer_size,
119                  CodeObjectRequired create_code_object)
120       : TurboAssemblerBase(isolate, options, buffer, buffer_size,
121                            create_code_object) {}
122 
123   template <typename Dst, typename... Args>
124   struct AvxHelper {
125     Assembler* assm;
126     // Call a method where the AVX version expects the dst argument to be
127     // duplicated.
128     template <void (Assembler::*avx)(Dst, Dst, Args...),
129               void (Assembler::*no_avx)(Dst, Args...)>
130     void emit(Dst dst, Args... args) {
131       if (CpuFeatures::IsSupported(AVX)) {
132         CpuFeatureScope scope(assm, AVX);
133         (assm->*avx)(dst, dst, args...);
134       } else {
135         (assm->*no_avx)(dst, args...);
136       }
137     }
138 
139     // Call a method where the AVX version expects no duplicated dst argument.
140     template <void (Assembler::*avx)(Dst, Args...),
141               void (Assembler::*no_avx)(Dst, Args...)>
142     void emit(Dst dst, Args... args) {
143       if (CpuFeatures::IsSupported(AVX)) {
144         CpuFeatureScope scope(assm, AVX);
145         (assm->*avx)(dst, args...);
146       } else {
147         (assm->*no_avx)(dst, args...);
148       }
149     }
150   };
151 
152 #define AVX_OP(macro_name, name)                                             \
153   template <typename Dst, typename... Args>                                  \
154   void macro_name(Dst dst, Args... args) {                                   \
155     AvxHelper<Dst, Args...>{this}                                            \
156         .template emit<&Assembler::v##name, &Assembler::name>(dst, args...); \
157   }
158 
159   AVX_OP(Subsd, subsd)
160   AVX_OP(Divss, divss)
161   AVX_OP(Divsd, divsd)
162   AVX_OP(Xorps, xorps)
163   AVX_OP(Xorpd, xorpd)
164   AVX_OP(Movd, movd)
165   AVX_OP(Movq, movq)
166   AVX_OP(Movaps, movaps)
167   AVX_OP(Movapd, movapd)
168   AVX_OP(Movups, movups)
169   AVX_OP(Movmskps, movmskps)
170   AVX_OP(Movmskpd, movmskpd)
171   AVX_OP(Movss, movss)
172   AVX_OP(Movsd, movsd)
173   AVX_OP(Pcmpeqd, pcmpeqd)
174   AVX_OP(Pslld, pslld)
175   AVX_OP(Psllq, psllq)
176   AVX_OP(Psrld, psrld)
177   AVX_OP(Psrlq, psrlq)
178   AVX_OP(Addsd, addsd)
179   AVX_OP(Mulsd, mulsd)
180   AVX_OP(Andps, andps)
181   AVX_OP(Andpd, andpd)
182   AVX_OP(Orpd, orpd)
183   AVX_OP(Cmpeqps, cmpeqps)
184   AVX_OP(Cmpltps, cmpltps)
185   AVX_OP(Cmpleps, cmpleps)
186   AVX_OP(Cmpneqps, cmpneqps)
187   AVX_OP(Cmpnltps, cmpnltps)
188   AVX_OP(Cmpnleps, cmpnleps)
189   AVX_OP(Cmpeqpd, cmpeqpd)
190   AVX_OP(Cmpltpd, cmpltpd)
191   AVX_OP(Cmplepd, cmplepd)
192   AVX_OP(Cmpneqpd, cmpneqpd)
193   AVX_OP(Cmpnltpd, cmpnltpd)
194   AVX_OP(Cmpnlepd, cmpnlepd)
195   AVX_OP(Roundss, roundss)
196   AVX_OP(Roundsd, roundsd)
197   AVX_OP(Sqrtss, sqrtss)
198   AVX_OP(Sqrtsd, sqrtsd)
199   AVX_OP(Ucomiss, ucomiss)
200   AVX_OP(Ucomisd, ucomisd)
201 
202 #undef AVX_OP
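  // Illustrative note (not part of the original header): each AVX_OP above
  // defines a macro-instruction that dispatches on CpuFeatures::IsSupported(AVX)
  // via AvxHelper. For example, after AVX_OP(Subsd, subsd), the call
  //
  //   Subsd(xmm1, xmm2);
  //
  // emits "vsubsd xmm1, xmm1, xmm2" when AVX is available (the first emit
  // overload duplicates the destination) and plain "subsd xmm1, xmm2"
  // otherwise.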
203 
204   void PushReturnAddressFrom(Register src) { pushq(src); }
205   void PopReturnAddressTo(Register dst) { popq(dst); }
206 
207   void Ret();
208 
209   // Return and drop arguments from stack, where the number of arguments
210   // may be bigger than 2^16 - 1.  Requires a scratch register.
211   void Ret(int bytes_dropped, Register scratch);
212 
213   // Load a register with a long value as efficiently as possible.
214   void Set(Register dst, int64_t x);
215   void Set(Operand dst, intptr_t x);
216 
217   // Operations on roots in the root-array.
218   void LoadRoot(Register destination, Heap::RootListIndex index) override;
219   void LoadRoot(Operand destination, Heap::RootListIndex index) {
220     LoadRoot(kScratchRegister, index);
221     movp(destination, kScratchRegister);
222   }
223 
224   void Push(Register src);
225   void Push(Operand src);
226   void Push(Immediate value);
227   void Push(Smi* smi);
228   void Push(Handle<HeapObject> source);
229 
230   // Before calling a C-function from generated code, align arguments on stack.
231   // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
232   // etc., not pushed. The argument count assumes all arguments are word sized.
233   // The number of slots reserved for arguments depends on platform. On Windows
234   // stack slots are reserved for the arguments passed in registers. On other
235   // platforms stack slots are only reserved for the arguments actually passed
236   // on the stack.
237   void PrepareCallCFunction(int num_arguments);
238 
239   // Calls a C function and cleans up the space for arguments allocated
240   // by PrepareCallCFunction. The called function is not allowed to trigger a
241   // garbage collection, since that might move the code and invalidate the
242   // return address (unless this is somehow accounted for by the called
243   // function).
244   void CallCFunction(ExternalReference function, int num_arguments);
245   void CallCFunction(Register function, int num_arguments);
246 
247   // Calculate the number of stack slots to reserve for arguments when calling a
248   // C function.
249   int ArgumentStackSlotsForCFunctionCall(int num_arguments);
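  // Illustrative sketch (not part of the original header): a two-argument
  // C call is typically emitted roughly as
  //
  //   __ PrepareCallCFunction(2);
  //   __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
  //   __ movp(arg_reg_2, rax);
  //   __ CallCFunction(ExternalReference::<target_function>(), 2);
  //
  // where arg_reg_1/arg_reg_2 are the platform argument registers from
  // assembler-x64.h and <target_function> stands in for whatever callback is
  // being invoked.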
250 
251   void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
252                      Label* condition_met,
253                      Label::Distance condition_met_distance = Label::kFar);
254 
255   void Cvtss2sd(XMMRegister dst, XMMRegister src);
256   void Cvtss2sd(XMMRegister dst, Operand src);
257   void Cvtsd2ss(XMMRegister dst, XMMRegister src);
258   void Cvtsd2ss(XMMRegister dst, Operand src);
259   void Cvttsd2si(Register dst, XMMRegister src);
260   void Cvttsd2si(Register dst, Operand src);
261   void Cvttsd2siq(Register dst, XMMRegister src);
262   void Cvttsd2siq(Register dst, Operand src);
263   void Cvttss2si(Register dst, XMMRegister src);
264   void Cvttss2si(Register dst, Operand src);
265   void Cvttss2siq(Register dst, XMMRegister src);
266   void Cvttss2siq(Register dst, Operand src);
267   void Cvtqsi2ss(XMMRegister dst, Register src);
268   void Cvtqsi2ss(XMMRegister dst, Operand src);
269   void Cvtqsi2sd(XMMRegister dst, Register src);
270   void Cvtqsi2sd(XMMRegister dst, Operand src);
271   void Cvtlsi2ss(XMMRegister dst, Register src);
272   void Cvtlsi2ss(XMMRegister dst, Operand src);
273   void Cvtlui2ss(XMMRegister dst, Register src);
274   void Cvtlui2ss(XMMRegister dst, Operand src);
275   void Cvtlui2sd(XMMRegister dst, Register src);
276   void Cvtlui2sd(XMMRegister dst, Operand src);
277   void Cvtqui2ss(XMMRegister dst, Register src);
278   void Cvtqui2ss(XMMRegister dst, Operand src);
279   void Cvtqui2sd(XMMRegister dst, Register src);
280   void Cvtqui2sd(XMMRegister dst, Operand src);
281   void Cvttsd2uiq(Register dst, Operand src, Label* fail = nullptr);
282   void Cvttsd2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
283   void Cvttss2uiq(Register dst, Operand src, Label* fail = nullptr);
284   void Cvttss2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
285 
286   // The cvtsi2sd instruction only writes to the low 64 bits of the dst
287   // register, which hinders register renaming and makes dependence chains
288   // longer. So we use xorpd to clear the dst register before cvtsi2sd.
289   void Cvtlsi2sd(XMMRegister dst, Register src);
290   void Cvtlsi2sd(XMMRegister dst, Operand src);
291 
292   void Lzcntq(Register dst, Register src);
293   void Lzcntq(Register dst, Operand src);
294   void Lzcntl(Register dst, Register src);
295   void Lzcntl(Register dst, Operand src);
296   void Tzcntq(Register dst, Register src);
297   void Tzcntq(Register dst, Operand src);
298   void Tzcntl(Register dst, Register src);
299   void Tzcntl(Register dst, Operand src);
300   void Popcntl(Register dst, Register src);
301   void Popcntl(Register dst, Operand src);
302   void Popcntq(Register dst, Register src);
303   void Popcntq(Register dst, Operand src);
304 
305   // Is the value a tagged smi.
306   Condition CheckSmi(Register src);
307   Condition CheckSmi(Operand src);
308 
309   // Jump to label if the value is a tagged smi.
310   void JumpIfSmi(Register src, Label* on_smi,
311                  Label::Distance near_jump = Label::kFar);
312 
313   void JumpIfEqual(Register a, int32_t b, Label* dest) {
314     cmpl(a, Immediate(b));
315     j(equal, dest);
316   }
317 
318   void JumpIfLessThan(Register a, int32_t b, Label* dest) {
319     cmpl(a, Immediate(b));
320     j(less, dest);
321   }
322 
323   void Move(Register dst, Smi* source);
324 
325   void Move(Operand dst, Smi* source) {
326     Register constant = GetSmiConstant(source);
327     movp(dst, constant);
328   }
329 
330   void Move(Register dst, ExternalReference ext);
331 
332   void Move(XMMRegister dst, uint32_t src);
333   void Move(XMMRegister dst, uint64_t src);
334   void Move(XMMRegister dst, float src) { Move(dst, bit_cast<uint32_t>(src)); }
335   void Move(XMMRegister dst, double src) { Move(dst, bit_cast<uint64_t>(src)); }
336 
337   // Move if the registers are not identical.
338   void Move(Register target, Register source);
339 
340   void Move(Register dst, Handle<HeapObject> source,
341             RelocInfo::Mode rmode = RelocInfo::EMBEDDED_OBJECT);
342   void Move(Operand dst, Handle<HeapObject> source,
343             RelocInfo::Mode rmode = RelocInfo::EMBEDDED_OBJECT);
344 
345   // Loads a pointer into a register with a relocation mode.
346   void Move(Register dst, Address ptr, RelocInfo::Mode rmode) {
347     // This method must not be used with heap object references. The stored
348     // address is not GC safe. Use the handle version instead.
349     DCHECK(rmode > RelocInfo::LAST_GCED_ENUM);
350     movp(dst, ptr, rmode);
351   }
352 
353   // Convert smi to word-size sign-extended value.
354   void SmiUntag(Register dst, Register src);
355   void SmiUntag(Register dst, Operand src);
356 
357   // Loads the address of the external reference into the destination
358   // register.
359   void LoadAddress(Register destination, ExternalReference source);
360 
361   void LoadFromConstantsTable(Register destination,
362                               int constant_index) override;
363   void LoadRootRegisterOffset(Register destination, intptr_t offset) override;
364   void LoadRootRelative(Register destination, int32_t offset) override;
365 
366   // Operand pointing to an external reference.
367   // May emit code to set up the scratch register. The operand is
368   // only guaranteed to be correct as long as the scratch register
369   // isn't changed.
370   // If the operand is used more than once, use a scratch register
371   // that is guaranteed not to be clobbered.
372   Operand ExternalOperand(ExternalReference reference,
373                           Register scratch = kScratchRegister);
374 
375   void Call(Register reg) { call(reg); }
376   void Call(Operand op);
377   void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
378   void Call(Address destination, RelocInfo::Mode rmode);
379   void Call(ExternalReference ext);
380   void Call(Label* target) { call(target); }
381 
382   void RetpolineCall(Register reg);
383   void RetpolineCall(Address destination, RelocInfo::Mode rmode);
384 
385   void Jump(Address destination, RelocInfo::Mode rmode);
386   void Jump(ExternalReference ext);
387   void Jump(Operand op);
388   void Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
389             Condition cc = always);
390 
391   void RetpolineJump(Register reg);
392 
393   void CallForDeoptimization(Address target, int deopt_id,
394                              RelocInfo::Mode rmode) {
395     USE(deopt_id);
396     call(target, rmode);
397   }
398 
399   // Non-SSE2 instructions.
400   void Pextrd(Register dst, XMMRegister src, int8_t imm8);
401   void Pinsrd(XMMRegister dst, Register src, int8_t imm8);
402   void Pinsrd(XMMRegister dst, Operand src, int8_t imm8);
403 
404   void CompareRoot(Register with, Heap::RootListIndex index);
405   void CompareRoot(Operand with, Heap::RootListIndex index);
406 
407   // Generates function and stub prologue code.
408   void StubPrologue(StackFrame::Type type);
409   void Prologue();
410 
411   // Calls Abort(msg) if the condition cc is not satisfied.
412   // Use --debug_code to enable.
413   void Assert(Condition cc, AbortReason reason);
414 
415   // Like Assert(), but without condition.
416   // Use --debug_code to enable.
417   void AssertUnreachable(AbortReason reason);
418 
419   // Abort execution if a 64 bit register containing a 32 bit payload does not
420   // have zeros in the top 32 bits, enabled via --debug-code.
421   void AssertZeroExtended(Register reg);
422 
423   // Like Assert(), but always enabled.
424   void Check(Condition cc, AbortReason reason);
425 
426   // Print a message to stdout and abort execution.
427   void Abort(AbortReason msg);
428 
429   // Check that the stack is aligned.
430   void CheckStackAlignment();
431 
432   // Activation support.
433   void EnterFrame(StackFrame::Type type);
434   void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
435     // Out-of-line constant pool not implemented on x64.
436     UNREACHABLE();
437   }
438   void LeaveFrame(StackFrame::Type type);
439 
440   // Removes the current frame and its arguments from the stack, preserving
441   // the arguments and the return address pushed for the next call. Neither
442   // |callee_args_count| nor |caller_args_count_reg| includes the receiver.
443   // |callee_args_count| is not modified; |caller_args_count_reg| is trashed.
444   void PrepareForTailCall(const ParameterCount& callee_args_count,
445                           Register caller_args_count_reg, Register scratch0,
446                           Register scratch1);
447 
448   inline bool AllowThisStubCall(CodeStub* stub);
449 
450   // Call a code stub. This expects {stub} to be zone-allocated, as it does not
451   // trigger generation of the stub's code object but instead files a
452   // HeapObjectRequest that will be fulfilled after code assembly.
453   void CallStubDelayed(CodeStub* stub);
454 
455   // Call a runtime routine. This expects {centry} to contain a fitting CEntry
456   // builtin for the target runtime function and uses an indirect call.
457   void CallRuntimeWithCEntry(Runtime::FunctionId fid, Register centry);
458 
459   void InitializeRootRegister() {
460     ExternalReference roots_array_start =
461         ExternalReference::roots_array_start(isolate());
462     Move(kRootRegister, roots_array_start);
463     addp(kRootRegister, Immediate(kRootRegisterBias));
464   }
465 
466   void SaveRegisters(RegList registers);
467   void RestoreRegisters(RegList registers);
468 
469   void CallRecordWriteStub(Register object, Register address,
470                            RememberedSetAction remembered_set_action,
471                            SaveFPRegsMode fp_mode);
472 
473   void MoveNumber(Register dst, double value);
474   void MoveNonSmi(Register dst, double value);
475 
476   // Calculate how much stack space (in bytes) is required to store caller
477   // registers, excluding those specified in the arguments.
478   int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
479                                       Register exclusion1 = no_reg,
480                                       Register exclusion2 = no_reg,
481                                       Register exclusion3 = no_reg) const;
482 
483   // PushCallerSaved and PopCallerSaved do not arrange the registers in any
484   // particular order so they are not useful for calls that can cause a GC.
485   // The caller can exclude up to 3 registers that do not need to be saved and
486   // restored.
487 
488   // Push caller saved registers on the stack, and return the number of bytes
489   // by which the stack pointer is adjusted.
490   int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
491                       Register exclusion2 = no_reg,
492                       Register exclusion3 = no_reg);
493   // Restore caller saved registers from the stack, and return the number of
494   // bytes by which the stack pointer is adjusted.
495   int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
496                      Register exclusion2 = no_reg,
497                      Register exclusion3 = no_reg);
498 
499   // Compute the start of the generated instruction stream from the current PC.
500   // This is an alternative to embedding the {CodeObject} handle as a reference.
501   void ComputeCodeStartAddress(Register dst);
502 
503   void ResetSpeculationPoisonRegister();
504 
505  protected:
506   static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
507   int smi_count = 0;
508   int heap_object_count = 0;
509 
510   int64_t RootRegisterDelta(ExternalReference other);
511 
512   // Returns a register holding the smi value. The register MUST NOT be
513   // modified. It may be the "smi 1 constant" register.
514   Register GetSmiConstant(Smi* value);
515 };
516 
517 // MacroAssembler implements a collection of frequently used macros.
518 class MacroAssembler : public TurboAssembler {
519  public:
520   // TODO(titzer): inline this utility constructor.
521   MacroAssembler(Isolate* isolate, void* buffer, int size,
522                  CodeObjectRequired create_code_object)
523       : MacroAssembler(isolate, AssemblerOptions::Default(isolate), buffer,
524                        size, create_code_object) {}
525   MacroAssembler(Isolate* isolate, const AssemblerOptions& options,
526                  void* buffer, int size, CodeObjectRequired create_code_object);
527 
528   // Loads and stores the value of an external reference.
529   // Special case code for load and store to take advantage of
530   // load_rax/store_rax if possible/necessary.
531   // For other operations, just use:
532   //   Operand operand = ExternalOperand(extref);
533   //   operation(operand, ..);
534   void Load(Register destination, ExternalReference source);
535   void Store(ExternalReference destination, Register source);
536 
537   // Pushes the address of the external reference onto the stack.
538   void PushAddress(ExternalReference source);
539 
540   // Operations on roots in the root-array.
541   // Load a root value where the index (or part of it) is variable.
542   // The variable_offset register is added to the fixed_offset value
543   // to get the index into the root-array.
544   void PushRoot(Heap::RootListIndex index);
545 
546   // Compare the object in a register to a value and jump if they are equal.
547   void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal,
548                   Label::Distance if_equal_distance = Label::kFar) {
549     CompareRoot(with, index);
550     j(equal, if_equal, if_equal_distance);
551   }
552   void JumpIfRoot(Operand with, Heap::RootListIndex index, Label* if_equal,
553                   Label::Distance if_equal_distance = Label::kFar) {
554     CompareRoot(with, index);
555     j(equal, if_equal, if_equal_distance);
556   }
557 
558   // Compare the object in a register to a value and jump if they are not equal.
559   void JumpIfNotRoot(Register with, Heap::RootListIndex index,
560                      Label* if_not_equal,
561                      Label::Distance if_not_equal_distance = Label::kFar) {
562     CompareRoot(with, index);
563     j(not_equal, if_not_equal, if_not_equal_distance);
564   }
565   void JumpIfNotRoot(Operand with, Heap::RootListIndex index,
566                      Label* if_not_equal,
567                      Label::Distance if_not_equal_distance = Label::kFar) {
568     CompareRoot(with, index);
569     j(not_equal, if_not_equal, if_not_equal_distance);
570   }
571 
572 
573 // ---------------------------------------------------------------------------
574 // GC Support
575 
576   // Notify the garbage collector that we wrote a pointer into an object.
577   // |object| is the object being stored into, |value| is the object being
578   // stored.  value and scratch registers are clobbered by the operation.
579   // The offset is the offset from the start of the object, not the offset from
580   // the tagged HeapObject pointer.  For use with FieldOperand(reg, off).
581   void RecordWriteField(
582       Register object, int offset, Register value, Register scratch,
583       SaveFPRegsMode save_fp,
584       RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
585       SmiCheck smi_check = INLINE_SMI_CHECK);
586 
587   // For page containing |object| mark region covering |address|
588   // dirty. |object| is the object being stored into, |value| is the
589   // object being stored. The address and value registers are clobbered by the
590   // operation.  RecordWrite filters out smis so it does not update
591   // the write barrier if the value is a smi.
592   void RecordWrite(
593       Register object, Register address, Register value, SaveFPRegsMode save_fp,
594       RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
595       SmiCheck smi_check = INLINE_SMI_CHECK);
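  // Illustrative sketch (not part of the original header): a field store that
  // may write a heap pointer is typically followed by the write barrier, e.g.
  //
  //   __ movp(FieldOperand(rbx, offset), rax);
  //   __ RecordWriteField(rbx, offset, rax, rcx, kDontSaveFPRegs);
  //
  // with rbx holding the object, rax the stored value, rcx a scratch register,
  // and |offset| the untagged field offset also used with FieldOperand (see
  // the helpers at the end of this file).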
596 
597   // Frame restart support.
598   void MaybeDropFrames();
599 
600   // Enter specific kind of exit frame; either in normal or
601   // debug mode. Expects the number of arguments in register rax and
602   // sets up the number of arguments in register rdi and the pointer
603   // to the first argument in register rsi.
604   //
605   // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
606   // accessible via StackSpaceOperand.
607   void EnterExitFrame(int arg_stack_space = 0, bool save_doubles = false,
608                       StackFrame::Type frame_type = StackFrame::EXIT);
609 
610   // Enter specific kind of exit frame. Allocates arg_stack_space * kPointerSize
611   // memory (not GCed) on the stack accessible via StackSpaceOperand.
612   void EnterApiExitFrame(int arg_stack_space);
613 
614   // Leave the current exit frame. Expects/provides the return value in
615   // register rax:rdx (untouched) and the pointer to the first
616   // argument in register rsi (if pop_arguments == true).
617   void LeaveExitFrame(bool save_doubles = false, bool pop_arguments = true);
618 
619   // Leave the current exit frame. Expects/provides the return value in
620   // register rax (untouched).
621   void LeaveApiExitFrame();
622 
623   // Push and pop the registers that can hold pointers.
624   void PushSafepointRegisters() { Pushad(); }
625   void PopSafepointRegisters() { Popad(); }
626 
627   // ---------------------------------------------------------------------------
628   // JavaScript invokes
629 
630   // Invoke the JavaScript function code by either calling or jumping.
631   void InvokeFunctionCode(Register function, Register new_target,
632                           const ParameterCount& expected,
633                           const ParameterCount& actual, InvokeFlag flag);
634 
635   // On function call, call into the debugger if necessary.
636   void CheckDebugHook(Register fun, Register new_target,
637                       const ParameterCount& expected,
638                       const ParameterCount& actual);
639 
640   // Invoke the JavaScript function in the given register. Changes the
641   // current context to the context in the function before invoking.
642   void InvokeFunction(Register function, Register new_target,
643                       const ParameterCount& actual, InvokeFlag flag);
644 
645   void InvokeFunction(Register function, Register new_target,
646                       const ParameterCount& expected,
647                       const ParameterCount& actual, InvokeFlag flag);
648 
649   // ---------------------------------------------------------------------------
650   // Conversions between tagged smi values and non-tagged integer values.
651 
652   // Tag a word-size value. The result must be known to be a valid smi value.
653   void SmiTag(Register dst, Register src);
654 
655   // Simple comparison of smis.  Both sides must be known smis to use these,
656   // otherwise use Cmp.
657   void SmiCompare(Register smi1, Register smi2);
658   void SmiCompare(Register dst, Smi* src);
659   void SmiCompare(Register dst, Operand src);
660   void SmiCompare(Operand dst, Register src);
661   void SmiCompare(Operand dst, Smi* src);
662 
663   // Functions performing a check on a known or potential smi. Returns
664   // a condition that is satisfied if the check is successful.
665 
666   // Test-and-jump functions. Typically combines a check function
667   // above with a conditional jump.
668 
669   // Jump to label if the value is not a tagged smi.
670   void JumpIfNotSmi(Register src,
671                     Label* on_not_smi,
672                     Label::Distance near_jump = Label::kFar);
673 
674   // Jump to label if the value is not a tagged smi.
675   void JumpIfNotSmi(Operand src, Label* on_not_smi,
676                     Label::Distance near_jump = Label::kFar);
677 
678   // Operations on tagged smi values.
679 
680   // Smis represent a subset of integers. The subset is always equivalent to
681   // a two's complement interpretation of a fixed number of bits.
682 
683   // Add an integer constant to a tagged smi, giving a tagged smi as result.
684   // No overflow testing on the result is done.
685   void SmiAddConstant(Operand dst, Smi* constant);
686 
687   // Specialized operations
688 
689   // Converts, if necessary, a smi to a combination of number and
690   // multiplier to be used as a scaled index.
691   // The src register contains a *positive* smi value. The shift is the
692   // power of two to multiply the index value by (e.g.
693   // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
694   // The returned index register may be either src or dst, depending
695   // on what is most efficient. If src and dst are different registers,
696   // src is always unchanged.
697   SmiIndex SmiToIndex(Register dst, Register src, int shift);
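  // Illustrative sketch (not part of the original header): indexing a
  // FixedArray by a positive smi held in rcx might look roughly like
  //
  //   SmiIndex index = __ SmiToIndex(rbx, rcx, kPointerSizeLog2);
  //   __ movp(rax, FieldOperand(rdx, index.reg, index.scale,
  //                             FixedArray::kHeaderSize));
  //
  // i.e. the returned register/scale pair plugs directly into the indexed
  // FieldOperand helper declared at the end of this file.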
698 
699   // ---------------------------------------------------------------------------
700   // Macro instructions.
701 
702   // Load/store with specific representation.
703   void Load(Register dst, Operand src, Representation r);
704   void Store(Operand dst, Register src, Representation r);
705 
706   void Cmp(Register dst, Handle<Object> source);
707   void Cmp(Operand dst, Handle<Object> source);
708   void Cmp(Register dst, Smi* src);
709   void Cmp(Operand dst, Smi* src);
710 
711   // Emit code to discard a non-negative number of pointer-sized elements
712   // from the stack, clobbering only the rsp register.
713   void Drop(int stack_elements);
714   // Emit code to discard a positive number of pointer-sized elements
715   // from the stack under the return address which remains on the top,
716   // clobbering the rsp register.
717   void DropUnderReturnAddress(int stack_elements,
718                               Register scratch = kScratchRegister);
719 
720   void PushQuad(Operand src);
721   void PushImm32(int32_t imm32);
722   void Pop(Register dst);
723   void Pop(Operand dst);
724   void PopQuad(Operand dst);
725 
726   // ---------------------------------------------------------------------------
727   // SIMD macros.
728   void Absps(XMMRegister dst);
729   void Negps(XMMRegister dst);
730   void Abspd(XMMRegister dst);
731   void Negpd(XMMRegister dst);
732   // Generates a trampoline to jump to the off-heap instruction stream.
733   void JumpToInstructionStream(Address entry);
734 
735   // Non-x64 instructions.
736   // Push/pop all general purpose registers.
737   // Does not push rsp/rbp nor any of the assembler's special purpose registers
738   // (kScratchRegister, kRootRegister).
739   void Pushad();
740   void Popad();
741 
742   // Compare object type for heap object.
743   // Always use unsigned comparisons: above and below, not less and greater.
744   // Incoming register is heap_object and outgoing register is map.
745   // They may be the same register, and may be kScratchRegister.
746   void CmpObjectType(Register heap_object, InstanceType type, Register map);
747 
748   // Compare instance type for map.
749   // Always use unsigned comparisons: above and below, not less and greater.
750   void CmpInstanceType(Register map, InstanceType type);
751 
752   void DoubleToI(Register result_reg, XMMRegister input_reg,
753                  XMMRegister scratch, Label* lost_precision, Label* is_nan,
754                  Label::Distance dst = Label::kFar);
755 
756   template<typename Field>
757   void DecodeField(Register reg) {
758     static const int shift = Field::kShift;
759     static const int mask = Field::kMask >> Field::kShift;
760     if (shift != 0) {
761       shrp(reg, Immediate(shift));
762     }
763     andp(reg, Immediate(mask));
764   }
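  // Illustrative note (not part of the original header): DecodeField works
  // with any BitField-style type that defines kShift and kMask, e.g.
  //
  //   __ movl(rcx, FieldOperand(rbx, Map::kBitField2Offset));
  //   __ DecodeField<Map::ElementsKindBits>(rcx);
  //
  // leaves just the encoded field value in rcx (the field names here are for
  // illustration only).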
765 
766   // Abort execution if argument is a smi, enabled via --debug-code.
767   void AssertNotSmi(Register object);
768 
769   // Abort execution if argument is not a smi, enabled via --debug-code.
770   void AssertSmi(Register object);
771   void AssertSmi(Operand object);
772 
773   // Abort execution if argument is not a Constructor, enabled via --debug-code.
774   void AssertConstructor(Register object);
775 
776   // Abort execution if argument is not a JSFunction, enabled via --debug-code.
777   void AssertFunction(Register object);
778 
779   // Abort execution if argument is not a JSBoundFunction,
780   // enabled via --debug-code.
781   void AssertBoundFunction(Register object);
782 
783   // Abort execution if argument is not a JSGeneratorObject (or subclass),
784   // enabled via --debug-code.
785   void AssertGeneratorObject(Register object);
786 
787   // Abort execution if argument is not undefined or an AllocationSite, enabled
788   // via --debug-code.
789   void AssertUndefinedOrAllocationSite(Register object);
790 
791   // ---------------------------------------------------------------------------
792   // Exception handling
793 
794   // Push a new stack handler and link it into stack handler chain.
795   void PushStackHandler();
796 
797   // Unlink the stack handler on top of the stack from the stack handler chain.
798   void PopStackHandler();
799 
800   // ---------------------------------------------------------------------------
801   // Support functions.
802 
803   // Load the global proxy from the current context.
804   void LoadGlobalProxy(Register dst) {
805     LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst);
806   }
807 
808   // Load the native context slot with the current index.
809   void LoadNativeContextSlot(int index, Register dst);
810 
811   // ---------------------------------------------------------------------------
812   // Runtime calls
813 
814   // Call a code stub.
815   // The code object is generated immediately, in contrast to
816   // TurboAssembler::CallStubDelayed.
817   void CallStub(CodeStub* stub);
818 
819   // Tail call a code stub (jump).
820   void TailCallStub(CodeStub* stub);
821 
822   // Call a runtime routine.
823   void CallRuntime(const Runtime::Function* f,
824                    int num_arguments,
825                    SaveFPRegsMode save_doubles = kDontSaveFPRegs);
826 
827   // Convenience function: Same as above, but takes the fid instead.
828   void CallRuntime(Runtime::FunctionId fid,
829                    SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
830     const Runtime::Function* function = Runtime::FunctionForId(fid);
831     CallRuntime(function, function->nargs, save_doubles);
832   }
833 
834   // Convenience function: Same as above, but takes the fid instead.
835   void CallRuntime(Runtime::FunctionId fid, int num_arguments,
836                    SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
837     CallRuntime(Runtime::FunctionForId(fid), num_arguments, save_doubles);
838   }
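  // Illustrative note (not part of the original header): most call sites use
  // the FunctionId form, e.g.
  //
  //   __ CallRuntime(Runtime::kStackGuard);
  //
  // which looks up the Runtime::Function descriptor and forwards its declared
  // argument count to the full overload above.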
839 
840   // Convenience function: tail call a runtime routine (jump)
841   void TailCallRuntime(Runtime::FunctionId fid);
842 
843   // Jump to a runtime routine.
844   void JumpToExternalReference(const ExternalReference& ext,
845                                bool builtin_exit_frame = false);
846 
847   // ---------------------------------------------------------------------------
848   // StatsCounter support
849   void IncrementCounter(StatsCounter* counter, int value);
850   void DecrementCounter(StatsCounter* counter, int value);
851 
852   // ---------------------------------------------------------------------------
853   // In-place weak references.
854   void LoadWeakValue(Register in_out, Label* target_if_cleared);
855 
856   // ---------------------------------------------------------------------------
857   // Debugging
858 
859   static int SafepointRegisterStackIndex(Register reg) {
860     return SafepointRegisterStackIndex(reg.code());
861   }
862 
863   void EnterBuiltinFrame(Register context, Register target, Register argc);
864   void LeaveBuiltinFrame(Register context, Register target, Register argc);
865 
866  private:
867   // Order in which general registers are pushed by Pushad:
868   // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
869   static const int kSafepointPushRegisterIndices[Register::kNumRegisters];
870   static const int kNumSafepointSavedRegisters = 12;
871 
872   // Helper functions for generating invokes.
873   void InvokePrologue(const ParameterCount& expected,
874                       const ParameterCount& actual, Label* done,
875                       bool* definitely_mismatches, InvokeFlag flag,
876                       Label::Distance near_jump);
877 
878   void EnterExitFramePrologue(bool save_rax, StackFrame::Type frame_type);
879 
880   // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
881   // accessible via StackSpaceOperand.
882   void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);
883 
884   void LeaveExitFrameEpilogue();
885 
886   // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace.
887   void InNewSpace(Register object,
888                   Register scratch,
889                   Condition cc,
890                   Label* branch,
891                   Label::Distance distance = Label::kFar);
892 
893   // Compute memory operands for safepoint stack slots.
894   static int SafepointRegisterStackIndex(int reg_code) {
895     return kNumSafepointRegisters - kSafepointPushRegisterIndices[reg_code] - 1;
896   }
897 
898   // Needs access to SafepointRegisterStackIndex for compiled frame
899   // traversal.
900   friend class StandardFrame;
901 };
902 
903 // -----------------------------------------------------------------------------
904 // Static helper functions.
905 
906 // Generate an Operand for loading a field from an object.
907 inline Operand FieldOperand(Register object, int offset) {
908   return Operand(object, offset - kHeapObjectTag);
909 }
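// Illustrative note (not part of the original header): FieldOperand folds the
// heap-object tag into the displacement, so a map load is written as
//
//   __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
//
// rather than manually subtracting kHeapObjectTag from the offset.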
910 
911 
912 // Generate an Operand for loading an indexed field from an object.
913 inline Operand FieldOperand(Register object,
914                             Register index,
915                             ScaleFactor scale,
916                             int offset) {
917   return Operand(object, index, scale, offset - kHeapObjectTag);
918 }
919 
920 
921 inline Operand ContextOperand(Register context, int index) {
922   return Operand(context, Context::SlotOffset(index));
923 }
924 
925 
926 inline Operand ContextOperand(Register context, Register index) {
927   return Operand(context, index, times_pointer_size, Context::SlotOffset(0));
928 }
929 
930 
931 inline Operand NativeContextOperand() {
932   return ContextOperand(rsi, Context::NATIVE_CONTEXT_INDEX);
933 }
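// Illustrative sketch (not part of the original header): the context helpers
// compose, e.g. a slot of the current native context can be read roughly as
//
//   __ movp(rax, NativeContextOperand());
//   __ movp(rax, ContextOperand(rax, Context::ARRAY_FUNCTION_INDEX));
//
// (the particular slot index is only an example; MacroAssembler also offers
// LoadNativeContextSlot for this pattern).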
934 
935 
936 // Provides access to exit frame stack space (not GCed).
937 inline Operand StackSpaceOperand(int index) {
938 #ifdef _WIN64
939   const int kShadowSpace = 4;
940   return Operand(rsp, (index + kShadowSpace) * kPointerSize);
941 #else
942   return Operand(rsp, index * kPointerSize);
943 #endif
944 }
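// Illustrative note (not part of the original header): the index is relative
// to the space reserved by EnterExitFrame/EnterApiExitFrame. On Win64 the
// first four slots are skipped because they form the caller-allocated shadow
// space for register arguments, so StackSpaceOperand(0) resolves to
// rsp + 4 * kPointerSize there and to plain rsp elsewhere.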
945 
946 
947 inline Operand StackOperandForReturnAddress(int32_t disp) {
948   return Operand(rsp, disp);
949 }
950 
951 #define ACCESS_MASM(masm) masm->
952 
953 }  // namespace internal
954 }  // namespace v8
955 
956 #endif  // V8_X64_MACRO_ASSEMBLER_X64_H_
957