// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5 #ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
6 #error This header must be included via macro-assembler.h
7 #endif
8
9 #ifndef V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
10 #define V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
11
12 #include "src/base/flags.h"
13 #include "src/codegen/bailout-reason.h"
14 #include "src/codegen/x64/assembler-x64.h"
15 #include "src/common/globals.h"
16 #include "src/objects/contexts.h"
17
18 namespace v8 {
19 namespace internal {
20
21 // Convenience for platform-independent signatures.
22 using MemOperand = Operand;
23
24 class StringConstantBase;
25
26 enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
27 enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };
28
29 struct SmiIndex {
SmiIndexSmiIndex30 SmiIndex(Register index_register, ScaleFactor scale)
31 : reg(index_register), scale(scale) {}
32 Register reg;
33 ScaleFactor scale;
34 };
35
// TODO(victorgomes): Move definition to macro-assembler.h, once all other
// platforms are updated.
// Selects which stack limit to check against: the interrupt limit (which may
// be lowered to request an interrupt) or the real OS stack limit.
enum class StackLimitKind { kInterruptStackLimit, kRealStackLimit };
39
40 // Convenient class to access arguments below the stack pointer.
41 class StackArgumentsAccessor {
42 public:
43 // argc = the number of arguments not including the receiver.
StackArgumentsAccessor(Register argc)44 explicit StackArgumentsAccessor(Register argc) : argc_(argc) {
45 DCHECK_NE(argc_, no_reg);
46 }
47
48 // Argument 0 is the receiver (despite argc not including the receiver).
49 Operand operator[](int index) const { return GetArgumentOperand(index); }
50
51 Operand GetArgumentOperand(int index) const;
GetReceiverOperand()52 Operand GetReceiverOperand() const { return GetArgumentOperand(0); }
53
54 private:
55 const Register argc_;
56
57 DISALLOW_IMPLICIT_CONSTRUCTORS(StackArgumentsAccessor);
58 };
59
60 class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
61 public:
62 using TurboAssemblerBase::TurboAssemblerBase;
63
64 template <typename Dst, typename... Args>
65 struct AvxHelper {
66 Assembler* assm;
67 base::Optional<CpuFeature> feature = base::nullopt;
68 // Call a method where the AVX version expects the dst argument to be
69 // duplicated.
70 template <void (Assembler::*avx)(Dst, Dst, Args...),
71 void (Assembler::*no_avx)(Dst, Args...)>
emitAvxHelper72 void emit(Dst dst, Args... args) {
73 if (CpuFeatures::IsSupported(AVX)) {
74 CpuFeatureScope scope(assm, AVX);
75 (assm->*avx)(dst, dst, args...);
76 } else if (feature.has_value()) {
77 DCHECK(CpuFeatures::IsSupported(*feature));
78 CpuFeatureScope scope(assm, *feature);
79 (assm->*no_avx)(dst, args...);
80 } else {
81 (assm->*no_avx)(dst, args...);
82 }
83 }
84
85 // Call a method where the AVX version expects no duplicated dst argument.
86 template <void (Assembler::*avx)(Dst, Args...),
87 void (Assembler::*no_avx)(Dst, Args...)>
emitAvxHelper88 void emit(Dst dst, Args... args) {
89 if (CpuFeatures::IsSupported(AVX)) {
90 CpuFeatureScope scope(assm, AVX);
91 (assm->*avx)(dst, args...);
92 } else if (feature.has_value()) {
93 DCHECK(CpuFeatures::IsSupported(*feature));
94 CpuFeatureScope scope(assm, *feature);
95 (assm->*no_avx)(dst, args...);
96 } else {
97 (assm->*no_avx)(dst, args...);
98 }
99 }
100 };
101
102 #define AVX_OP(macro_name, name) \
103 template <typename Dst, typename... Args> \
104 void macro_name(Dst dst, Args... args) { \
105 AvxHelper<Dst, Args...>{this} \
106 .template emit<&Assembler::v##name, &Assembler::name>(dst, args...); \
107 }
108
109 #define AVX_OP_SSE3(macro_name, name) \
110 template <typename Dst, typename... Args> \
111 void macro_name(Dst dst, Args... args) { \
112 AvxHelper<Dst, Args...>{this, base::Optional<CpuFeature>(SSE3)} \
113 .template emit<&Assembler::v##name, &Assembler::name>(dst, args...); \
114 }
115
116 #define AVX_OP_SSSE3(macro_name, name) \
117 template <typename Dst, typename... Args> \
118 void macro_name(Dst dst, Args... args) { \
119 AvxHelper<Dst, Args...>{this, base::Optional<CpuFeature>(SSSE3)} \
120 .template emit<&Assembler::v##name, &Assembler::name>(dst, args...); \
121 }
122
123 #define AVX_OP_SSE4_1(macro_name, name) \
124 template <typename Dst, typename... Args> \
125 void macro_name(Dst dst, Args... args) { \
126 AvxHelper<Dst, Args...>{this, base::Optional<CpuFeature>(SSE4_1)} \
127 .template emit<&Assembler::v##name, &Assembler::name>(dst, args...); \
128 }
129 #define AVX_OP_SSE4_2(macro_name, name) \
130 template <typename Dst, typename... Args> \
131 void macro_name(Dst dst, Args... args) { \
132 AvxHelper<Dst, Args...>{this, base::Optional<CpuFeature>(SSE4_2)} \
133 .template emit<&Assembler::v##name, &Assembler::name>(dst, args...); \
134 }
AVX_OP(Subsd,subsd)135 AVX_OP(Subsd, subsd)
136 AVX_OP(Divss, divss)
137 AVX_OP(Divsd, divsd)
138 AVX_OP(Orps, orps)
139 AVX_OP(Xorps, xorps)
140 AVX_OP(Xorpd, xorpd)
141 AVX_OP(Movd, movd)
142 AVX_OP(Movq, movq)
143 AVX_OP(Movaps, movaps)
144 AVX_OP(Movapd, movapd)
145 AVX_OP(Movups, movups)
146 AVX_OP(Movmskps, movmskps)
147 AVX_OP(Movmskpd, movmskpd)
148 AVX_OP(Pmovmskb, pmovmskb)
149 AVX_OP(Movss, movss)
150 AVX_OP(Movsd, movsd)
151 AVX_OP(Movdqu, movdqu)
152 AVX_OP(Movlps, movlps)
153 AVX_OP(Movhps, movhps)
154 AVX_OP(Pcmpeqb, pcmpeqb)
155 AVX_OP(Pcmpeqw, pcmpeqw)
156 AVX_OP(Pcmpeqd, pcmpeqd)
157 AVX_OP(Pcmpgtb, pcmpgtb)
158 AVX_OP(Pcmpgtw, pcmpgtw)
159 AVX_OP(Pmaxsw, pmaxsw)
160 AVX_OP(Pmaxub, pmaxub)
161 AVX_OP(Pminsw, pminsw)
162 AVX_OP(Pminub, pminub)
163 AVX_OP(Addss, addss)
164 AVX_OP(Addsd, addsd)
165 AVX_OP(Mulsd, mulsd)
166 AVX_OP(Andps, andps)
167 AVX_OP(Andnps, andnps)
168 AVX_OP(Andpd, andpd)
169 AVX_OP(Andnpd, andnpd)
170 AVX_OP(Orpd, orpd)
171 AVX_OP(Cmpeqps, cmpeqps)
172 AVX_OP(Cmpltps, cmpltps)
173 AVX_OP(Cmpleps, cmpleps)
174 AVX_OP(Cmpneqps, cmpneqps)
175 AVX_OP(Cmpnltps, cmpnltps)
176 AVX_OP(Cmpnleps, cmpnleps)
177 AVX_OP(Cmpeqpd, cmpeqpd)
178 AVX_OP(Cmpltpd, cmpltpd)
179 AVX_OP(Cmplepd, cmplepd)
180 AVX_OP(Cmpneqpd, cmpneqpd)
181 AVX_OP(Cmpnltpd, cmpnltpd)
182 AVX_OP(Cmpnlepd, cmpnlepd)
183 AVX_OP(Sqrtss, sqrtss)
184 AVX_OP(Sqrtsd, sqrtsd)
185 AVX_OP(Sqrtps, sqrtps)
186 AVX_OP(Sqrtpd, sqrtpd)
187 AVX_OP(Cvttps2dq, cvttps2dq)
188 AVX_OP(Ucomiss, ucomiss)
189 AVX_OP(Ucomisd, ucomisd)
190 AVX_OP(Pand, pand)
191 AVX_OP(Por, por)
192 AVX_OP(Pxor, pxor)
193 AVX_OP(Psubb, psubb)
194 AVX_OP(Psubw, psubw)
195 AVX_OP(Psubd, psubd)
196 AVX_OP(Psubq, psubq)
197 AVX_OP(Psubsb, psubsb)
198 AVX_OP(Psubsw, psubsw)
199 AVX_OP(Psubusb, psubusb)
200 AVX_OP(Psubusw, psubusw)
201 AVX_OP(Pslld, pslld)
202 AVX_OP(Pavgb, pavgb)
203 AVX_OP(Pavgw, pavgw)
204 AVX_OP(Psraw, psraw)
205 AVX_OP(Psrad, psrad)
206 AVX_OP(Psllw, psllw)
207 AVX_OP(Psllq, psllq)
208 AVX_OP(Psrlw, psrlw)
209 AVX_OP(Psrld, psrld)
210 AVX_OP(Psrlq, psrlq)
211 AVX_OP(Pmaddwd, pmaddwd)
212 AVX_OP(Paddb, paddb)
213 AVX_OP(Paddw, paddw)
214 AVX_OP(Paddd, paddd)
215 AVX_OP(Paddq, paddq)
216 AVX_OP(Paddsb, paddsb)
217 AVX_OP(Paddsw, paddsw)
218 AVX_OP(Paddusb, paddusb)
219 AVX_OP(Paddusw, paddusw)
220 AVX_OP(Pcmpgtd, pcmpgtd)
221 AVX_OP(Pmullw, pmullw)
222 AVX_OP(Pmuludq, pmuludq)
223 AVX_OP(Addpd, addpd)
224 AVX_OP(Subpd, subpd)
225 AVX_OP(Mulpd, mulpd)
226 AVX_OP(Minps, minps)
227 AVX_OP(Minpd, minpd)
228 AVX_OP(Divpd, divpd)
229 AVX_OP(Maxps, maxps)
230 AVX_OP(Maxpd, maxpd)
231 AVX_OP(Cvtdq2ps, cvtdq2ps)
232 AVX_OP(Rcpps, rcpps)
233 AVX_OP(Rsqrtps, rsqrtps)
234 AVX_OP(Addps, addps)
235 AVX_OP(Subps, subps)
236 AVX_OP(Mulps, mulps)
237 AVX_OP(Divps, divps)
238 AVX_OP(Pshuflw, pshuflw)
239 AVX_OP(Pshufhw, pshufhw)
240 AVX_OP(Packsswb, packsswb)
241 AVX_OP(Packuswb, packuswb)
242 AVX_OP(Packssdw, packssdw)
243 AVX_OP(Punpcklbw, punpcklbw)
244 AVX_OP(Punpcklwd, punpcklwd)
245 AVX_OP(Punpckldq, punpckldq)
246 AVX_OP(Punpckhbw, punpckhbw)
247 AVX_OP(Punpckhwd, punpckhwd)
248 AVX_OP(Punpckhdq, punpckhdq)
249 AVX_OP(Punpcklqdq, punpcklqdq)
250 AVX_OP(Punpckhqdq, punpckhqdq)
251 AVX_OP(Pshufd, pshufd)
252 AVX_OP(Cmpps, cmpps)
253 AVX_OP(Cmppd, cmppd)
254 AVX_OP(Movlhps, movlhps)
255 AVX_OP_SSE3(Haddps, haddps)
256 AVX_OP_SSE3(Movddup, movddup)
257 AVX_OP_SSSE3(Phaddd, phaddd)
258 AVX_OP_SSSE3(Phaddw, phaddw)
259 AVX_OP_SSSE3(Pshufb, pshufb)
260 AVX_OP_SSSE3(Psignb, psignb)
261 AVX_OP_SSSE3(Psignw, psignw)
262 AVX_OP_SSSE3(Psignd, psignd)
263 AVX_OP_SSSE3(Palignr, palignr)
264 AVX_OP_SSSE3(Pabsb, pabsb)
265 AVX_OP_SSSE3(Pabsw, pabsw)
266 AVX_OP_SSSE3(Pabsd, pabsd)
267 AVX_OP_SSE4_1(Pcmpeqq, pcmpeqq)
268 AVX_OP_SSE4_1(Packusdw, packusdw)
269 AVX_OP_SSE4_1(Pminsb, pminsb)
270 AVX_OP_SSE4_1(Pminsd, pminsd)
271 AVX_OP_SSE4_1(Pminuw, pminuw)
272 AVX_OP_SSE4_1(Pminud, pminud)
273 AVX_OP_SSE4_1(Pmaxsb, pmaxsb)
274 AVX_OP_SSE4_1(Pmaxsd, pmaxsd)
275 AVX_OP_SSE4_1(Pmaxuw, pmaxuw)
276 AVX_OP_SSE4_1(Pmaxud, pmaxud)
277 AVX_OP_SSE4_1(Pmulld, pmulld)
278 AVX_OP_SSE4_1(Extractps, extractps)
279 AVX_OP_SSE4_1(Insertps, insertps)
280 AVX_OP_SSE4_1(Pinsrq, pinsrq)
281 AVX_OP_SSE4_1(Pblendw, pblendw)
282 AVX_OP_SSE4_1(Ptest, ptest)
283 AVX_OP_SSE4_1(Pmovsxbw, pmovsxbw)
284 AVX_OP_SSE4_1(Pmovsxwd, pmovsxwd)
285 AVX_OP_SSE4_1(Pmovsxdq, pmovsxdq)
286 AVX_OP_SSE4_1(Pmovzxbw, pmovzxbw)
287 AVX_OP_SSE4_1(Pmovzxwd, pmovzxwd)
288 AVX_OP_SSE4_1(Pmovzxdq, pmovzxdq)
289 AVX_OP_SSE4_1(Pextrb, pextrb)
290 AVX_OP_SSE4_1(Pextrw, pextrw)
291 AVX_OP_SSE4_1(Pextrq, pextrq)
292 AVX_OP_SSE4_1(Roundps, roundps)
293 AVX_OP_SSE4_1(Roundpd, roundpd)
294 AVX_OP_SSE4_1(Roundss, roundss)
295 AVX_OP_SSE4_1(Roundsd, roundsd)
296 AVX_OP_SSE4_2(Pcmpgtq, pcmpgtq)
297
298 #undef AVX_OP
299
300 void PushReturnAddressFrom(Register src) { pushq(src); }
PopReturnAddressTo(Register dst)301 void PopReturnAddressTo(Register dst) { popq(dst); }
302
303 void Ret();
304
305 // Return and drop arguments from stack, where the number of arguments
306 // may be bigger than 2^16 - 1. Requires a scratch register.
307 void Ret(int bytes_dropped, Register scratch);
308
309 // Load a register with a long value as efficiently as possible.
310 void Set(Register dst, int64_t x);
311 void Set(Operand dst, intptr_t x);
312
313 // Operations on roots in the root-array.
314 void LoadRoot(Register destination, RootIndex index) override;
LoadRoot(Operand destination,RootIndex index)315 void LoadRoot(Operand destination, RootIndex index) {
316 LoadRoot(kScratchRegister, index);
317 movq(destination, kScratchRegister);
318 }
319
320 void Push(Register src);
321 void Push(Operand src);
322 void Push(Immediate value);
323 void Push(Smi smi);
324 void Push(Handle<HeapObject> source);
325
326 enum class PushArrayOrder { kNormal, kReverse };
327 // `array` points to the first element (the lowest address).
328 // `array` and `size` are not modified.
329 void PushArray(Register array, Register size, Register scratch,
330 PushArrayOrder order = PushArrayOrder::kNormal);
331
332 // Before calling a C-function from generated code, align arguments on stack.
333 // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
334 // etc., not pushed. The argument count assumes all arguments are word sized.
335 // The number of slots reserved for arguments depends on platform. On Windows
336 // stack slots are reserved for the arguments passed in registers. On other
337 // platforms stack slots are only reserved for the arguments actually passed
338 // on the stack.
339 void PrepareCallCFunction(int num_arguments);
340
341 // Calls a C function and cleans up the space for arguments allocated
342 // by PrepareCallCFunction. The called function is not allowed to trigger a
343 // garbage collection, since that might move the code and invalidate the
344 // return address (unless this is somehow accounted for by the called
345 // function).
346 void CallCFunction(ExternalReference function, int num_arguments);
347 void CallCFunction(Register function, int num_arguments);
348
349 // Calculate the number of stack slots to reserve for arguments when calling a
350 // C function.
351 int ArgumentStackSlotsForCFunctionCall(int num_arguments);
352
353 void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
354 Label* condition_met,
355 Label::Distance condition_met_distance = Label::kFar);
356
357 void Cvtss2sd(XMMRegister dst, XMMRegister src);
358 void Cvtss2sd(XMMRegister dst, Operand src);
359 void Cvtsd2ss(XMMRegister dst, XMMRegister src);
360 void Cvtsd2ss(XMMRegister dst, Operand src);
361 void Cvttsd2si(Register dst, XMMRegister src);
362 void Cvttsd2si(Register dst, Operand src);
363 void Cvttsd2siq(Register dst, XMMRegister src);
364 void Cvttsd2siq(Register dst, Operand src);
365 void Cvttss2si(Register dst, XMMRegister src);
366 void Cvttss2si(Register dst, Operand src);
367 void Cvttss2siq(Register dst, XMMRegister src);
368 void Cvttss2siq(Register dst, Operand src);
369 void Cvtlui2ss(XMMRegister dst, Register src);
370 void Cvtlui2ss(XMMRegister dst, Operand src);
371 void Cvtlui2sd(XMMRegister dst, Register src);
372 void Cvtlui2sd(XMMRegister dst, Operand src);
373 void Cvtqui2ss(XMMRegister dst, Register src);
374 void Cvtqui2ss(XMMRegister dst, Operand src);
375 void Cvtqui2sd(XMMRegister dst, Register src);
376 void Cvtqui2sd(XMMRegister dst, Operand src);
377 void Cvttsd2uiq(Register dst, Operand src, Label* fail = nullptr);
378 void Cvttsd2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
379 void Cvttss2uiq(Register dst, Operand src, Label* fail = nullptr);
380 void Cvttss2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
381
382 // cvtsi2sd and cvtsi2ss instructions only write to the low 64/32-bit of dst
383 // register, which hinders register renaming and makes dependence chains
384 // longer. So we use xorpd to clear the dst register before cvtsi2sd for
385 // non-AVX and a scratch XMM register as first src for AVX to solve this
386 // issue.
387 void Cvtqsi2ss(XMMRegister dst, Register src);
388 void Cvtqsi2ss(XMMRegister dst, Operand src);
389 void Cvtqsi2sd(XMMRegister dst, Register src);
390 void Cvtqsi2sd(XMMRegister dst, Operand src);
391 void Cvtlsi2ss(XMMRegister dst, Register src);
392 void Cvtlsi2ss(XMMRegister dst, Operand src);
393 void Cvtlsi2sd(XMMRegister dst, Register src);
394 void Cvtlsi2sd(XMMRegister dst, Operand src);
395
396 void Lzcntq(Register dst, Register src);
397 void Lzcntq(Register dst, Operand src);
398 void Lzcntl(Register dst, Register src);
399 void Lzcntl(Register dst, Operand src);
400 void Tzcntq(Register dst, Register src);
401 void Tzcntq(Register dst, Operand src);
402 void Tzcntl(Register dst, Register src);
403 void Tzcntl(Register dst, Operand src);
404 void Popcntl(Register dst, Register src);
405 void Popcntl(Register dst, Operand src);
406 void Popcntq(Register dst, Register src);
407 void Popcntq(Register dst, Operand src);
408
409 // Is the value a tagged smi.
410 Condition CheckSmi(Register src);
411 Condition CheckSmi(Operand src);
412
413 // Jump to label if the value is a tagged smi.
414 void JumpIfSmi(Register src, Label* on_smi,
415 Label::Distance near_jump = Label::kFar);
416
JumpIfEqual(Register a,int32_t b,Label * dest)417 void JumpIfEqual(Register a, int32_t b, Label* dest) {
418 cmpl(a, Immediate(b));
419 j(equal, dest);
420 }
421
JumpIfLessThan(Register a,int32_t b,Label * dest)422 void JumpIfLessThan(Register a, int32_t b, Label* dest) {
423 cmpl(a, Immediate(b));
424 j(less, dest);
425 }
426
427 void LoadMap(Register destination, Register object);
428
429 void Move(Register dst, Smi source);
430
Move(Operand dst,Smi source)431 void Move(Operand dst, Smi source) {
432 Register constant = GetSmiConstant(source);
433 movq(dst, constant);
434 }
435
436 void Move(Register dst, ExternalReference ext);
437
438 void Move(XMMRegister dst, uint32_t src);
439 void Move(XMMRegister dst, uint64_t src);
Move(XMMRegister dst,float src)440 void Move(XMMRegister dst, float src) { Move(dst, bit_cast<uint32_t>(src)); }
Move(XMMRegister dst,double src)441 void Move(XMMRegister dst, double src) { Move(dst, bit_cast<uint64_t>(src)); }
442 void Move(XMMRegister dst, uint64_t high, uint64_t low);
443
444 // Move if the registers are not identical.
445 void Move(Register target, Register source);
446
447 void Move(Register dst, Handle<HeapObject> source,
448 RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
449 void Move(Operand dst, Handle<HeapObject> source,
450 RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
451
452 // Loads a pointer into a register with a relocation mode.
Move(Register dst,Address ptr,RelocInfo::Mode rmode)453 void Move(Register dst, Address ptr, RelocInfo::Mode rmode) {
454 // This method must not be used with heap object references. The stored
455 // address is not GC safe. Use the handle version instead.
456 DCHECK(rmode == RelocInfo::NONE || rmode > RelocInfo::LAST_GCED_ENUM);
457 movq(dst, Immediate64(ptr, rmode));
458 }
459
460 // Move src0 to dst0 and src1 to dst1, handling possible overlaps.
461 void MovePair(Register dst0, Register src0, Register dst1, Register src1);
462
463 void MoveStringConstant(
464 Register result, const StringConstantBase* string,
465 RelocInfo::Mode rmode = RelocInfo::FULL_EMBEDDED_OBJECT);
466
467 // Convert smi to word-size sign-extended value.
468 void SmiUntag(Register reg);
469 // Requires dst != src
470 void SmiUntag(Register dst, Register src);
471 void SmiUntag(Register dst, Operand src);
472
473 // Loads the address of the external reference into the destination
474 // register.
475 void LoadAddress(Register destination, ExternalReference source);
476
477 void LoadFromConstantsTable(Register destination,
478 int constant_index) override;
479 void LoadRootRegisterOffset(Register destination, intptr_t offset) override;
480 void LoadRootRelative(Register destination, int32_t offset) override;
481
482 // Operand pointing to an external reference.
483 // May emit code to set up the scratch register. The operand is
484 // only guaranteed to be correct as long as the scratch register
485 // isn't changed.
486 // If the operand is used more than once, use a scratch register
487 // that is guaranteed not to be clobbered.
488 Operand ExternalReferenceAsOperand(ExternalReference reference,
489 Register scratch = kScratchRegister);
490
Call(Register reg)491 void Call(Register reg) { call(reg); }
492 void Call(Operand op);
493 void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
494 void Call(Address destination, RelocInfo::Mode rmode);
495 void Call(ExternalReference ext);
Call(Label * target)496 void Call(Label* target) { call(target); }
497
498 Operand EntryFromBuiltinIndexAsOperand(Builtins::Name builtin_index);
499 Operand EntryFromBuiltinIndexAsOperand(Register builtin_index);
500 void CallBuiltinByIndex(Register builtin_index) override;
501 void CallBuiltin(int builtin_index);
502
503 void LoadCodeObjectEntry(Register destination, Register code_object) override;
504 void CallCodeObject(Register code_object) override;
505 void JumpCodeObject(Register code_object) override;
506
507 void RetpolineCall(Register reg);
508 void RetpolineCall(Address destination, RelocInfo::Mode rmode);
509
510 void Jump(Address destination, RelocInfo::Mode rmode);
511 void Jump(const ExternalReference& reference) override;
512 void Jump(Operand op);
513 void Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
514 Condition cc = always);
515
516 void RetpolineJump(Register reg);
517
518 void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
519 DeoptimizeKind kind,
520 Label* jump_deoptimization_entry_label);
521
522 void Trap() override;
523 void DebugBreak() override;
524
525 // Shufps that will mov src into dst if AVX is not supported.
526 void Shufps(XMMRegister dst, XMMRegister src, byte imm8);
527
528 // Non-SSE2 instructions.
529 void Pextrd(Register dst, XMMRegister src, uint8_t imm8);
530
531 void Pinsrb(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8);
532 void Pinsrb(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8);
533 void Pinsrw(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8);
534 void Pinsrw(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8);
535 void Pinsrd(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8);
536 void Pinsrd(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8);
537 void Pinsrd(XMMRegister dst, Register src2, uint8_t imm8);
538 void Pinsrd(XMMRegister dst, Operand src2, uint8_t imm8);
539 void Pinsrq(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8);
540 void Pinsrq(XMMRegister dst, XMMRegister src1, Operand src2, uint8_t imm8);
541
Psllq(XMMRegister dst,int imm8)542 void Psllq(XMMRegister dst, int imm8) { Psllq(dst, static_cast<byte>(imm8)); }
543 void Psllq(XMMRegister dst, byte imm8);
Psrlq(XMMRegister dst,int imm8)544 void Psrlq(XMMRegister dst, int imm8) { Psrlq(dst, static_cast<byte>(imm8)); }
545 void Psrlq(XMMRegister dst, byte imm8);
546 void Pslld(XMMRegister dst, byte imm8);
547 void Psrld(XMMRegister dst, byte imm8);
548
549 void Pblendvb(XMMRegister dst, XMMRegister src1, XMMRegister src2,
550 XMMRegister mask);
551 void Blendvps(XMMRegister dst, XMMRegister src1, XMMRegister src2,
552 XMMRegister mask);
553 void Blendvpd(XMMRegister dst, XMMRegister src1, XMMRegister src2,
554 XMMRegister mask);
555
556 // Supports both SSE and AVX. Move src1 to dst if they are not equal on SSE.
557 void Pshufb(XMMRegister dst, XMMRegister src1, XMMRegister src2);
558
559 void CompareRoot(Register with, RootIndex index);
560 void CompareRoot(Operand with, RootIndex index);
561
562 // Generates function and stub prologue code.
563 void StubPrologue(StackFrame::Type type);
564 void Prologue();
565
566 // Calls Abort(msg) if the condition cc is not satisfied.
567 // Use --debug_code to enable.
568 void Assert(Condition cc, AbortReason reason);
569
570 // Like Assert(), but without condition.
571 // Use --debug_code to enable.
572 void AssertUnreachable(AbortReason reason);
573
574 // Abort execution if a 64 bit register containing a 32 bit payload does not
575 // have zeros in the top 32 bits, enabled via --debug-code.
576 void AssertZeroExtended(Register reg);
577
578 // Like Assert(), but always enabled.
579 void Check(Condition cc, AbortReason reason);
580
581 // Print a message to stdout and abort execution.
582 void Abort(AbortReason msg);
583
584 // Check that the stack is aligned.
585 void CheckStackAlignment();
586
587 // Activation support.
588 void EnterFrame(StackFrame::Type type);
EnterFrame(StackFrame::Type type,bool load_constant_pool_pointer_reg)589 void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
590 // Out-of-line constant pool not implemented on x64.
591 UNREACHABLE();
592 }
593 void LeaveFrame(StackFrame::Type type);
594
595 // Allocate stack space of given size (i.e. decrement {rsp} by the value
596 // stored in the given register, or by a constant). If you need to perform a
597 // stack check, do it before calling this function because this function may
598 // write into the newly allocated space. It may also overwrite the given
599 // register's value, in the version that takes a register.
600 #ifdef V8_TARGET_OS_WIN
601 void AllocateStackSpace(Register bytes_scratch);
602 void AllocateStackSpace(int bytes);
603 #else
AllocateStackSpace(Register bytes)604 void AllocateStackSpace(Register bytes) { subq(rsp, bytes); }
AllocateStackSpace(int bytes)605 void AllocateStackSpace(int bytes) { subq(rsp, Immediate(bytes)); }
606 #endif
607
608 // Removes current frame and its arguments from the stack preserving the
609 // arguments and a return address pushed to the stack for the next call. Both
610 // |callee_args_count| and |caller_args_count| do not include receiver.
611 // |callee_args_count| is not modified. |caller_args_count| is trashed.
612 void PrepareForTailCall(Register callee_args_count,
613 Register caller_args_count, Register scratch0,
614 Register scratch1);
615
InitializeRootRegister()616 void InitializeRootRegister() {
617 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
618 Move(kRootRegister, isolate_root);
619 }
620
621 void SaveRegisters(RegList registers);
622 void RestoreRegisters(RegList registers);
623
624 void CallRecordWriteStub(Register object, Register address,
625 RememberedSetAction remembered_set_action,
626 SaveFPRegsMode fp_mode);
627 void CallRecordWriteStub(Register object, Register address,
628 RememberedSetAction remembered_set_action,
629 SaveFPRegsMode fp_mode, Address wasm_target);
630 void CallEphemeronKeyBarrier(Register object, Register address,
631 SaveFPRegsMode fp_mode);
632
633 void MoveNumber(Register dst, double value);
634 void MoveNonSmi(Register dst, double value);
635
636 // Calculate how much stack space (in bytes) are required to store caller
637 // registers excluding those specified in the arguments.
638 int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
639 Register exclusion1 = no_reg,
640 Register exclusion2 = no_reg,
641 Register exclusion3 = no_reg) const;
642
643 // PushCallerSaved and PopCallerSaved do not arrange the registers in any
644 // particular order so they are not useful for calls that can cause a GC.
645 // The caller can exclude up to 3 registers that do not need to be saved and
646 // restored.
647
648 // Push caller saved registers on the stack, and return the number of bytes
649 // stack pointer is adjusted.
650 int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
651 Register exclusion2 = no_reg,
652 Register exclusion3 = no_reg);
653 // Restore caller saved registers from the stack, and return the number of
654 // bytes stack pointer is adjusted.
655 int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
656 Register exclusion2 = no_reg,
657 Register exclusion3 = no_reg);
658
659 // Compute the start of the generated instruction stream from the current PC.
660 // This is an alternative to embedding the {CodeObject} handle as a reference.
661 void ComputeCodeStartAddress(Register dst);
662
663 void ResetSpeculationPoisonRegister();
664
665 // Control-flow integrity:
666
667 // Define a function entrypoint. This doesn't emit any code for this
668 // architecture, as control-flow integrity is not supported for it.
CodeEntry()669 void CodeEntry() {}
670 // Define an exception handler.
ExceptionHandler()671 void ExceptionHandler() {}
672 // Define an exception handler and bind a label.
BindExceptionHandler(Label * label)673 void BindExceptionHandler(Label* label) { bind(label); }
674
675 // ---------------------------------------------------------------------------
676 // Pointer compression support
677
678 // Loads a field containing a HeapObject and decompresses it if pointer
679 // compression is enabled.
680 void LoadTaggedPointerField(Register destination, Operand field_operand);
681
682 // Loads a field containing any tagged value and decompresses it if necessary.
683 void LoadAnyTaggedField(Register destination, Operand field_operand);
684
685 // Loads a field containing a HeapObject, decompresses it if necessary and
686 // pushes full pointer to the stack. When pointer compression is enabled,
687 // uses |scratch| to decompress the value.
688 void PushTaggedPointerField(Operand field_operand, Register scratch);
689
690 // Loads a field containing any tagged value, decompresses it if necessary and
691 // pushes the full pointer to the stack. When pointer compression is enabled,
692 // uses |scratch| to decompress the value.
693 void PushTaggedAnyField(Operand field_operand, Register scratch);
694
695 // Loads a field containing smi value and untags it.
696 void SmiUntagField(Register dst, Operand src);
697
698 // Compresses tagged value if necessary and stores it to given on-heap
699 // location.
700 void StoreTaggedField(Operand dst_field_operand, Immediate immediate);
701 void StoreTaggedField(Operand dst_field_operand, Register value);
702
703 // The following macros work even when pointer compression is not enabled.
704 void DecompressTaggedSigned(Register destination, Operand field_operand);
705 void DecompressTaggedPointer(Register destination, Operand field_operand);
706 void DecompressTaggedPointer(Register destination, Register source);
707 void DecompressAnyTagged(Register destination, Operand field_operand);
708
709 // ---------------------------------------------------------------------------
710 // V8 Heap sandbox support
711
712 // Loads a field containing off-heap pointer and does necessary decoding
713 // if V8 heap sandbox is enabled.
714 void LoadExternalPointerField(Register destination, Operand field_operand,
715 ExternalPointerTag tag);
716
717 protected:
718 static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
719
720 // Returns a register holding the smi value. The register MUST NOT be
721 // modified. It may be the "smi 1 constant" register.
722 Register GetSmiConstant(Smi value);
723
724 void CallRecordWriteStub(Register object, Register address,
725 RememberedSetAction remembered_set_action,
726 SaveFPRegsMode fp_mode, Handle<Code> code_target,
727 Address wasm_target);
728 };
729
730 // MacroAssembler implements a collection of frequently used macros.
731 class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
732 public:
733 using TurboAssembler::TurboAssembler;
734
735 // Loads and stores the value of an external reference.
736 // Special case code for load and store to take advantage of
737 // load_rax/store_rax if possible/necessary.
738 // For other operations, just use:
739 // Operand operand = ExternalReferenceAsOperand(extref);
740 // operation(operand, ..);
741 void Load(Register destination, ExternalReference source);
742 void Store(ExternalReference destination, Register source);
743
744 // Pushes the address of the external reference onto the stack.
745 void PushAddress(ExternalReference source);
746
747 // Operations on roots in the root-array.
748 // Load a root value where the index (or part of it) is variable.
749 // The variable_offset register is added to the fixed_offset value
750 // to get the index into the root-array.
751 void PushRoot(RootIndex index);
752
753 // Compare the object in a register to a value and jump if they are equal.
754 void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
755 Label::Distance if_equal_distance = Label::kFar) {
756 CompareRoot(with, index);
757 j(equal, if_equal, if_equal_distance);
758 }
759 void JumpIfRoot(Operand with, RootIndex index, Label* if_equal,
760 Label::Distance if_equal_distance = Label::kFar) {
761 CompareRoot(with, index);
762 j(equal, if_equal, if_equal_distance);
763 }
764
765 // Compare the object in a register to a value and jump if they are not equal.
766 void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
767 Label::Distance if_not_equal_distance = Label::kFar) {
768 CompareRoot(with, index);
769 j(not_equal, if_not_equal, if_not_equal_distance);
770 }
771 void JumpIfNotRoot(Operand with, RootIndex index, Label* if_not_equal,
772 Label::Distance if_not_equal_distance = Label::kFar) {
773 CompareRoot(with, index);
774 j(not_equal, if_not_equal, if_not_equal_distance);
775 }
776
777 // ---------------------------------------------------------------------------
778 // GC Support
779
780 // Notify the garbage collector that we wrote a pointer into an object.
781 // |object| is the object being stored into, |value| is the object being
782 // stored. value and scratch registers are clobbered by the operation.
783 // The offset is the offset from the start of the object, not the offset from
784 // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
785 void RecordWriteField(
786 Register object, int offset, Register value, Register scratch,
787 SaveFPRegsMode save_fp,
788 RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
789 SmiCheck smi_check = INLINE_SMI_CHECK);
790
791 // For page containing |object| mark region covering |address|
792 // dirty. |object| is the object being stored into, |value| is the
793 // object being stored. The address and value registers are clobbered by the
794 // operation. RecordWrite filters out smis so it does not update
795 // the write barrier if the value is a smi.
796 void RecordWrite(
797 Register object, Register address, Register value, SaveFPRegsMode save_fp,
798 RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
799 SmiCheck smi_check = INLINE_SMI_CHECK);
800
801 // Frame restart support.
802 void MaybeDropFrames();
803
  // Enter specific kind of exit frame; either in normal or
  // debug mode. Expects the number of arguments in register rax and
  // sets up the number of arguments in register rdi and the pointer
  // to the first argument in register rsi.
  //
  // Allocates arg_stack_space * kSystemPointerSize memory (not GCed) on the
  // stack accessible via StackSpaceOperand.
  void EnterExitFrame(int arg_stack_space = 0, bool save_doubles = false,
                      StackFrame::Type frame_type = StackFrame::EXIT);

  // Enter an exit frame for calling out to a C/API function. Allocates
  // (arg_stack_space * kSystemPointerSize) memory (not GCed) on the stack
  // accessible via StackSpaceOperand.
  void EnterApiExitFrame(int arg_stack_space);

  // Leave the current exit frame. Expects/provides the return value in
  // register rax:rdx (untouched) and the pointer to the first
  // argument in register rsi (if pop_arguments == true).
  void LeaveExitFrame(bool save_doubles = false, bool pop_arguments = true);

  // Leave the current API exit frame. Expects/provides the return value in
  // register rax (untouched).
  void LeaveApiExitFrame();
827
828 // ---------------------------------------------------------------------------
829 // JavaScript invokes
830
831 // Invoke the JavaScript function code by either calling or jumping.
832 void InvokeFunctionCode(Register function, Register new_target,
833 Register expected_parameter_count,
834 Register actual_parameter_count, InvokeFlag flag);
835
836 // On function call, call into the debugger.
837 void CallDebugOnFunctionCall(Register fun, Register new_target,
838 Register expected_parameter_count,
839 Register actual_parameter_count);
840
841 // Invoke the JavaScript function in the given register. Changes the
842 // current context to the context in the function before invoking.
843 void InvokeFunction(Register function, Register new_target,
844 Register actual_parameter_count, InvokeFlag flag);
845
846 void InvokeFunction(Register function, Register new_target,
847 Register expected_parameter_count,
848 Register actual_parameter_count, InvokeFlag flag);
849
850 // ---------------------------------------------------------------------------
851 // Conversions between tagged smi values and non-tagged integer values.
852
853 // Tag an word-size value. The result must be known to be a valid smi value.
854 void SmiTag(Register reg);
855 // Requires dst != src
856 void SmiTag(Register dst, Register src);
857
858 // Simple comparison of smis. Both sides must be known smis to use these,
859 // otherwise use Cmp.
860 void SmiCompare(Register smi1, Register smi2);
861 void SmiCompare(Register dst, Smi src);
862 void SmiCompare(Register dst, Operand src);
863 void SmiCompare(Operand dst, Register src);
864 void SmiCompare(Operand dst, Smi src);
865
866 // Functions performing a check on a known or potential smi. Returns
867 // a condition that is satisfied if the check is successful.
868
869 // Test-and-jump functions. Typically combines a check function
870 // above with a conditional jump.
871
872 // Jump to label if the value is not a tagged smi.
873 void JumpIfNotSmi(Register src, Label* on_not_smi,
874 Label::Distance near_jump = Label::kFar);
875
876 // Jump to label if the value is not a tagged smi.
877 void JumpIfNotSmi(Operand src, Label* on_not_smi,
878 Label::Distance near_jump = Label::kFar);
879
880 // Operations on tagged smi values.
881
882 // Smis represent a subset of integers. The subset is always equivalent to
883 // a two's complement interpretation of a fixed number of bits.
884
885 // Add an integer constant to a tagged smi, giving a tagged smi as result.
886 // No overflow testing on the result is done.
887 void SmiAddConstant(Operand dst, Smi constant);
888
889 // Specialized operations
890
891 // Converts, if necessary, a smi to a combination of number and
892 // multiplier to be used as a scaled index.
893 // The src register contains a *positive* smi value. The shift is the
894 // power of two to multiply the index value by (e.g. to index by
895 // smi-value * kSystemPointerSize, pass the smi and kSystemPointerSizeLog2).
896 // The returned index register may be either src or dst, depending
897 // on what is most efficient. If src and dst are different registers,
898 // src is always unchanged.
899 SmiIndex SmiToIndex(Register dst, Register src, int shift);
900
901 // ---------------------------------------------------------------------------
902 // Macro instructions.
903
904 void Cmp(Register dst, Handle<Object> source);
905 void Cmp(Operand dst, Handle<Object> source);
906 void Cmp(Register dst, Smi src);
907 void Cmp(Operand dst, Smi src);
908 void Cmp(Register dst, int32_t src);
909
910 // Checks if value is in range [lower_limit, higher_limit] using a single
911 // comparison.
912 void JumpIfIsInRange(Register value, unsigned lower_limit,
913 unsigned higher_limit, Label* on_in_range,
914 Label::Distance near_jump = Label::kFar);
915
916 // Emit code to discard a non-negative number of pointer-sized elements
917 // from the stack, clobbering only the rsp register.
918 void Drop(int stack_elements);
919 // Emit code to discard a positive number of pointer-sized elements
920 // from the stack under the return address which remains on the top,
921 // clobbering the rsp register.
922 void DropUnderReturnAddress(int stack_elements,
923 Register scratch = kScratchRegister);
924
925 void PushQuad(Operand src);
926 void PushImm32(int32_t imm32);
927 void Pop(Register dst);
928 void Pop(Operand dst);
929 void PopQuad(Operand dst);
930
931 // ---------------------------------------------------------------------------
932 // SIMD macros.
933 void Absps(XMMRegister dst);
934 void Negps(XMMRegister dst);
935 void Abspd(XMMRegister dst);
936 void Negpd(XMMRegister dst);
937 // Generates a trampoline to jump to the off-heap instruction stream.
938 void JumpToInstructionStream(Address entry);
939
940 // Compare object type for heap object.
941 // Always use unsigned comparisons: above and below, not less and greater.
942 // Incoming register is heap_object and outgoing register is map.
943 // They may be the same register, and may be kScratchRegister.
944 void CmpObjectType(Register heap_object, InstanceType type, Register map);
945
946 // Compare instance type for map.
947 // Always use unsigned comparisons: above and below, not less and greater.
948 void CmpInstanceType(Register map, InstanceType type);
949
950 template <typename Field>
DecodeField(Register reg)951 void DecodeField(Register reg) {
952 static const int shift = Field::kShift;
953 static const int mask = Field::kMask >> Field::kShift;
954 if (shift != 0) {
955 shrq(reg, Immediate(shift));
956 }
957 andq(reg, Immediate(mask));
958 }
959
  // Abort execution if argument is a smi, enabled via --debug-code.
  void AssertNotSmi(Register object);

  // Abort execution if argument is not a smi, enabled via --debug-code.
  void AssertSmi(Register object);
  void AssertSmi(Operand object);

  // Abort execution if argument is not a Constructor, enabled via --debug-code.
  void AssertConstructor(Register object);

  // Abort execution if argument is not a JSFunction, enabled via --debug-code.
  void AssertFunction(Register object);

  // Abort execution if argument is not a JSBoundFunction,
  // enabled via --debug-code.
  void AssertBoundFunction(Register object);

  // Abort execution if argument is not a JSGeneratorObject (or subclass),
  // enabled via --debug-code.
  void AssertGeneratorObject(Register object);

  // Abort execution if argument is not undefined or an AllocationSite, enabled
  // via --debug-code.
  void AssertUndefinedOrAllocationSite(Register object);

  // ---------------------------------------------------------------------------
  // Exception handling

  // Push a new stack handler and link it into stack handler chain.
  void PushStackHandler();

  // Unlink the stack handler on top of the stack from the stack handler chain.
  void PopStackHandler();

  // ---------------------------------------------------------------------------
  // Support functions.
  // Load the global proxy from the current native context into |dst|.
  void LoadGlobalProxy(Register dst) {
    LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst);
  }

  // Load the native context slot with the given index into |dst|.
  void LoadNativeContextSlot(int index, Register dst);
1004
1005 // ---------------------------------------------------------------------------
1006 // Runtime calls
1007
1008 // Call a runtime routine.
1009 void CallRuntime(const Runtime::Function* f, int num_arguments,
1010 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
1011
1012 // Convenience function: Same as above, but takes the fid instead.
1013 void CallRuntime(Runtime::FunctionId fid,
1014 SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
1015 const Runtime::Function* function = Runtime::FunctionForId(fid);
1016 CallRuntime(function, function->nargs, save_doubles);
1017 }
1018
1019 // Convenience function: Same as above, but takes the fid instead.
1020 void CallRuntime(Runtime::FunctionId fid, int num_arguments,
1021 SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
1022 CallRuntime(Runtime::FunctionForId(fid), num_arguments, save_doubles);
1023 }
1024
  // Convenience function: tail call a runtime routine (jump).
  void TailCallRuntime(Runtime::FunctionId fid);

  // Jump to a runtime routine through the given external reference.
  void JumpToExternalReference(const ExternalReference& ext,
                               bool builtin_exit_frame = false);

  // ---------------------------------------------------------------------------
  // StatsCounter support
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);

  // ---------------------------------------------------------------------------
  // Stack limit utilities

  // Returns an operand through which the stack limit of the given kind can
  // be read.
  Operand StackLimitAsOperand(StackLimitKind kind);
  // Jumps to |stack_overflow| if pushing |num_args| pointer-sized arguments
  // would overflow the stack. Clobbers |scratch|.
  void StackOverflowCheck(
      Register num_args, Register scratch, Label* stack_overflow,
      Label::Distance stack_overflow_distance = Label::kFar);

  // ---------------------------------------------------------------------------
  // In-place weak references.
  void LoadWeakValue(Register in_out, Label* target_if_cleared);
1047
1048 // ---------------------------------------------------------------------------
1049 // Debugging
1050
SafepointRegisterStackIndex(Register reg)1051 static int SafepointRegisterStackIndex(Register reg) {
1052 return SafepointRegisterStackIndex(reg.code());
1053 }
1054
 private:
  // Order general registers are pushed by Pushad.
  // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
  static const int kSafepointPushRegisterIndices[Register::kNumRegisters];
  static const int kNumSafepointSavedRegisters = 12;

  // Helper functions for generating invokes.
  void InvokePrologue(Register expected_parameter_count,
                      Register actual_parameter_count, Label* done,
                      InvokeFlag flag);

  void EnterExitFramePrologue(bool save_rax, StackFrame::Type frame_type);

  // Allocates arg_stack_space * kSystemPointerSize memory (not GCed) on the
  // stack accessible via StackSpaceOperand.
  void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);

  void LeaveExitFrameEpilogue();

  // Computes the safepoint stack slot index for a register code, mapping the
  // Pushad push order (above) onto slot positions.
  static int SafepointRegisterStackIndex(int reg_code) {
    return kNumSafepointRegisters - kSafepointPushRegisterIndices[reg_code] - 1;
  }

  // Needs access to SafepointRegisterStackIndex for compiled frame
  // traversal.
  friend class CommonFrame;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MacroAssembler);
};
1085
1086 // -----------------------------------------------------------------------------
1087 // Static helper functions.
1088
1089 // Generate an Operand for loading a field from an object.
FieldOperand(Register object,int offset)1090 inline Operand FieldOperand(Register object, int offset) {
1091 return Operand(object, offset - kHeapObjectTag);
1092 }
1093
1094 // Generate an Operand for loading an indexed field from an object.
FieldOperand(Register object,Register index,ScaleFactor scale,int offset)1095 inline Operand FieldOperand(Register object, Register index, ScaleFactor scale,
1096 int offset) {
1097 return Operand(object, index, scale, offset - kHeapObjectTag);
1098 }
1099
1100 // Provides access to exit frame stack space (not GCed).
StackSpaceOperand(int index)1101 inline Operand StackSpaceOperand(int index) {
1102 #ifdef V8_TARGET_OS_WIN
1103 const int kShaddowSpace = 4;
1104 return Operand(rsp, (index + kShaddowSpace) * kSystemPointerSize);
1105 #else
1106 return Operand(rsp, index * kSystemPointerSize);
1107 #endif
1108 }
1109
StackOperandForReturnAddress(int32_t disp)1110 inline Operand StackOperandForReturnAddress(int32_t disp) {
1111 return Operand(rsp, disp);
1112 }
1113
1114 #define ACCESS_MASM(masm) masm->
1115
1116 } // namespace internal
1117 } // namespace v8
1118
1119 #endif // V8_CODEGEN_X64_MACRO_ASSEMBLER_X64_H_
1120