// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_
#define V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_

#include <ctype.h>

#include "src/globals.h"

#include "src/arm64/assembler-arm64.h"
#include "src/arm64/assembler-arm64-inl.h"
#include "src/arm64/macro-assembler-arm64.h"
#include "src/arm64/instrument-arm64.h"


namespace v8 {
namespace internal {

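// Generate a MemOperand for a field of a tagged heap object: the heap object
// tag is folded into the offset, so the result can be used directly with a
// tagged pointer. Illustrative usage (not from this file):
//   __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));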
MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);
}


MemOperand UntagSmiFieldMemOperand(Register object, int offset) {
  return UntagSmiMemOperand(object, offset - kHeapObjectTag);
}


MemOperand UntagSmiMemOperand(Register object, int offset) {
  // Assumes that Smis are shifted by 32 bits and little endianness.
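  // On little-endian, the 32-bit payload therefore lives kSmiShift /
  // kBitsPerByte = 4 bytes above the start of the field.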
  STATIC_ASSERT(kSmiShift == 32);
  return MemOperand(object, offset + (kSmiShift / kBitsPerByte));
}


Handle<Object> MacroAssembler::CodeObject() {
  ASSERT(!code_object_.is_null());
  return code_object_;
}


void MacroAssembler::And(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  LogicalMacro(rd, rn, operand, AND);
}


void MacroAssembler::Ands(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  LogicalMacro(rd, rn, operand, ANDS);
}


void MacroAssembler::Tst(const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  LogicalMacro(AppropriateZeroRegFor(rn), rn, operand, ANDS);
}


void MacroAssembler::Bic(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  LogicalMacro(rd, rn, operand, BIC);
}


void MacroAssembler::Bics(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  LogicalMacro(rd, rn, operand, BICS);
}


void MacroAssembler::Orr(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  LogicalMacro(rd, rn, operand, ORR);
}


void MacroAssembler::Orn(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  LogicalMacro(rd, rn, operand, ORN);
}


void MacroAssembler::Eor(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  LogicalMacro(rd, rn, operand, EOR);
}


void MacroAssembler::Eon(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  LogicalMacro(rd, rn, operand, EON);
}

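// A conditional compare against a negative immediate is emitted as the
// opposite operation (CCMN for Ccmp, CCMP for Ccmn) on the negated value.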
void MacroAssembler::Ccmp(const Register& rn,
                          const Operand& operand,
                          StatusFlags nzcv,
                          Condition cond) {
  ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate() && (operand.ImmediateValue() < 0)) {
    ConditionalCompareMacro(rn, -operand.ImmediateValue(), nzcv, cond, CCMN);
  } else {
    ConditionalCompareMacro(rn, operand, nzcv, cond, CCMP);
  }
}


void MacroAssembler::Ccmn(const Register& rn,
                          const Operand& operand,
                          StatusFlags nzcv,
                          Condition cond) {
  ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate() && (operand.ImmediateValue() < 0)) {
    ConditionalCompareMacro(rn, -operand.ImmediateValue(), nzcv, cond, CCMP);
  } else {
    ConditionalCompareMacro(rn, operand, nzcv, cond, CCMN);
  }
}

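// As with the conditional compares above, Add/Adds and Sub/Subs rewrite a
// negative immediate operand as the opposite operation on the negated value,
// so the immediate can use the unsigned add/sub immediate encodings.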
void MacroAssembler::Add(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate() && (operand.ImmediateValue() < 0)) {
    AddSubMacro(rd, rn, -operand.ImmediateValue(), LeaveFlags, SUB);
  } else {
    AddSubMacro(rd, rn, operand, LeaveFlags, ADD);
  }
}


void MacroAssembler::Adds(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate() && (operand.ImmediateValue() < 0)) {
    AddSubMacro(rd, rn, -operand.ImmediateValue(), SetFlags, SUB);
  } else {
    AddSubMacro(rd, rn, operand, SetFlags, ADD);
  }
}


void MacroAssembler::Sub(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate() && (operand.ImmediateValue() < 0)) {
    AddSubMacro(rd, rn, -operand.ImmediateValue(), LeaveFlags, ADD);
  } else {
    AddSubMacro(rd, rn, operand, LeaveFlags, SUB);
  }
}


void MacroAssembler::Subs(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  if (operand.IsImmediate() && (operand.ImmediateValue() < 0)) {
    AddSubMacro(rd, rn, -operand.ImmediateValue(), SetFlags, ADD);
  } else {
    AddSubMacro(rd, rn, operand, SetFlags, SUB);
  }
}


void MacroAssembler::Cmn(const Register& rn, const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  Adds(AppropriateZeroRegFor(rn), rn, operand);
}


void MacroAssembler::Cmp(const Register& rn, const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  Subs(AppropriateZeroRegFor(rn), rn, operand);
}


void MacroAssembler::Neg(const Register& rd,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  if (operand.IsImmediate()) {
    Mov(rd, -operand.ImmediateValue());
  } else {
    Sub(rd, AppropriateZeroRegFor(rd), operand);
  }
}


void MacroAssembler::Negs(const Register& rd,
                          const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  Subs(rd, AppropriateZeroRegFor(rd), operand);
}


void MacroAssembler::Adc(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  AddSubWithCarryMacro(rd, rn, operand, LeaveFlags, ADC);
}


void MacroAssembler::Adcs(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  AddSubWithCarryMacro(rd, rn, operand, SetFlags, ADC);
}


void MacroAssembler::Sbc(const Register& rd,
                         const Register& rn,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  AddSubWithCarryMacro(rd, rn, operand, LeaveFlags, SBC);
}


void MacroAssembler::Sbcs(const Register& rd,
                          const Register& rn,
                          const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  AddSubWithCarryMacro(rd, rn, operand, SetFlags, SBC);
}


void MacroAssembler::Ngc(const Register& rd,
                         const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  Register zr = AppropriateZeroRegFor(rd);
  Sbc(rd, zr, operand);
}


void MacroAssembler::Ngcs(const Register& rd,
                          const Operand& operand) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  Register zr = AppropriateZeroRegFor(rd);
  Sbcs(rd, zr, operand);
}


void MacroAssembler::Mvn(const Register& rd, uint64_t imm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  Mov(rd, ~imm);
}


#define DEFINE_FUNCTION(FN, REGTYPE, REG, OP)                          \
  void MacroAssembler::FN(const REGTYPE REG, const MemOperand& addr) { \
    ASSERT(allow_macro_instructions_);                                 \
    LoadStoreMacro(REG, addr, OP);                                     \
  }
LS_MACRO_LIST(DEFINE_FUNCTION)
#undef DEFINE_FUNCTION


void MacroAssembler::Asr(const Register& rd,
                         const Register& rn,
                         unsigned shift) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  asr(rd, rn, shift);
}


void MacroAssembler::Asr(const Register& rd,
                         const Register& rn,
                         const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  asrv(rd, rn, rm);
}


void MacroAssembler::B(Label* label) {
  b(label);
  CheckVeneerPool(false, false);
}


void MacroAssembler::B(Condition cond, Label* label) {
  ASSERT(allow_macro_instructions_);
  B(label, cond);
}


void MacroAssembler::Bfi(const Register& rd,
                         const Register& rn,
                         unsigned lsb,
                         unsigned width) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  bfi(rd, rn, lsb, width);
}


void MacroAssembler::Bfxil(const Register& rd,
                           const Register& rn,
                           unsigned lsb,
                           unsigned width) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  bfxil(rd, rn, lsb, width);
}


void MacroAssembler::Bind(Label* label) {
  ASSERT(allow_macro_instructions_);
  bind(label);
}


void MacroAssembler::Bl(Label* label) {
  ASSERT(allow_macro_instructions_);
  bl(label);
}


void MacroAssembler::Blr(const Register& xn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!xn.IsZero());
  blr(xn);
}


void MacroAssembler::Br(const Register& xn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!xn.IsZero());
  br(xn);
}


void MacroAssembler::Brk(int code) {
  ASSERT(allow_macro_instructions_);
  brk(code);
}


void MacroAssembler::Cinc(const Register& rd,
                          const Register& rn,
                          Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ASSERT((cond != al) && (cond != nv));
  cinc(rd, rn, cond);
}


void MacroAssembler::Cinv(const Register& rd,
                          const Register& rn,
                          Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ASSERT((cond != al) && (cond != nv));
  cinv(rd, rn, cond);
}


void MacroAssembler::Cls(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  cls(rd, rn);
}


void MacroAssembler::Clz(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  clz(rd, rn);
}


void MacroAssembler::Cneg(const Register& rd,
                          const Register& rn,
                          Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ASSERT((cond != al) && (cond != nv));
  cneg(rd, rn, cond);
}


// Conditionally zero the destination register. Only X registers are supported
// due to the truncation side-effect when used on W registers.
void MacroAssembler::CzeroX(const Register& rd,
                            Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsSP() && rd.Is64Bits());
  ASSERT((cond != al) && (cond != nv));
  csel(rd, xzr, rd, cond);
}


// Conditionally move a value into the destination register. Only X registers
// are supported due to the truncation side-effect when used on W registers.
void MacroAssembler::CmovX(const Register& rd,
                           const Register& rn,
                           Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsSP());
  ASSERT(rd.Is64Bits() && rn.Is64Bits());
  ASSERT((cond != al) && (cond != nv));
  if (!rd.is(rn)) {
    csel(rd, rn, rd, cond);
  }
}


void MacroAssembler::Cset(const Register& rd, Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ASSERT((cond != al) && (cond != nv));
  cset(rd, cond);
}


void MacroAssembler::Csetm(const Register& rd, Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ASSERT((cond != al) && (cond != nv));
  csetm(rd, cond);
}


void MacroAssembler::Csinc(const Register& rd,
                           const Register& rn,
                           const Register& rm,
                           Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ASSERT((cond != al) && (cond != nv));
  csinc(rd, rn, rm, cond);
}


void MacroAssembler::Csinv(const Register& rd,
                           const Register& rn,
                           const Register& rm,
                           Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ASSERT((cond != al) && (cond != nv));
  csinv(rd, rn, rm, cond);
}


void MacroAssembler::Csneg(const Register& rd,
                           const Register& rn,
                           const Register& rm,
                           Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ASSERT((cond != al) && (cond != nv));
  csneg(rd, rn, rm, cond);
}


void MacroAssembler::Dmb(BarrierDomain domain, BarrierType type) {
  ASSERT(allow_macro_instructions_);
  dmb(domain, type);
}


void MacroAssembler::Dsb(BarrierDomain domain, BarrierType type) {
  ASSERT(allow_macro_instructions_);
  dsb(domain, type);
}


void MacroAssembler::Debug(const char* message, uint32_t code, Instr params) {
  ASSERT(allow_macro_instructions_);
  debug(message, code, params);
}


void MacroAssembler::Extr(const Register& rd,
                          const Register& rn,
                          const Register& rm,
                          unsigned lsb) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  extr(rd, rn, rm, lsb);
}


void MacroAssembler::Fabs(const FPRegister& fd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  fabs(fd, fn);
}


void MacroAssembler::Fadd(const FPRegister& fd,
                          const FPRegister& fn,
                          const FPRegister& fm) {
  ASSERT(allow_macro_instructions_);
  fadd(fd, fn, fm);
}


void MacroAssembler::Fccmp(const FPRegister& fn,
                           const FPRegister& fm,
                           StatusFlags nzcv,
                           Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT((cond != al) && (cond != nv));
  fccmp(fn, fm, nzcv, cond);
}


void MacroAssembler::Fcmp(const FPRegister& fn, const FPRegister& fm) {
  ASSERT(allow_macro_instructions_);
  fcmp(fn, fm);
}

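// Only a comparison against zero can be encoded directly (fcmp with the #0.0
// immediate); any other value is first materialised in a scratch FP register.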
void MacroAssembler::Fcmp(const FPRegister& fn, double value) {
  ASSERT(allow_macro_instructions_);
  if (value != 0.0) {
    UseScratchRegisterScope temps(this);
    FPRegister tmp = temps.AcquireSameSizeAs(fn);
    Fmov(tmp, value);
    fcmp(fn, tmp);
  } else {
    fcmp(fn, value);
  }
}


void MacroAssembler::Fcsel(const FPRegister& fd,
                           const FPRegister& fn,
                           const FPRegister& fm,
                           Condition cond) {
  ASSERT(allow_macro_instructions_);
  ASSERT((cond != al) && (cond != nv));
  fcsel(fd, fn, fm, cond);
}


void MacroAssembler::Fcvt(const FPRegister& fd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  fcvt(fd, fn);
}


void MacroAssembler::Fcvtas(const Register& rd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  fcvtas(rd, fn);
}


void MacroAssembler::Fcvtau(const Register& rd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  fcvtau(rd, fn);
}


void MacroAssembler::Fcvtms(const Register& rd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  fcvtms(rd, fn);
}


void MacroAssembler::Fcvtmu(const Register& rd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  fcvtmu(rd, fn);
}


void MacroAssembler::Fcvtns(const Register& rd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  fcvtns(rd, fn);
}


void MacroAssembler::Fcvtnu(const Register& rd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  fcvtnu(rd, fn);
}


void MacroAssembler::Fcvtzs(const Register& rd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  fcvtzs(rd, fn);
}


void MacroAssembler::Fcvtzu(const Register& rd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  fcvtzu(rd, fn);
}


void MacroAssembler::Fdiv(const FPRegister& fd,
                          const FPRegister& fn,
                          const FPRegister& fm) {
  ASSERT(allow_macro_instructions_);
  fdiv(fd, fn, fm);
}


void MacroAssembler::Fmadd(const FPRegister& fd,
                           const FPRegister& fn,
                           const FPRegister& fm,
                           const FPRegister& fa) {
  ASSERT(allow_macro_instructions_);
  fmadd(fd, fn, fm, fa);
}


void MacroAssembler::Fmax(const FPRegister& fd,
                          const FPRegister& fn,
                          const FPRegister& fm) {
  ASSERT(allow_macro_instructions_);
  fmax(fd, fn, fm);
}


void MacroAssembler::Fmaxnm(const FPRegister& fd,
                            const FPRegister& fn,
                            const FPRegister& fm) {
  ASSERT(allow_macro_instructions_);
  fmaxnm(fd, fn, fm);
}


void MacroAssembler::Fmin(const FPRegister& fd,
                          const FPRegister& fn,
                          const FPRegister& fm) {
  ASSERT(allow_macro_instructions_);
  fmin(fd, fn, fm);
}


void MacroAssembler::Fminnm(const FPRegister& fd,
                            const FPRegister& fn,
                            const FPRegister& fm) {
  ASSERT(allow_macro_instructions_);
  fminnm(fd, fn, fm);
}


void MacroAssembler::Fmov(FPRegister fd, FPRegister fn) {
  ASSERT(allow_macro_instructions_);
  // Only emit an instruction if fd and fn are different, and they are both D
  // registers. fmov(s0, s0) is not a no-op because it clears the top word of
  // d0. Technically, fmov(d0, d0) is not a no-op either because it clears the
  // top of q0, but FPRegister does not currently support Q registers.
  if (!fd.Is(fn) || !fd.Is64Bits()) {
    fmov(fd, fn);
  }
}


void MacroAssembler::Fmov(FPRegister fd, Register rn) {
  ASSERT(allow_macro_instructions_);
  fmov(fd, rn);
}

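// Move a double immediate into an FP register using the cheapest available
// encoding: an fmov immediate if IsImmFP64 allows it, a move from xzr for
// +0.0, and a literal load otherwise.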
void MacroAssembler::Fmov(FPRegister fd, double imm) {
  ASSERT(allow_macro_instructions_);
  if (fd.Is32Bits()) {
    Fmov(fd, static_cast<float>(imm));
    return;
  }

  ASSERT(fd.Is64Bits());
  if (IsImmFP64(imm)) {
    fmov(fd, imm);
  } else if ((imm == 0.0) && (copysign(1.0, imm) == 1.0)) {
    fmov(fd, xzr);
  } else {
    Ldr(fd, imm);
  }
}


void MacroAssembler::Fmov(FPRegister fd, float imm) {
  ASSERT(allow_macro_instructions_);
  if (fd.Is64Bits()) {
    Fmov(fd, static_cast<double>(imm));
    return;
  }

  ASSERT(fd.Is32Bits());
  if (IsImmFP32(imm)) {
    fmov(fd, imm);
  } else if ((imm == 0.0) && (copysign(1.0, imm) == 1.0)) {
    fmov(fd, wzr);
  } else {
    UseScratchRegisterScope temps(this);
    Register tmp = temps.AcquireW();
    // TODO(all): Use Assembler::ldr(const FPRegister& ft, float imm).
    Mov(tmp, float_to_rawbits(imm));
    Fmov(fd, tmp);
  }
}


void MacroAssembler::Fmov(Register rd, FPRegister fn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  fmov(rd, fn);
}


void MacroAssembler::Fmsub(const FPRegister& fd,
                           const FPRegister& fn,
                           const FPRegister& fm,
                           const FPRegister& fa) {
  ASSERT(allow_macro_instructions_);
  fmsub(fd, fn, fm, fa);
}


void MacroAssembler::Fmul(const FPRegister& fd,
                          const FPRegister& fn,
                          const FPRegister& fm) {
  ASSERT(allow_macro_instructions_);
  fmul(fd, fn, fm);
}


void MacroAssembler::Fneg(const FPRegister& fd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  fneg(fd, fn);
}


void MacroAssembler::Fnmadd(const FPRegister& fd,
                            const FPRegister& fn,
                            const FPRegister& fm,
                            const FPRegister& fa) {
  ASSERT(allow_macro_instructions_);
  fnmadd(fd, fn, fm, fa);
}


void MacroAssembler::Fnmsub(const FPRegister& fd,
                            const FPRegister& fn,
                            const FPRegister& fm,
                            const FPRegister& fa) {
  ASSERT(allow_macro_instructions_);
  fnmsub(fd, fn, fm, fa);
}


void MacroAssembler::Frinta(const FPRegister& fd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  frinta(fd, fn);
}


void MacroAssembler::Frintm(const FPRegister& fd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  frintm(fd, fn);
}


void MacroAssembler::Frintn(const FPRegister& fd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  frintn(fd, fn);
}


void MacroAssembler::Frintz(const FPRegister& fd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  frintz(fd, fn);
}


void MacroAssembler::Fsqrt(const FPRegister& fd, const FPRegister& fn) {
  ASSERT(allow_macro_instructions_);
  fsqrt(fd, fn);
}


void MacroAssembler::Fsub(const FPRegister& fd,
                          const FPRegister& fn,
                          const FPRegister& fm) {
  ASSERT(allow_macro_instructions_);
  fsub(fd, fn, fm);
}


void MacroAssembler::Hint(SystemHint code) {
  ASSERT(allow_macro_instructions_);
  hint(code);
}


void MacroAssembler::Hlt(int code) {
  ASSERT(allow_macro_instructions_);
  hlt(code);
}


void MacroAssembler::Isb() {
  ASSERT(allow_macro_instructions_);
  isb();
}


void MacroAssembler::Ldnp(const CPURegister& rt,
                          const CPURegister& rt2,
                          const MemOperand& src) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!AreAliased(rt, rt2));
  ldnp(rt, rt2, src);
}


void MacroAssembler::Ldp(const CPURegister& rt,
                         const CPURegister& rt2,
                         const MemOperand& src) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!AreAliased(rt, rt2));
  ldp(rt, rt2, src);
}


void MacroAssembler::Ldpsw(const Register& rt,
                           const Register& rt2,
                           const MemOperand& src) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rt.IsZero());
  ASSERT(!rt2.IsZero());
  ldpsw(rt, rt2, src);
}


void MacroAssembler::Ldr(const CPURegister& rt, const Immediate& imm) {
  ASSERT(allow_macro_instructions_);
  ldr(rt, imm);
}


void MacroAssembler::Ldr(const CPURegister& rt, double imm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(rt.Is64Bits());
  ldr(rt, Immediate(double_to_rawbits(imm)));
}


void MacroAssembler::Lsl(const Register& rd,
                         const Register& rn,
                         unsigned shift) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  lsl(rd, rn, shift);
}


void MacroAssembler::Lsl(const Register& rd,
                         const Register& rn,
                         const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  lslv(rd, rn, rm);
}


void MacroAssembler::Lsr(const Register& rd,
                         const Register& rn,
                         unsigned shift) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  lsr(rd, rn, shift);
}


void MacroAssembler::Lsr(const Register& rd,
                         const Register& rn,
                         const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  lsrv(rd, rn, rm);
}


void MacroAssembler::Madd(const Register& rd,
                          const Register& rn,
                          const Register& rm,
                          const Register& ra) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  madd(rd, rn, rm, ra);
}


void MacroAssembler::Mneg(const Register& rd,
                          const Register& rn,
                          const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  mneg(rd, rn, rm);
}


void MacroAssembler::Mov(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  // Emit a register move only if the registers are distinct, or if they are
  // not X registers. Note that mov(w0, w0) is not a no-op because it clears
  // the top word of x0.
  if (!rd.Is(rn) || !rd.Is64Bits()) {
    Assembler::mov(rd, rn);
  }
}


void MacroAssembler::Movk(const Register& rd, uint64_t imm, int shift) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  movk(rd, imm, shift);
}


void MacroAssembler::Mrs(const Register& rt, SystemRegister sysreg) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rt.IsZero());
  mrs(rt, sysreg);
}


void MacroAssembler::Msr(SystemRegister sysreg, const Register& rt) {
  ASSERT(allow_macro_instructions_);
  msr(sysreg, rt);
}


void MacroAssembler::Msub(const Register& rd,
                          const Register& rn,
                          const Register& rm,
                          const Register& ra) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  msub(rd, rn, rm, ra);
}


void MacroAssembler::Mul(const Register& rd,
                         const Register& rn,
                         const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  mul(rd, rn, rm);
}


void MacroAssembler::Rbit(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  rbit(rd, rn);
}


void MacroAssembler::Ret(const Register& xn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!xn.IsZero());
  ret(xn);
  CheckVeneerPool(false, false);
}


void MacroAssembler::Rev(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  rev(rd, rn);
}


void MacroAssembler::Rev16(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  rev16(rd, rn);
}


void MacroAssembler::Rev32(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  rev32(rd, rn);
}


void MacroAssembler::Ror(const Register& rd,
                         const Register& rs,
                         unsigned shift) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ror(rd, rs, shift);
}


void MacroAssembler::Ror(const Register& rd,
                         const Register& rn,
                         const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  rorv(rd, rn, rm);
}


void MacroAssembler::Sbfiz(const Register& rd,
                           const Register& rn,
                           unsigned lsb,
                           unsigned width) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  sbfiz(rd, rn, lsb, width);
}


void MacroAssembler::Sbfx(const Register& rd,
                          const Register& rn,
                          unsigned lsb,
                          unsigned width) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  sbfx(rd, rn, lsb, width);
}


void MacroAssembler::Scvtf(const FPRegister& fd,
                           const Register& rn,
                           unsigned fbits) {
  ASSERT(allow_macro_instructions_);
  scvtf(fd, rn, fbits);
}


void MacroAssembler::Sdiv(const Register& rd,
                          const Register& rn,
                          const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  sdiv(rd, rn, rm);
}


void MacroAssembler::Smaddl(const Register& rd,
                            const Register& rn,
                            const Register& rm,
                            const Register& ra) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  smaddl(rd, rn, rm, ra);
}


void MacroAssembler::Smsubl(const Register& rd,
                            const Register& rn,
                            const Register& rm,
                            const Register& ra) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  smsubl(rd, rn, rm, ra);
}


void MacroAssembler::Smull(const Register& rd,
                           const Register& rn,
                           const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  smull(rd, rn, rm);
}


void MacroAssembler::Smulh(const Register& rd,
                           const Register& rn,
                           const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  smulh(rd, rn, rm);
}


void MacroAssembler::Stnp(const CPURegister& rt,
                          const CPURegister& rt2,
                          const MemOperand& dst) {
  ASSERT(allow_macro_instructions_);
  stnp(rt, rt2, dst);
}


void MacroAssembler::Stp(const CPURegister& rt,
                         const CPURegister& rt2,
                         const MemOperand& dst) {
  ASSERT(allow_macro_instructions_);
  stp(rt, rt2, dst);
}


void MacroAssembler::Sxtb(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  sxtb(rd, rn);
}


void MacroAssembler::Sxth(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  sxth(rd, rn);
}


void MacroAssembler::Sxtw(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  sxtw(rd, rn);
}


void MacroAssembler::Ubfiz(const Register& rd,
                           const Register& rn,
                           unsigned lsb,
                           unsigned width) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ubfiz(rd, rn, lsb, width);
}


void MacroAssembler::Ubfx(const Register& rd,
                          const Register& rn,
                          unsigned lsb,
                          unsigned width) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  ubfx(rd, rn, lsb, width);
}


void MacroAssembler::Ucvtf(const FPRegister& fd,
                           const Register& rn,
                           unsigned fbits) {
  ASSERT(allow_macro_instructions_);
  ucvtf(fd, rn, fbits);
}


void MacroAssembler::Udiv(const Register& rd,
                          const Register& rn,
                          const Register& rm) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  udiv(rd, rn, rm);
}


void MacroAssembler::Umaddl(const Register& rd,
                            const Register& rn,
                            const Register& rm,
                            const Register& ra) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  umaddl(rd, rn, rm, ra);
}


void MacroAssembler::Umsubl(const Register& rd,
                            const Register& rn,
                            const Register& rm,
                            const Register& ra) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  umsubl(rd, rn, rm, ra);
}


void MacroAssembler::Uxtb(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  uxtb(rd, rn);
}


void MacroAssembler::Uxth(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  uxth(rd, rn);
}


void MacroAssembler::Uxtw(const Register& rd, const Register& rn) {
  ASSERT(allow_macro_instructions_);
  ASSERT(!rd.IsZero());
  uxtw(rd, rn);
}

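// Move csp so that it points at least 'space' bytes below the current
// JavaScript stack pointer, keeping the memory about to be claimed above the
// system stack pointer.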
void MacroAssembler::BumpSystemStackPointer(const Operand& space) {
  ASSERT(!csp.Is(sp_));
  if (!TmpList()->IsEmpty()) {
    if (CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
      UseScratchRegisterScope temps(this);
      Register temp = temps.AcquireX();
      Sub(temp, StackPointer(), space);
      Bic(csp, temp, 0xf);
    } else {
      Sub(csp, StackPointer(), space);
    }
  } else {
    // TODO(jbramley): Several callers rely on this not using scratch
    // registers, so we use the assembler directly here. However, this means
    // that large immediate values of 'space' cannot be handled cleanly. (Only
    // 24-bit immediates or values of 'space' that can be encoded in one
    // instruction are accepted.) Once we implement our flexible scratch
    // register idea, we could greatly simplify this function.
    InstructionAccurateScope scope(this);
    ASSERT(space.IsImmediate());
    // Align to 16 bytes.
    uint64_t imm = RoundUp(space.ImmediateValue(), 0x10);
    ASSERT(is_uint24(imm));

    Register source = StackPointer();
    if (CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
      bic(csp, source, 0xf);
      source = csp;
    }
    if (!is_uint12(imm)) {
      int64_t imm_top_12_bits = imm >> 12;
      sub(csp, source, imm_top_12_bits << 12);
      source = csp;
      imm -= imm_top_12_bits << 12;
    }
    if (imm > 0) {
      sub(csp, source, imm);
    }
  }
  AssertStackConsistency();
}


void MacroAssembler::SyncSystemStackPointer() {
  ASSERT(emit_debug_code());
  ASSERT(!csp.Is(sp_));
  { InstructionAccurateScope scope(this);
    if (CpuFeatures::IsSupported(ALWAYS_ALIGN_CSP)) {
      bic(csp, StackPointer(), 0xf);
    } else {
      mov(csp, StackPointer());
    }
  }
  AssertStackConsistency();
}


void MacroAssembler::InitializeRootRegister() {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  Mov(root, Operand(roots_array_start));
}

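// Smis store their value in the top 32 bits of the register (kSmiShift is
// 32) with zero tag bits below, so tagging is a logical shift left and
// untagging an arithmetic shift right.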
void MacroAssembler::SmiTag(Register dst, Register src) {
  ASSERT(dst.Is64Bits() && src.Is64Bits());
  Lsl(dst, src, kSmiShift);
}


void MacroAssembler::SmiTag(Register smi) { SmiTag(smi, smi); }


void MacroAssembler::SmiUntag(Register dst, Register src) {
  ASSERT(dst.Is64Bits() && src.Is64Bits());
  if (FLAG_enable_slow_asserts) {
    AssertSmi(src);
  }
  Asr(dst, src, kSmiShift);
}


void MacroAssembler::SmiUntag(Register smi) { SmiUntag(smi, smi); }


void MacroAssembler::SmiUntagToDouble(FPRegister dst,
                                      Register src,
                                      UntagMode mode) {
  ASSERT(dst.Is64Bits() && src.Is64Bits());
  if (FLAG_enable_slow_asserts && (mode == kNotSpeculativeUntag)) {
    AssertSmi(src);
  }
  Scvtf(dst, src, kSmiShift);
}


void MacroAssembler::SmiUntagToFloat(FPRegister dst,
                                     Register src,
                                     UntagMode mode) {
  ASSERT(dst.Is32Bits() && src.Is64Bits());
  if (FLAG_enable_slow_asserts && (mode == kNotSpeculativeUntag)) {
    AssertSmi(src);
  }
  Scvtf(dst, src, kSmiShift);
}


void MacroAssembler::SmiTagAndPush(Register src) {
  STATIC_ASSERT((kSmiShift == 32) && (kSmiTag == 0));
  Push(src.W(), wzr);
}


void MacroAssembler::SmiTagAndPush(Register src1, Register src2) {
  STATIC_ASSERT((kSmiShift == 32) && (kSmiTag == 0));
  Push(src1.W(), wzr, src2.W(), wzr);
}

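// The Smi tag is bit 0 and is clear for Smis, so a single tbz/tbnz on bit 0
// separates Smis from heap objects.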
void MacroAssembler::JumpIfSmi(Register value,
                               Label* smi_label,
                               Label* not_smi_label) {
  STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0));
  // Check if the tag bit is set.
  if (smi_label) {
    Tbz(value, 0, smi_label);
    if (not_smi_label) {
      B(not_smi_label);
    }
  } else {
    ASSERT(not_smi_label);
    Tbnz(value, 0, not_smi_label);
  }
}


void MacroAssembler::JumpIfNotSmi(Register value, Label* not_smi_label) {
  JumpIfSmi(value, NULL, not_smi_label);
}


void MacroAssembler::JumpIfBothSmi(Register value1,
                                   Register value2,
                                   Label* both_smi_label,
                                   Label* not_smi_label) {
  STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0));
  UseScratchRegisterScope temps(this);
  Register tmp = temps.AcquireX();
  // Check if both tag bits are clear.
  Orr(tmp, value1, value2);
  JumpIfSmi(tmp, both_smi_label, not_smi_label);
}


void MacroAssembler::JumpIfEitherSmi(Register value1,
                                     Register value2,
                                     Label* either_smi_label,
                                     Label* not_smi_label) {
  STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag == 0));
  UseScratchRegisterScope temps(this);
  Register tmp = temps.AcquireX();
  // Check if either tag bit is clear.
  And(tmp, value1, value2);
  JumpIfSmi(tmp, either_smi_label, not_smi_label);
}


void MacroAssembler::JumpIfEitherNotSmi(Register value1,
                                        Register value2,
                                        Label* not_smi_label) {
  JumpIfBothSmi(value1, value2, NULL, not_smi_label);
}


void MacroAssembler::JumpIfBothNotSmi(Register value1,
                                      Register value2,
                                      Label* not_smi_label) {
  JumpIfEitherSmi(value1, value2, NULL, not_smi_label);
}


void MacroAssembler::ObjectTag(Register tagged_obj, Register obj) {
  STATIC_ASSERT(kHeapObjectTag == 1);
  if (emit_debug_code()) {
    Label ok;
    Tbz(obj, 0, &ok);
    Abort(kObjectTagged);
    Bind(&ok);
  }
  Orr(tagged_obj, obj, kHeapObjectTag);
}


void MacroAssembler::ObjectUntag(Register untagged_obj, Register obj) {
  STATIC_ASSERT(kHeapObjectTag == 1);
  if (emit_debug_code()) {
    Label ok;
    Tbnz(obj, 0, &ok);
    Abort(kObjectNotTagged);
    Bind(&ok);
  }
  Bic(untagged_obj, obj, kHeapObjectTag);
}


void MacroAssembler::IsObjectNameType(Register object,
                                      Register type,
                                      Label* fail) {
  CompareObjectType(object, type, type, LAST_NAME_TYPE);
  B(hi, fail);
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  Ldr(map, FieldMemOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  Ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  // If cmp result is lt, the following ccmp will clear all flags.
  // Z == 0, N == V implies gt condition.
  Cmp(scratch, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  Ccmp(scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE, NoFlag, ge);

  // If we didn't get a valid label object just fall through and leave the
  // flags updated.
  if (fail != NULL) {
    B(gt, fail);
  }
}


void MacroAssembler::IsObjectJSStringType(Register object,
                                          Register type,
                                          Label* not_string,
                                          Label* string) {
  Ldr(type, FieldMemOperand(object, HeapObject::kMapOffset));
  Ldrb(type.W(), FieldMemOperand(type, Map::kInstanceTypeOffset));

  STATIC_ASSERT(kStringTag == 0);
  ASSERT((string != NULL) || (not_string != NULL));
  if (string == NULL) {
    TestAndBranchIfAnySet(type.W(), kIsNotStringMask, not_string);
  } else if (not_string == NULL) {
    TestAndBranchIfAllClear(type.W(), kIsNotStringMask, string);
  } else {
    TestAndBranchIfAnySet(type.W(), kIsNotStringMask, not_string);
    B(string);
  }
}


void MacroAssembler::Push(Handle<Object> handle) {
  UseScratchRegisterScope temps(this);
  Register tmp = temps.AcquireX();
  Mov(tmp, Operand(handle));
  Push(tmp);
}

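// Claim reserves count * unit_size bytes on the current stack. When the
// stack pointer in use is not csp, csp is bumped first so that it still
// covers the claimed area; otherwise the claimed size must keep csp 16-byte
// aligned.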
void MacroAssembler::Claim(uint64_t count, uint64_t unit_size) {
  uint64_t size = count * unit_size;

  if (size == 0) {
    return;
  }

  if (csp.Is(StackPointer())) {
    ASSERT(size % 16 == 0);
  } else {
    BumpSystemStackPointer(size);
  }

  Sub(StackPointer(), StackPointer(), size);
}


void MacroAssembler::Claim(const Register& count, uint64_t unit_size) {
  if (unit_size == 0) return;
  ASSERT(IsPowerOf2(unit_size));

  const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits);
  const Operand size(count, LSL, shift);

  if (size.IsZero()) {
    return;
  }

  if (!csp.Is(StackPointer())) {
    BumpSystemStackPointer(size);
  }

  Sub(StackPointer(), StackPointer(), size);
}


void MacroAssembler::ClaimBySMI(const Register& count_smi, uint64_t unit_size) {
  ASSERT(unit_size == 0 || IsPowerOf2(unit_size));
  const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits) - kSmiShift;
  const Operand size(count_smi,
                     (shift >= 0) ? (LSL) : (LSR),
                     (shift >= 0) ? (shift) : (-shift));

  if (size.IsZero()) {
    return;
  }

  if (!csp.Is(StackPointer())) {
    BumpSystemStackPointer(size);
  }

  Sub(StackPointer(), StackPointer(), size);
}


void MacroAssembler::Drop(uint64_t count, uint64_t unit_size) {
  uint64_t size = count * unit_size;

  if (size == 0) {
    return;
  }

  Add(StackPointer(), StackPointer(), size);

  if (csp.Is(StackPointer())) {
    ASSERT(size % 16 == 0);
  } else if (emit_debug_code()) {
    // It is safe to leave csp where it is when unwinding the JavaScript stack,
    // but if we keep it matching StackPointer, the simulator can detect memory
    // accesses in the now-free part of the stack.
    SyncSystemStackPointer();
  }
}


void MacroAssembler::Drop(const Register& count, uint64_t unit_size) {
  if (unit_size == 0) return;
  ASSERT(IsPowerOf2(unit_size));

  const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits);
  const Operand size(count, LSL, shift);

  if (size.IsZero()) {
    return;
  }

  Add(StackPointer(), StackPointer(), size);

  if (!csp.Is(StackPointer()) && emit_debug_code()) {
    // It is safe to leave csp where it is when unwinding the JavaScript stack,
    // but if we keep it matching StackPointer, the simulator can detect memory
    // accesses in the now-free part of the stack.
    SyncSystemStackPointer();
  }
}


void MacroAssembler::DropBySMI(const Register& count_smi, uint64_t unit_size) {
  ASSERT(unit_size == 0 || IsPowerOf2(unit_size));
  const int shift = CountTrailingZeros(unit_size, kXRegSizeInBits) - kSmiShift;
  const Operand size(count_smi,
                     (shift >= 0) ? (LSL) : (LSR),
                     (shift >= 0) ? (shift) : (-shift));

  if (size.IsZero()) {
    return;
  }

  Add(StackPointer(), StackPointer(), size);

  if (!csp.Is(StackPointer()) && emit_debug_code()) {
    // It is safe to leave csp where it is when unwinding the JavaScript stack,
    // but if we keep it matching StackPointer, the simulator can detect memory
    // accesses in the now-free part of the stack.
    SyncSystemStackPointer();
  }
}

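// Comparisons against zero for equality or inequality can use the dedicated
// cbz/cbnz instructions; everything else falls back to Cmp followed by a
// conditional branch.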
void MacroAssembler::CompareAndBranch(const Register& lhs,
                                      const Operand& rhs,
                                      Condition cond,
                                      Label* label) {
  if (rhs.IsImmediate() && (rhs.ImmediateValue() == 0) &&
      ((cond == eq) || (cond == ne))) {
    if (cond == eq) {
      Cbz(lhs, label);
    } else {
      Cbnz(lhs, label);
    }
  } else {
    Cmp(lhs, rhs);
    B(cond, label);
  }
}

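// When the bit pattern has exactly one bit set, tbnz/tbz can test that bit
// directly; otherwise the pattern is tested with Tst and a conditional
// branch.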
void MacroAssembler::TestAndBranchIfAnySet(const Register& reg,
                                           const uint64_t bit_pattern,
                                           Label* label) {
  int bits = reg.SizeInBits();
  ASSERT(CountSetBits(bit_pattern, bits) > 0);
  if (CountSetBits(bit_pattern, bits) == 1) {
    Tbnz(reg, MaskToBit(bit_pattern), label);
  } else {
    Tst(reg, bit_pattern);
    B(ne, label);
  }
}


void MacroAssembler::TestAndBranchIfAllClear(const Register& reg,
                                             const uint64_t bit_pattern,
                                             Label* label) {
  int bits = reg.SizeInBits();
  ASSERT(CountSetBits(bit_pattern, bits) > 0);
  if (CountSetBits(bit_pattern, bits) == 1) {
    Tbz(reg, MaskToBit(bit_pattern), label);
  } else {
    Tst(reg, bit_pattern);
    B(eq, label);
  }
}


void MacroAssembler::InlineData(uint64_t data) {
  ASSERT(is_uint16(data));
  InstructionAccurateScope scope(this, 1);
  movz(xzr, data);
}


void MacroAssembler::EnableInstrumentation() {
  InstructionAccurateScope scope(this, 1);
  movn(xzr, InstrumentStateEnable);
}


void MacroAssembler::DisableInstrumentation() {
  InstructionAccurateScope scope(this, 1);
  movn(xzr, InstrumentStateDisable);
}


void MacroAssembler::AnnotateInstrumentation(const char* marker_name) {
  ASSERT(strlen(marker_name) == 2);

  // We allow only printable characters in the marker names. Unprintable
  // characters are reserved for controlling features of the instrumentation.
  ASSERT(isprint(marker_name[0]) && isprint(marker_name[1]));

  InstructionAccurateScope scope(this, 1);
  movn(xzr, (marker_name[1] << 8) | marker_name[0]);
}

} }  // namespace v8::internal

#endif  // V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_