1 // Copyright 2015, VIXL authors
2 // All rights reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are met:
6 //
7 // * Redistributions of source code must retain the above copyright notice,
8 // this list of conditions and the following disclaimer.
9 // * Redistributions in binary form must reproduce the above copyright notice,
10 // this list of conditions and the following disclaimer in the documentation
11 // and/or other materials provided with the distribution.
12 // * Neither the name of ARM Limited nor the names of its contributors may be
13 // used to endorse or promote products derived from this software without
14 // specific prior written permission.
15 //
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20 // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23 // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
27 #ifndef VIXL_AARCH64_MACRO_ASSEMBLER_AARCH64_H_
28 #define VIXL_AARCH64_MACRO_ASSEMBLER_AARCH64_H_
29
30 #include <algorithm>
31 #include <limits>
32
33 #include "../code-generation-scopes-vixl.h"
34 #include "../globals-vixl.h"
35 #include "../macro-assembler-interface.h"
36
37 #include "assembler-aarch64.h"
38 // Required for runtime call support.
39 // TODO: Break this dependency. We should be able to separate out the necessary
40 // parts so that we don't need to include the whole simulator header.
41 #include "simulator-aarch64.h"
42 // Required in order to generate debugging instructions for the simulator. This
43 // is needed regardless of whether the simulator is included or not, since
44 // generating simulator specific instructions is controlled at runtime.
45 #include "simulator-constants-aarch64.h"
46
47
// X-macro describing the single-register load/store macro-instructions. Each
// entry supplies: the macro name, the register operand type, the operand name,
// and the LoadStoreOp encoding (which may depend on the register's size).
#define LS_MACRO_LIST(V)                                     \
  V(Ldrb, Register&, rt, LDRB_w)                             \
  V(Strb, Register&, rt, STRB_w)                             \
  V(Ldrsb, Register&, rt, rt.Is64Bits() ? LDRSB_x : LDRSB_w) \
  V(Ldrh, Register&, rt, LDRH_w)                             \
  V(Strh, Register&, rt, STRH_w)                             \
  V(Ldrsh, Register&, rt, rt.Is64Bits() ? LDRSH_x : LDRSH_w) \
  V(Ldr, CPURegister&, rt, LoadOpFor(rt))                    \
  V(Str, CPURegister&, rt, StoreOpFor(rt))                   \
  V(Ldrsw, Register&, rt, LDRSW_x)
58
59
// X-macro describing the load/store pair macro-instructions. Same layout as
// LS_MACRO_LIST, but with two register operands.
#define LSPAIR_MACRO_LIST(V)                             \
  V(Ldp, CPURegister&, rt, rt2, LoadPairOpFor(rt, rt2)) \
  V(Stp, CPURegister&, rt, rt2, StorePairOpFor(rt, rt2)) \
  V(Ldpsw, Register&, rt, rt2, LDPSW_x)
64
65 namespace vixl {
66 namespace aarch64 {
67
// Forward declarations.
69 class MacroAssembler;
70 class UseScratchRegisterScope;
71
// Base class for the literal and veneer pools. It tracks a checkpoint (the
// buffer offset before which the pool must be checked) and a nesting counter
// that temporarily blocks the pool from being emitted.
class Pool {
 public:
  explicit Pool(MacroAssembler* masm)
      : checkpoint_(kNoCheckpointRequired), masm_(masm) {
    Reset();
  }

  // Clear the checkpoint and unblock the pool.
  void Reset() {
    checkpoint_ = kNoCheckpointRequired;
    monitor_ = 0;
  }

  // Block() increments the monitor; Release() (defined out of line) is
  // expected to undo one Block(). The pool is blocked while the count is
  // non-zero.
  void Block() { monitor_++; }
  void Release();
  bool IsBlocked() const { return monitor_ != 0; }

  // Sentinel checkpoint value meaning "no check is required".
  static const ptrdiff_t kNoCheckpointRequired = PTRDIFF_MAX;

  void SetNextCheckpoint(ptrdiff_t checkpoint);
  ptrdiff_t GetCheckpoint() const { return checkpoint_; }
  VIXL_DEPRECATED("GetCheckpoint", ptrdiff_t checkpoint() const) {
    return GetCheckpoint();
  }

  // Whether emitting the pool requires a branch to jump over it.
  enum EmitOption { kBranchRequired, kNoBranchRequired };

 protected:
  // Next buffer offset at which a check is required for this pool.
  ptrdiff_t checkpoint_;
  // Indicates whether the emission of this pool is blocked.
  int monitor_;
  // The MacroAssembler using this pool.
  MacroAssembler* masm_;
};
106
107
// Pool of literal (constant) values to be emitted into the instruction
// stream, with bookkeeping to decide when emission becomes mandatory or
// merely recommended.
class LiteralPool : public Pool {
 public:
  explicit LiteralPool(MacroAssembler* masm);
  ~LiteralPool() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION;
  void Reset();

  // Add a literal entry to the pool.
  void AddEntry(RawLiteral* literal);
  bool IsEmpty() const { return entries_.empty(); }
  size_t GetSize() const;
  VIXL_DEPRECATED("GetSize", size_t Size() const) { return GetSize(); }

  size_t GetMaxSize() const;
  VIXL_DEPRECATED("GetMaxSize", size_t MaxSize() const) { return GetMaxSize(); }

  // Combined worst-case size of the other pools (used when reserving space).
  size_t GetOtherPoolsMaxSize() const;
  VIXL_DEPRECATED("GetOtherPoolsMaxSize", size_t OtherPoolsMaxSize() const) {
    return GetOtherPoolsMaxSize();
  }

  // Emit the pool if generating `amount` more bytes would push us past the
  // checkpoint.
  void CheckEmitFor(size_t amount, EmitOption option = kBranchRequired);
  // Check whether we need to emit the literal pool in order to be able to
  // safely emit a branch with a given range.
  void CheckEmitForBranch(size_t range);
  void Emit(EmitOption option = kNoBranchRequired);

  void SetNextRecommendedCheckpoint(ptrdiff_t offset);
  ptrdiff_t GetNextRecommendedCheckpoint();
  VIXL_DEPRECATED("GetNextRecommendedCheckpoint",
                  ptrdiff_t NextRecommendedCheckpoint()) {
    return GetNextRecommendedCheckpoint();
  }

  void UpdateFirstUse(ptrdiff_t use_position);

  // Register a literal to be deleted when this pool is destroyed.
  void DeleteOnDestruction(RawLiteral* literal) {
    deleted_on_destruction_.push_back(literal);
  }

  // Recommended not exact since the pool can be blocked for short periods.
  static const ptrdiff_t kRecommendedLiteralPoolRange = 128 * KBytes;

 private:
  std::vector<RawLiteral*> entries_;
  // Accumulated size of the entries, excluding the pool header (see
  // GetSize(), which adds the header).
  size_t size_;
  // Buffer offset of the first (oldest) pending use of a pool entry; see
  // UpdateFirstUse().
  ptrdiff_t first_use_;
  // The parent class `Pool` provides a `checkpoint_`, which is the buffer
  // offset before which a check *must* occur. This recommended checkpoint
  // indicates when we would like to start emitting the constant pool. The
  // MacroAssembler can, but does not have to, check the buffer when the
  // checkpoint is reached.
  ptrdiff_t recommended_checkpoint_;

  // Literals registered via DeleteOnDestruction().
  std::vector<RawLiteral*> deleted_on_destruction_;
};
162
163
GetSize()164 inline size_t LiteralPool::GetSize() const {
165 // Account for the pool header.
166 return size_ + kInstructionSize;
167 }
168
169
GetMaxSize()170 inline size_t LiteralPool::GetMaxSize() const {
171 // Account for the potential branch over the pool.
172 return GetSize() + kInstructionSize;
173 }
174
175
GetNextRecommendedCheckpoint()176 inline ptrdiff_t LiteralPool::GetNextRecommendedCheckpoint() {
177 return first_use_ + kRecommendedLiteralPoolRange;
178 }
179
180
// Pool of veneers. A veneer is an out-of-line branch emitted on behalf of a
// conditional, compare-and-branch or test-and-branch instruction whose target
// may end up out of the instruction's immediate branch range.
class VeneerPool : public Pool {
 public:
  explicit VeneerPool(MacroAssembler* masm) : Pool(masm) {}

  void Reset();

  // These hide (they do not override) the identically-implemented,
  // non-virtual Pool methods of the same names.
  void Block() { monitor_++; }
  void Release();
  bool IsBlocked() const { return monitor_ != 0; }
  bool IsEmpty() const { return unresolved_branches_.IsEmpty(); }

  // Bookkeeping for a single unresolved forward branch.
  class BranchInfo {
   public:
    BranchInfo()
        : first_unreacheable_pc_(0),
          pc_offset_(0),
          label_(NULL),
          branch_type_(UnknownBranchType) {}
    BranchInfo(ptrdiff_t offset, Label* label, ImmBranchType branch_type)
        : pc_offset_(offset), label_(label), branch_type_(branch_type) {
      first_unreacheable_pc_ =
          pc_offset_ + Instruction::GetImmBranchForwardRange(branch_type_);
    }

    static bool IsValidComparison(const BranchInfo& branch_1,
                                  const BranchInfo& branch_2) {
      // BranchInfo are always compared against other objects with
      // the same branch type.
      if (branch_1.branch_type_ != branch_2.branch_type_) {
        return false;
      }
      // Since we should never have two branch infos with the same offsets, it
      // first looks like we should check that offsets are different. However
      // the operators may also be used to *search* for a branch info in the
      // set.
      bool same_offsets = (branch_1.pc_offset_ == branch_2.pc_offset_);
      return (!same_offsets || ((branch_1.label_ == branch_2.label_) &&
                                (branch_1.first_unreacheable_pc_ ==
                                 branch_2.first_unreacheable_pc_)));
    }

    // We must provide comparison operators to work with InvalSet.
    bool operator==(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ == other.pc_offset_;
    }
    bool operator<(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ < other.pc_offset_;
    }
    bool operator<=(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ <= other.pc_offset_;
    }
    bool operator>(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ > other.pc_offset_;
    }

    // First instruction position that is not reachable by the branch using a
    // positive branch offset.
    // (sic: historic misspelling of "unreachable".)
    ptrdiff_t first_unreacheable_pc_;
    // Offset of the branch in the code generation buffer.
    ptrdiff_t pc_offset_;
    // The label branched to.
    Label* label_;
    ImmBranchType branch_type_;
  };

  // Whether branches of this type can require a veneer (unconditional
  // branches never do).
  bool BranchTypeUsesVeneers(ImmBranchType type) {
    return (type != UnknownBranchType) && (type != UncondBranchType);
  }

  void RegisterUnresolvedBranch(ptrdiff_t branch_pos,
                                Label* label,
                                ImmBranchType branch_type);
  void DeleteUnresolvedBranchInfoForLabel(Label* label);

  bool ShouldEmitVeneer(int64_t first_unreacheable_pc, size_t amount);
  bool ShouldEmitVeneers(size_t amount) {
    return ShouldEmitVeneer(unresolved_branches_.GetFirstLimit(), amount);
  }

  void CheckEmitFor(size_t amount, EmitOption option = kBranchRequired);
  void Emit(EmitOption option, size_t margin);

  // The code size generated for a veneer. Currently one branch instruction.
  // This is for code size checking purposes, and can be extended in the future
  // for example if we decide to add nops between the veneers.
  static const int kVeneerCodeSize = 1 * kInstructionSize;
  // The maximum size of code other than veneers that can be generated when
  // emitting a veneer pool. Currently there can be an additional branch to jump
  // over the pool.
  static const int kPoolNonVeneerCodeSize = 1 * kInstructionSize;

  void UpdateNextCheckPoint() { SetNextCheckpoint(GetNextCheckPoint()); }

  int GetNumberOfPotentialVeneers() const {
    return static_cast<int>(unresolved_branches_.GetSize());
  }
  VIXL_DEPRECATED("GetNumberOfPotentialVeneers",
                  int NumberOfPotentialVeneers() const) {
    return GetNumberOfPotentialVeneers();
  }

  // Worst case: one veneer per unresolved branch, plus the branch over the
  // pool.
  size_t GetMaxSize() const {
    return kPoolNonVeneerCodeSize +
           unresolved_branches_.GetSize() * kVeneerCodeSize;
  }
  VIXL_DEPRECATED("GetMaxSize", size_t MaxSize() const) { return GetMaxSize(); }

  size_t GetOtherPoolsMaxSize() const;
  VIXL_DEPRECATED("GetOtherPoolsMaxSize", size_t OtherPoolsMaxSize() const) {
    return GetOtherPoolsMaxSize();
  }

  // Parameters for the InvalSet instantiation below.
  static const int kNPreallocatedInfos = 4;
  static const ptrdiff_t kInvalidOffset = PTRDIFF_MAX;
  static const size_t kReclaimFrom = 128;
  static const size_t kReclaimFactor = 16;

 private:
  typedef InvalSet<BranchInfo,
                   kNPreallocatedInfos,
                   ptrdiff_t,
                   kInvalidOffset,
                   kReclaimFrom,
                   kReclaimFactor>
      BranchInfoTypedSetBase;
  typedef InvalSetIterator<BranchInfoTypedSetBase> BranchInfoTypedSetIterBase;

  // Set of BranchInfo all sharing a single branch type.
  class BranchInfoTypedSet : public BranchInfoTypedSetBase {
   public:
    BranchInfoTypedSet() : BranchInfoTypedSetBase() {}

    // Smallest key in the set, or kInvalidOffset when the set is empty.
    ptrdiff_t GetFirstLimit() {
      if (empty()) {
        return kInvalidOffset;
      }
      return GetMinElementKey();
    }
    VIXL_DEPRECATED("GetFirstLimit", ptrdiff_t FirstLimit()) {
      return GetFirstLimit();
    }
  };

  class BranchInfoTypedSetIterator : public BranchInfoTypedSetIterBase {
   public:
    BranchInfoTypedSetIterator() : BranchInfoTypedSetIterBase(NULL) {}
    explicit BranchInfoTypedSetIterator(BranchInfoTypedSet* typed_set)
        : BranchInfoTypedSetIterBase(typed_set) {}

    // TODO: Remove these and use the STL-like interface instead.
    using BranchInfoTypedSetIterBase::Advance;
    using BranchInfoTypedSetIterBase::Current;
  };

  // Aggregates one BranchInfoTypedSet per tracked branch type, presenting a
  // single set-like interface.
  class BranchInfoSet {
   public:
    void insert(BranchInfo branch_info) {
      ImmBranchType type = branch_info.branch_type_;
      VIXL_ASSERT(IsValidBranchType(type));
      typed_set_[BranchIndexFromType(type)].insert(branch_info);
    }

    void erase(BranchInfo branch_info) {
      if (IsValidBranchType(branch_info.branch_type_)) {
        int index =
            BranchInfoSet::BranchIndexFromType(branch_info.branch_type_);
        typed_set_[index].erase(branch_info);
      }
    }

    // Total number of entries across all branch types.
    size_t GetSize() const {
      size_t res = 0;
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        res += typed_set_[i].size();
      }
      return res;
    }
    VIXL_DEPRECATED("GetSize", size_t size() const) { return GetSize(); }

    bool IsEmpty() const {
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        if (!typed_set_[i].empty()) {
          return false;
        }
      }
      return true;
    }
    VIXL_DEPRECATED("IsEmpty", bool empty() const) { return IsEmpty(); }

    // Smallest limit over all typed sets, or kInvalidOffset if all are empty.
    ptrdiff_t GetFirstLimit() {
      ptrdiff_t res = kInvalidOffset;
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        res = std::min(res, typed_set_[i].GetFirstLimit());
      }
      return res;
    }
    VIXL_DEPRECATED("GetFirstLimit", ptrdiff_t FirstLimit()) {
      return GetFirstLimit();
    }

    void Reset() {
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        typed_set_[i].clear();
      }
    }

    // Mapping between the tracked branch types and their index in typed_set_.
    static ImmBranchType BranchTypeFromIndex(int index) {
      switch (index) {
        case 0:
          return CondBranchType;
        case 1:
          return CompareBranchType;
        case 2:
          return TestBranchType;
        default:
          VIXL_UNREACHABLE();
          return UnknownBranchType;
      }
    }
    static int BranchIndexFromType(ImmBranchType branch_type) {
      switch (branch_type) {
        case CondBranchType:
          return 0;
        case CompareBranchType:
          return 1;
        case TestBranchType:
          return 2;
        default:
          VIXL_UNREACHABLE();
          return 0;
      }
    }

    bool IsValidBranchType(ImmBranchType branch_type) {
      return (branch_type != UnknownBranchType) &&
             (branch_type != UncondBranchType);
    }

   private:
    static const int kNumberOfTrackedBranchTypes = 3;
    BranchInfoTypedSet typed_set_[kNumberOfTrackedBranchTypes];

    friend class VeneerPool;
    friend class BranchInfoSetIterator;
  };

  // Iterates over all entries of a BranchInfoSet, exhausting each typed
  // sub-set in turn.
  class BranchInfoSetIterator {
   public:
    explicit BranchInfoSetIterator(BranchInfoSet* set) : set_(set) {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        // NOTE(review): placement new re-initialises the already
        // default-constructed array elements in place — presumably because
        // the iterator type is not assignable; confirm against
        // InvalSetIterator.
        new (&sub_iterator_[i])
            BranchInfoTypedSetIterator(&(set_->typed_set_[i]));
      }
    }

    // Current element of the first non-exhausted sub-iterator.
    VeneerPool::BranchInfo* Current() {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          return sub_iterator_[i].Current();
        }
      }
      VIXL_UNREACHABLE();
      return NULL;
    }

    void Advance() {
      VIXL_ASSERT(!Done());
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          sub_iterator_[i].Advance();
          return;
        }
      }
      VIXL_UNREACHABLE();
    }

    bool Done() const {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) return false;
      }
      return true;
    }

    // Skip all remaining entries of the current branch type.
    void AdvanceToNextType() {
      VIXL_ASSERT(!Done());
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          sub_iterator_[i].Finish();
          return;
        }
      }
      VIXL_UNREACHABLE();
    }

    void DeleteCurrentAndAdvance() {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          sub_iterator_[i].DeleteCurrentAndAdvance();
          return;
        }
      }
    }

   private:
    BranchInfoSet* set_;
    BranchInfoTypedSetIterator
        sub_iterator_[BranchInfoSet::kNumberOfTrackedBranchTypes];
  };

  // Next buffer offset at which this pool must be checked: the nearest branch
  // range limit, or kNoCheckpointRequired when there are no unresolved
  // branches.
  ptrdiff_t GetNextCheckPoint() {
    if (unresolved_branches_.IsEmpty()) {
      return kNoCheckpointRequired;
    } else {
      return unresolved_branches_.GetFirstLimit();
    }
  }
  VIXL_DEPRECATED("GetNextCheckPoint", ptrdiff_t NextCheckPoint()) {
    return GetNextCheckPoint();
  }

  // Information about unresolved (forward) branches.
  BranchInfoSet unresolved_branches_;
};
507
508
// Helper for common emission checks.
// The macro-instruction maps to a single instruction, so the scope reserves
// space for exactly one instruction.
class SingleEmissionCheckScope : public EmissionCheckScope {
 public:
  explicit SingleEmissionCheckScope(MacroAssemblerInterface* masm)
      : EmissionCheckScope(masm, kInstructionSize) {}
};
516
517
// The macro instruction is a "typical" macro-instruction. Typical
// macro-instructions only emit a few instructions, "a few" being defined as
// 8 here.
class MacroEmissionCheckScope : public EmissionCheckScope {
 public:
  explicit MacroEmissionCheckScope(MacroAssemblerInterface* masm)
      : EmissionCheckScope(masm, kTypicalMacroInstructionMaxSize) {}

 private:
  // Upper bound on the code emitted by a typical macro-instruction.
  static const size_t kTypicalMacroInstructionMaxSize = 8 * kInstructionSize;
};
528
529
530 // This scope simplifies the handling of the SVE `movprfx` instruction.
531 //
532 // If dst.Aliases(src):
533 // - Start an ExactAssemblyScope(masm, kInstructionSize).
534 // Otherwise:
535 // - Start an ExactAssemblyScope(masm, 2 * kInstructionSize).
536 // - Generate a suitable `movprfx` instruction.
537 //
538 // In both cases, the ExactAssemblyScope is left with enough remaining space for
539 // exactly one destructive instruction.
540 class MovprfxHelperScope : public ExactAssemblyScope {
541 public:
542 inline MovprfxHelperScope(MacroAssembler* masm,
543 const ZRegister& dst,
544 const ZRegister& src);
545
546 inline MovprfxHelperScope(MacroAssembler* masm,
547 const ZRegister& dst,
548 const PRegister& pg,
549 const ZRegister& src);
550
551 // TODO: Implement constructors that examine _all_ sources. If `dst` aliases
552 // any other source register, we can't use `movprfx`. This isn't obviously
553 // useful, but the MacroAssembler should not generate invalid code for it.
554 // Valid behaviour can be implemented using `mov`.
555 //
556 // The best way to handle this in an instruction-agnostic way is probably to
557 // use variadic templates.
558
559 private:
ShouldGenerateMovprfx(const ZRegister & dst,const ZRegister & src)560 inline bool ShouldGenerateMovprfx(const ZRegister& dst,
561 const ZRegister& src) {
562 VIXL_ASSERT(AreSameLaneSize(dst, src));
563 return !dst.Aliases(src);
564 }
565
ShouldGenerateMovprfx(const ZRegister & dst,const PRegister & pg,const ZRegister & src)566 inline bool ShouldGenerateMovprfx(const ZRegister& dst,
567 const PRegister& pg,
568 const ZRegister& src) {
569 VIXL_ASSERT(pg.IsMerging() || pg.IsZeroing());
570 // We need to emit movprfx in two cases:
571 // 1. To give a predicated merging unary instruction zeroing predication.
572 // 2. To make destructive instructions constructive.
573 //
574 // There are no predicated zeroing instructions that can take movprfx, so we
575 // will never generate an unnecessary movprfx with this logic.
576 return pg.IsZeroing() || ShouldGenerateMovprfx(dst, src);
577 }
578 };
579
580
// Branch selectors used by the generic branch-generation macros. Most values
// alias architectural condition codes; the rest select register-based
// branches (cbz/cbnz, tbz/tbnz).
enum BranchType {
  // Copies of architectural conditions.
  // The associated conditions can be used in place of those, the code will
  // take care of reinterpreting them with the correct type.
  integer_eq = eq,
  integer_ne = ne,
  integer_hs = hs,
  integer_lo = lo,
  integer_mi = mi,
  integer_pl = pl,
  integer_vs = vs,
  integer_vc = vc,
  integer_hi = hi,
  integer_ls = ls,
  integer_ge = ge,
  integer_lt = lt,
  integer_gt = gt,
  integer_le = le,
  integer_al = al,
  integer_nv = nv,

  // These two are *different* from the architectural codes al and nv.
  // 'always' is used to generate unconditional branches.
  // 'never' is used to not generate a branch (generally as the inverse
  // branch type of 'always').
  always,
  never,
  // cbz and cbnz
  reg_zero,
  reg_not_zero,
  // tbz and tbnz
  reg_bit_clear,
  reg_bit_set,

  // Aliases.
  kBranchTypeFirstCondition = eq,
  kBranchTypeLastCondition = nv,
  kBranchTypeFirstUsingReg = reg_zero,
  kBranchTypeFirstUsingBit = reg_bit_clear,

  // SVE branch conditions.
  integer_none = eq,
  integer_any = ne,
  integer_nlast = cs,
  integer_last = cc,
  integer_first = mi,
  integer_nfrst = pl,
  integer_pmore = hi,
  integer_plast = ls,
  integer_tcont = ge,
  integer_tstop = lt
};
633
634
635 enum DiscardMoveMode { kDontDiscardForSameWReg, kDiscardForSameWReg };
636
// The macro assembler supports moving automatically pre-shifted immediates for
// arithmetic and logical instructions, and then applying a post shift in the
// instruction to undo the modification, in order to reduce the code emitted
// for an operation. For example:
//
//  Add(x0, x0, 0x1f7de) => movz x16, 0xfbef; add x0, x0, x16, lsl #1.
//
// This optimisation can be only partially applied when the stack pointer is an
// operand or destination, so this enumeration is used to control the shift.
// (See MoveImmediateForShiftedOp, which takes a PreShiftImmMode.)
enum PreShiftImmMode {
  kNoShift,          // Don't pre-shift.
  kLimitShiftForSP,  // Limit pre-shift for add/sub extend use.
  kAnyShift          // Allow any pre-shift.
};
651
// Policy for how FP min/max-style macros propagate NaN inputs.
enum FPMacroNaNPropagationOption {
  // The default option. This generates a run-time error in macros that respect
  // this option.
  NoFPMacroNaNPropagationSelected,
  // For example, Fmin(result, NaN(a), NaN(b)) always selects NaN(a) if NaN(a)
  // and NaN(b) are both quiet, or both are signalling, at the cost of extra
  // code generation in some cases.
  StrictNaNPropagation,
  // For example, Fmin(result, NaN(a), NaN(b)) selects either NaN, but using
  // the fewest instructions.
  FastNaNPropagation
};
664
665 class MacroAssembler : public Assembler, public MacroAssemblerInterface {
666 public:
667 explicit MacroAssembler(
668 PositionIndependentCodeOption pic = PositionIndependentCode);
669 MacroAssembler(size_t capacity,
670 PositionIndependentCodeOption pic = PositionIndependentCode);
671 MacroAssembler(byte* buffer,
672 size_t capacity,
673 PositionIndependentCodeOption pic = PositionIndependentCode);
674 ~MacroAssembler();
675
676 enum FinalizeOption {
677 kFallThrough, // There may be more code to execute after calling Finalize.
678 kUnreachable // Anything generated after calling Finalize is unreachable.
679 };
680
AsAssemblerBase()681 virtual vixl::internal::AssemblerBase* AsAssemblerBase() VIXL_OVERRIDE {
682 return this;
683 }
684
685 // TODO(pools): implement these functions.
EmitPoolHeader()686 virtual void EmitPoolHeader() VIXL_OVERRIDE {}
EmitPoolFooter()687 virtual void EmitPoolFooter() VIXL_OVERRIDE {}
EmitPaddingBytes(int n)688 virtual void EmitPaddingBytes(int n) VIXL_OVERRIDE { USE(n); }
EmitNopBytes(int n)689 virtual void EmitNopBytes(int n) VIXL_OVERRIDE { USE(n); }
690
691 // Start generating code from the beginning of the buffer, discarding any code
692 // and data that has already been emitted into the buffer.
693 //
694 // In order to avoid any accidental transfer of state, Reset ASSERTs that the
695 // constant pool is not blocked.
696 void Reset();
697
698 // Finalize a code buffer of generated instructions. This function must be
699 // called before executing or copying code from the buffer. By default,
700 // anything generated after this should not be reachable (the last instruction
701 // generated is an unconditional branch). If you need to generate more code,
702 // then set `option` to kFallThrough.
703 void FinalizeCode(FinalizeOption option = kUnreachable);
704
705
706 // Constant generation helpers.
707 // These functions return the number of instructions required to move the
708 // immediate into the destination register. Also, if the masm pointer is
709 // non-null, it generates the code to do so.
710 // The two features are implemented using one function to avoid duplication of
711 // the logic.
712 // The function can be used to evaluate the cost of synthesizing an
713 // instruction using 'mov immediate' instructions. A user might prefer loading
714 // a constant using the literal pool instead of using multiple 'mov immediate'
715 // instructions.
716 static int MoveImmediateHelper(MacroAssembler* masm,
717 const Register& rd,
718 uint64_t imm);
719
720
721 // Logical macros.
722 void And(const Register& rd, const Register& rn, const Operand& operand);
723 void Ands(const Register& rd, const Register& rn, const Operand& operand);
724 void Bic(const Register& rd, const Register& rn, const Operand& operand);
725 void Bics(const Register& rd, const Register& rn, const Operand& operand);
726 void Orr(const Register& rd, const Register& rn, const Operand& operand);
727 void Orn(const Register& rd, const Register& rn, const Operand& operand);
728 void Eor(const Register& rd, const Register& rn, const Operand& operand);
729 void Eon(const Register& rd, const Register& rn, const Operand& operand);
730 void Tst(const Register& rn, const Operand& operand);
731 void LogicalMacro(const Register& rd,
732 const Register& rn,
733 const Operand& operand,
734 LogicalOp op);
735
736 // Add and sub macros.
737 void Add(const Register& rd,
738 const Register& rn,
739 const Operand& operand,
740 FlagsUpdate S = LeaveFlags);
741 void Adds(const Register& rd, const Register& rn, const Operand& operand);
742 void Sub(const Register& rd,
743 const Register& rn,
744 const Operand& operand,
745 FlagsUpdate S = LeaveFlags);
746 void Subs(const Register& rd, const Register& rn, const Operand& operand);
747 void Cmn(const Register& rn, const Operand& operand);
748 void Cmp(const Register& rn, const Operand& operand);
749 void Neg(const Register& rd, const Operand& operand);
750 void Negs(const Register& rd, const Operand& operand);
751
752 void AddSubMacro(const Register& rd,
753 const Register& rn,
754 const Operand& operand,
755 FlagsUpdate S,
756 AddSubOp op);
757
758 // Add/sub with carry macros.
759 void Adc(const Register& rd, const Register& rn, const Operand& operand);
760 void Adcs(const Register& rd, const Register& rn, const Operand& operand);
761 void Sbc(const Register& rd, const Register& rn, const Operand& operand);
762 void Sbcs(const Register& rd, const Register& rn, const Operand& operand);
763 void Ngc(const Register& rd, const Operand& operand);
764 void Ngcs(const Register& rd, const Operand& operand);
765 void AddSubWithCarryMacro(const Register& rd,
766 const Register& rn,
767 const Operand& operand,
768 FlagsUpdate S,
769 AddSubWithCarryOp op);
770
771 void Rmif(const Register& xn, unsigned shift, StatusFlags flags);
772 void Setf8(const Register& wn);
773 void Setf16(const Register& wn);
774
775 // Move macros.
776 void Mov(const Register& rd, uint64_t imm);
777 void Mov(const Register& rd,
778 const Operand& operand,
779 DiscardMoveMode discard_mode = kDontDiscardForSameWReg);
Mvn(const Register & rd,uint64_t imm)780 void Mvn(const Register& rd, uint64_t imm) {
781 Mov(rd, (rd.GetSizeInBits() == kXRegSize) ? ~imm : (~imm & kWRegMask));
782 }
783 void Mvn(const Register& rd, const Operand& operand);
784
785 // Try to move an immediate into the destination register in a single
786 // instruction. Returns true for success, and updates the contents of dst.
787 // Returns false, otherwise.
788 bool TryOneInstrMoveImmediate(const Register& dst, uint64_t imm);
789
790 // Move an immediate into register dst, and return an Operand object for
791 // use with a subsequent instruction that accepts a shift. The value moved
792 // into dst is not necessarily equal to imm; it may have had a shifting
793 // operation applied to it that will be subsequently undone by the shift
794 // applied in the Operand.
795 Operand MoveImmediateForShiftedOp(const Register& dst,
796 uint64_t imm,
797 PreShiftImmMode mode);
798
799 void Move(const GenericOperand& dst, const GenericOperand& src);
800
801 // Synthesises the address represented by a MemOperand into a register.
802 void ComputeAddress(const Register& dst, const MemOperand& mem_op);
803
804 // Conditional macros.
805 void Ccmp(const Register& rn,
806 const Operand& operand,
807 StatusFlags nzcv,
808 Condition cond);
809 void Ccmn(const Register& rn,
810 const Operand& operand,
811 StatusFlags nzcv,
812 Condition cond);
813 void ConditionalCompareMacro(const Register& rn,
814 const Operand& operand,
815 StatusFlags nzcv,
816 Condition cond,
817 ConditionalCompareOp op);
818
819 // On return, the boolean values pointed to will indicate whether `left` and
820 // `right` should be synthesised in a temporary register.
GetCselSynthesisInformation(const Register & rd,const Operand & left,const Operand & right,bool * should_synthesise_left,bool * should_synthesise_right)821 static void GetCselSynthesisInformation(const Register& rd,
822 const Operand& left,
823 const Operand& right,
824 bool* should_synthesise_left,
825 bool* should_synthesise_right) {
826 // Note that the helper does not need to look at the condition.
827 CselHelper(NULL,
828 rd,
829 left,
830 right,
831 eq,
832 should_synthesise_left,
833 should_synthesise_right);
834 }
835
Csel(const Register & rd,const Operand & left,const Operand & right,Condition cond)836 void Csel(const Register& rd,
837 const Operand& left,
838 const Operand& right,
839 Condition cond) {
840 CselHelper(this, rd, left, right, cond);
841 }
842
843 // Load/store macros.
844 #define DECLARE_FUNCTION(FN, REGTYPE, REG, OP) \
845 void FN(const REGTYPE REG, const MemOperand& addr);
846 LS_MACRO_LIST(DECLARE_FUNCTION)
847 #undef DECLARE_FUNCTION
848
849 void LoadStoreMacro(const CPURegister& rt,
850 const MemOperand& addr,
851 LoadStoreOp op);
852
853 #define DECLARE_FUNCTION(FN, REGTYPE, REG, REG2, OP) \
854 void FN(const REGTYPE REG, const REGTYPE REG2, const MemOperand& addr);
855 LSPAIR_MACRO_LIST(DECLARE_FUNCTION)
856 #undef DECLARE_FUNCTION
857
858 void LoadStorePairMacro(const CPURegister& rt,
859 const CPURegister& rt2,
860 const MemOperand& addr,
861 LoadStorePairOp op);
862
863 void Prfm(PrefetchOperation op, const MemOperand& addr);
864
  // Push or pop up to 4 registers of the same width to or from the stack,
  // using the current stack pointer as set by SetStackPointer.
  //
  // If an argument register is 'NoReg', all further arguments are also assumed
  // to be 'NoReg', and are thus not pushed or popped.
  //
  // Arguments are ordered such that "Push(a, b);" is functionally equivalent
  // to "Push(a); Push(b);".
  //
  // It is valid to push the same register more than once, and there is no
  // restriction on the order in which registers are specified.
  //
  // It is not valid to pop into the same register more than once in one
  // operation, not even into the zero register.
  //
  // If the current stack pointer (as set by SetStackPointer) is sp, then it
  // must be aligned to 16 bytes on entry and the total size of the specified
  // registers must also be a multiple of 16 bytes.
  //
  // Even if the current stack pointer is not the system stack pointer (sp),
  // Push (and derived methods) will still modify the system stack pointer in
  // order to comply with ABI rules about accessing memory below the system
  // stack pointer.
  //
  // Other than the registers passed into Pop, the stack pointer and (possibly)
  // the system stack pointer, these methods do not modify any other registers.
  //
  // The NoReg defaults emulate a variadic interface with up to 4 operands.
  void Push(const CPURegister& src0,
            const CPURegister& src1 = NoReg,
            const CPURegister& src2 = NoReg,
            const CPURegister& src3 = NoReg);
  void Pop(const CPURegister& dst0,
           const CPURegister& dst1 = NoReg,
           const CPURegister& dst2 = NoReg,
           const CPURegister& dst3 = NoReg);

  // Alternative forms of Push and Pop, taking a RegList or CPURegList that
  // specifies the registers that are to be pushed or popped. Higher-numbered
  // registers are associated with higher memory addresses (as in the A32 push
  // and pop instructions).
  //
  // (Push|Pop)SizeRegList allow you to specify the register size as a
  // parameter. Only kXRegSize, kWRegSize, kDRegSize and kSRegSize are
  // supported.
  //
  // Otherwise, (Push|Pop)(CPU|X|W|D|S)RegList is preferred.
  void PushCPURegList(CPURegList registers);
  void PopCPURegList(CPURegList registers);
912
  // Build a CPURegList from the raw bit-list and push/pop it. The `type`
  // parameter selects general-purpose (default) or vector registers.
  void PushSizeRegList(
      RegList registers,
      unsigned reg_size,
      CPURegister::RegisterType type = CPURegister::kRegister) {
    PushCPURegList(CPURegList(type, reg_size, registers));
  }
  void PopSizeRegList(RegList registers,
                      unsigned reg_size,
                      CPURegister::RegisterType type = CPURegister::kRegister) {
    PopCPURegList(CPURegList(type, reg_size, registers));
  }
  // Typed convenience wrappers: X/W operate on general-purpose registers,
  // D/S on vector registers.
  void PushXRegList(RegList regs) { PushSizeRegList(regs, kXRegSize); }
  void PopXRegList(RegList regs) { PopSizeRegList(regs, kXRegSize); }
  void PushWRegList(RegList regs) { PushSizeRegList(regs, kWRegSize); }
  void PopWRegList(RegList regs) { PopSizeRegList(regs, kWRegSize); }
  void PushDRegList(RegList regs) {
    PushSizeRegList(regs, kDRegSize, CPURegister::kVRegister);
  }
  void PopDRegList(RegList regs) {
    PopSizeRegList(regs, kDRegSize, CPURegister::kVRegister);
  }
  void PushSRegList(RegList regs) {
    PushSizeRegList(regs, kSRegSize, CPURegister::kVRegister);
  }
  void PopSRegList(RegList regs) {
    PopSizeRegList(regs, kSRegSize, CPURegister::kVRegister);
  }
940
  // Push the specified register 'count' times.
  void PushMultipleTimes(int count, Register src);

  // Poke 'src' onto the stack. The offset is in bytes.
  //
  // If the current stack pointer (as set by SetStackPointer) is sp, then sp
  // must be aligned to 16 bytes.
  void Poke(const Register& src, const Operand& offset);

  // Peek at a value on the stack, and put it in 'dst'. The offset is in bytes.
  //
  // If the current stack pointer (as set by SetStackPointer) is sp, then sp
  // must be aligned to 16 bytes.
  void Peek(const Register& dst, const Operand& offset);

  // Alternative forms of Peek and Poke, taking a RegList or CPURegList that
  // specifies the registers that are to be pushed or popped. Higher-numbered
  // registers are associated with higher memory addresses.
  //
  // (Peek|Poke)SizeRegList allow you to specify the register size as a
  // parameter. Only kXRegSize, kWRegSize, kDRegSize and kSRegSize are
  // supported.
  //
  // Otherwise, (Peek|Poke)(CPU|X|W|D|S)RegList is preferred.
  // Unlike Push/Pop, Peek/Poke do not adjust the stack pointer; they
  // load/store relative to the current StackPointer() at `offset` bytes.
  void PeekCPURegList(CPURegList registers, int64_t offset) {
    LoadCPURegList(registers, MemOperand(StackPointer(), offset));
  }
  void PokeCPURegList(CPURegList registers, int64_t offset) {
    StoreCPURegList(registers, MemOperand(StackPointer(), offset));
  }

  void PeekSizeRegList(
      RegList registers,
      int64_t offset,
      unsigned reg_size,
      CPURegister::RegisterType type = CPURegister::kRegister) {
    PeekCPURegList(CPURegList(type, reg_size, registers), offset);
  }
  void PokeSizeRegList(
      RegList registers,
      int64_t offset,
      unsigned reg_size,
      CPURegister::RegisterType type = CPURegister::kRegister) {
    PokeCPURegList(CPURegList(type, reg_size, registers), offset);
  }
  // Typed convenience wrappers, mirroring (Push|Pop)(X|W|D|S)RegList.
  void PeekXRegList(RegList regs, int64_t offset) {
    PeekSizeRegList(regs, offset, kXRegSize);
  }
  void PokeXRegList(RegList regs, int64_t offset) {
    PokeSizeRegList(regs, offset, kXRegSize);
  }
  void PeekWRegList(RegList regs, int64_t offset) {
    PeekSizeRegList(regs, offset, kWRegSize);
  }
  void PokeWRegList(RegList regs, int64_t offset) {
    PokeSizeRegList(regs, offset, kWRegSize);
  }
  void PeekDRegList(RegList regs, int64_t offset) {
    PeekSizeRegList(regs, offset, kDRegSize, CPURegister::kVRegister);
  }
  void PokeDRegList(RegList regs, int64_t offset) {
    PokeSizeRegList(regs, offset, kDRegSize, CPURegister::kVRegister);
  }
  void PeekSRegList(RegList regs, int64_t offset) {
    PeekSizeRegList(regs, offset, kSRegSize, CPURegister::kVRegister);
  }
  void PokeSRegList(RegList regs, int64_t offset) {
    PokeSizeRegList(regs, offset, kSRegSize, CPURegister::kVRegister);
  }
1010
1011
  // Claim or drop stack space without actually accessing memory.
  //
  // If the current stack pointer (as set by SetStackPointer) is sp, then it
  // must be aligned to 16 bytes and the size claimed or dropped must be a
  // multiple of 16 bytes.
  void Claim(const Operand& size);
  void Drop(const Operand& size);

  // As above, but for multiples of the SVE vector length.
  void ClaimVL(int64_t multiplier) {
    // We never need to worry about sp alignment because the VL is always a
    // multiple of 16.
    VIXL_STATIC_ASSERT((kZRegMinSizeInBytes % 16) == 0);
    VIXL_ASSERT(multiplier >= 0);
    // Claiming moves sp down, hence the negated multiplier.
    Addvl(sp, sp, -multiplier);
  }
  void DropVL(int64_t multiplier) {
    VIXL_STATIC_ASSERT((kZRegMinSizeInBytes % 16) == 0);
    VIXL_ASSERT(multiplier >= 0);
    Addvl(sp, sp, multiplier);
  }
1033
  // Preserve the callee-saved registers (as defined by AAPCS64).
  //
  // Higher-numbered registers are pushed before lower-numbered registers, and
  // thus get higher addresses.
  // Floating-point registers are pushed before general-purpose registers, and
  // thus get higher addresses.
  //
  // This method must not be called unless StackPointer() is sp, and it is
  // aligned to 16 bytes.
  void PushCalleeSavedRegisters();

  // Restore the callee-saved registers (as defined by AAPCS64).
  //
  // Higher-numbered registers are popped after lower-numbered registers, and
  // thus come from higher addresses.
  // Floating-point registers are popped after general-purpose registers, and
  // thus come from higher addresses.
  //
  // This method must not be called unless StackPointer() is sp, and it is
  // aligned to 16 bytes.
  void PopCalleeSavedRegisters();

  // Bulk load/store of a register list to/from an arbitrary memory location.
  void LoadCPURegList(CPURegList registers, const MemOperand& src);
  void StoreCPURegList(CPURegList registers, const MemOperand& dst);
1058
1059 // Remaining instructions are simple pass-through calls to the assembler.
Adr(const Register & rd,Label * label)1060 void Adr(const Register& rd, Label* label) {
1061 VIXL_ASSERT(allow_macro_instructions_);
1062 VIXL_ASSERT(!rd.IsZero());
1063 SingleEmissionCheckScope guard(this);
1064 adr(rd, label);
1065 }
  // Compute the 4KB-page address of `label` into rd.
  void Adrp(const Register& rd, Label* label) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    adrp(rd, label);
  }
  // Arithmetic shift right by an immediate.
  void Asr(const Register& rd, const Register& rn, unsigned shift) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    asr(rd, rn, shift);
  }
  // Arithmetic shift right by a register amount (emits `asrv`).
  void Asr(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    asrv(rd, rn, rm);
  }
1087
  // Branch type inversion relies on these relations.
  // Each non-condition branch type pairs with its inverse via the low bit;
  // InvertBranchType below exploits this with `type ^ 1`.
  VIXL_STATIC_ASSERT((reg_zero == (reg_not_zero ^ 1)) &&
                     (reg_bit_clear == (reg_bit_set ^ 1)) &&
                     (always == (never ^ 1)));
1092
InvertBranchType(BranchType type)1093 BranchType InvertBranchType(BranchType type) {
1094 if (kBranchTypeFirstCondition <= type && type <= kBranchTypeLastCondition) {
1095 return static_cast<BranchType>(
1096 InvertCondition(static_cast<Condition>(type)));
1097 } else {
1098 return static_cast<BranchType>(type ^ 1);
1099 }
1100 }
1101
  // General branch: `type` selects conditional, compare-and-branch or
  // test-bit-and-branch forms; `reg`/`bit` supply the extra operands where
  // the form needs them.
  void B(Label* label, BranchType type, Register reg = NoReg, int bit = -1);

  void B(Label* label);
  void B(Label* label, Condition cond);
  // Convenience overload with the arguments in the opposite order.
  void B(Condition cond, Label* label) { B(label, cond); }
  // Bitfield move.
  void Bfm(const Register& rd,
           const Register& rn,
           unsigned immr,
           unsigned imms) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    bfm(rd, rn, immr, imms);
  }
  // Bitfield insert.
  void Bfi(const Register& rd,
           const Register& rn,
           unsigned lsb,
           unsigned width) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    bfi(rd, rn, lsb, width);
  }
  // Bitfield clear.
  void Bfc(const Register& rd, unsigned lsb, unsigned width) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    bfc(rd, lsb, width);
  }
  // Bitfield extract and insert at low end.
  void Bfxil(const Register& rd,
             const Register& rn,
             unsigned lsb,
             unsigned width) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    bfxil(rd, rn, lsb, width);
  }
  void Bind(Label* label, BranchTargetIdentifier id = EmitBTI_none);
  // Bind a label to a specified offset from the start of the buffer.
  void BindToOffset(Label* label, ptrdiff_t offset);
  // Branch with link to `label`.
  void Bl(Label* label) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    bl(label);
  }
  // Branch with link to the address in xn.
  void Blr(const Register& xn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!xn.IsZero());
    SingleEmissionCheckScope guard(this);
    blr(xn);
  }
  // Branch to the address in xn.
  void Br(const Register& xn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!xn.IsZero());
    SingleEmissionCheckScope guard(this);
    br(xn);
  }
  // Pointer-authenticated branches with a zero modifier (key A / key B).
  void Braaz(const Register& xn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    braaz(xn);
  }
  void Brabz(const Register& xn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    brabz(xn);
  }
  void Blraaz(const Register& xn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    blraaz(xn);
  }
  void Blrabz(const Register& xn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    blrabz(xn);
  }
  // Pointer-authenticated returns (key A / key B).
  void Retaa() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    retaa();
  }
  void Retab() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    retab();
  }
  // Pointer-authenticated branches with a register modifier.
  void Braa(const Register& xn, const Register& xm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    braa(xn, xm);
  }
  void Brab(const Register& xn, const Register& xm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    brab(xn, xm);
  }
  void Blraa(const Register& xn, const Register& xm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    blraa(xn, xm);
  }
  void Blrab(const Register& xn, const Register& xm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    blrab(xn, xm);
  }
  // Software breakpoint with an immediate payload.
  void Brk(int code = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    brk(code);
  }
  // Compare-and-branch on non-zero / zero.
  void Cbnz(const Register& rt, Label* label);
  void Cbz(const Register& rt, Label* label);
  // Conditional increment: rd = cond ? rn + 1 : rn.
  void Cinc(const Register& rd, const Register& rn, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    cinc(rd, rn, cond);
  }
  // Conditional invert: rd = cond ? ~rn : rn.
  void Cinv(const Register& rd, const Register& rn, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    cinv(rd, rn, cond);
  }
1234
  // The four PAC "system" addressing modes: key A/B, with a zero modifier
  // (az/bz) or with sp as the modifier (asp/bsp).
#define PAUTH_SYSTEM_MODES(V) \
  V(az)                       \
  V(bz)                       \
  V(asp)                      \
  V(bsp)

  // Generates Paciaz/Pacibz/Paciasp/Pacibsp and the matching Auti* wrappers.
#define DEFINE_MACRO_ASM_FUNCS(SUFFIX)      \
  void Paci##SUFFIX() {                     \
    VIXL_ASSERT(allow_macro_instructions_); \
    SingleEmissionCheckScope guard(this);   \
    paci##SUFFIX();                         \
  }                                         \
  void Auti##SUFFIX() {                     \
    VIXL_ASSERT(allow_macro_instructions_); \
    SingleEmissionCheckScope guard(this);   \
    auti##SUFFIX();                         \
  }

  PAUTH_SYSTEM_MODES(DEFINE_MACRO_ASM_FUNCS)
#undef DEFINE_MACRO_ASM_FUNCS
1255
  // The 1716 pac and aut instructions encourage people to use x16 and x17
  // directly, perhaps without realising that this is forbidden. For example:
  //
  //     UseScratchRegisterScope temps(&masm);
  //     Register temp = temps.AcquireX();  // temp will be x16
  //     __ Mov(x17, ptr);
  //     __ Mov(x16, modifier);  // Will override temp!
  //     __ Pacia1716();
  //
  // To work around this issue, you must exclude x16 and x17 from the scratch
  // register list. You may need to replace them with other registers:
  //
  //     UseScratchRegisterScope temps(&masm);
  //     temps.Exclude(x16, x17);
  //     temps.Include(x10, x11);
  //     __ Mov(x17, ptr);
  //     __ Mov(x16, modifier);
  //     __ Pacia1716();
  //
  // The asserts below enforce that x16/x17 are not available as scratch
  // registers when these instructions are emitted.
  void Pacia1716() {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x16));
    VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x17));
    SingleEmissionCheckScope guard(this);
    pacia1716();
  }
  void Pacib1716() {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x16));
    VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x17));
    SingleEmissionCheckScope guard(this);
    pacib1716();
  }
  void Autia1716() {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x16));
    VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x17));
    SingleEmissionCheckScope guard(this);
    autia1716();
  }
  void Autib1716() {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x16));
    VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x17));
    SingleEmissionCheckScope guard(this);
    autib1716();
  }
  // Strip the pointer-authentication code from lr.
  void Xpaclri() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    xpaclri();
  }
  // Clear the local monitor for exclusive accesses.
  void Clrex() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    clrex();
  }
  // Count leading sign bits.
  void Cls(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    cls(rd, rn);
  }
  // Count leading zero bits.
  void Clz(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    clz(rd, rn);
  }
  // Conditional negate: rd = cond ? -rn : rn.
  void Cneg(const Register& rd, const Register& rn, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    cneg(rd, rn, cond);
  }
  // Error synchronisation barrier.
  void Esb() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    esb();
  }
  // Consumption-of-speculative-data barrier.
  void Csdb() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    csdb();
  }
  // Conditional set: rd = cond ? 1 : 0.
  void Cset(const Register& rd, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    cset(rd, cond);
  }
  // Conditional set mask: rd = cond ? -1 : 0.
  void Csetm(const Register& rd, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    csetm(rd, cond);
  }
  // Conditional select increment: rd = cond ? rn : rm + 1.
  // `al` and `nv` are not permitted here (enforced by the asserts), matching
  // the underlying instruction encodings.
  void Csinc(const Register& rd,
             const Register& rn,
             const Register& rm,
             Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    csinc(rd, rn, rm, cond);
  }
  // Conditional select invert: rd = cond ? rn : ~rm.
  void Csinv(const Register& rd,
             const Register& rn,
             const Register& rm,
             Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    csinv(rd, rn, rm, cond);
  }
  // Conditional select negate: rd = cond ? rn : -rm.
  void Csneg(const Register& rd,
             const Register& rn,
             const Register& rm,
             Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    csneg(rd, rn, rm, cond);
  }
  // Data memory barrier.
  void Dmb(BarrierDomain domain, BarrierType type) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    dmb(domain, type);
  }
  // Data synchronisation barrier.
  void Dsb(BarrierDomain domain, BarrierType type) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    dsb(domain, type);
  }
  // Extract a register from a pair of registers at a bit position.
  void Extr(const Register& rd,
            const Register& rn,
            const Register& rm,
            unsigned lsb) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    extr(rd, rn, rm, lsb);
  }
  // Floating-point add.
  void Fadd(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fadd(vd, vn, vm);
  }
  // Floating-point conditional compare; `trap` selects the signalling
  // (fccmpe) or non-signalling (fccmp) form.
  void Fccmp(const VRegister& vn,
             const VRegister& vm,
             StatusFlags nzcv,
             Condition cond,
             FPTrapFlags trap = DisableTrap) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    FPCCompareMacro(vn, vm, nzcv, cond, trap);
  }
  // Signalling variant: simply forwards to Fccmp with EnableTrap.
  void Fccmpe(const VRegister& vn,
              const VRegister& vm,
              StatusFlags nzcv,
              Condition cond) {
    Fccmp(vn, vm, nzcv, cond, EnableTrap);
  }
  // Floating-point compare.
  void Fcmp(const VRegister& vn,
            const VRegister& vm,
            FPTrapFlags trap = DisableTrap) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    FPCompareMacro(vn, vm, trap);
  }
  // Compare against an immediate (commonly 0.0); defined out of line.
  void Fcmp(const VRegister& vn, double value, FPTrapFlags trap = DisableTrap);
  void Fcmpe(const VRegister& vn, double value);
  // Signalling register-register compare.
  void Fcmpe(const VRegister& vn, const VRegister& vm) {
    Fcmp(vn, vm, EnableTrap);
  }
  // Floating-point conditional select: vd = cond ? vn : vm.
  void Fcsel(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    fcsel(vd, vn, vm, cond);
  }
  // Convert between floating-point precisions.
  void Fcvt(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvt(vd, vn);
  }
  // Vector widening conversions (lower / upper half).
  void Fcvtl(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtl(vd, vn);
  }
  void Fcvtl2(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtl2(vd, vn);
  }
  // Vector narrowing conversions (lower / upper half).
  void Fcvtn(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtn(vd, vn);
  }
  void Fcvtn2(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtn2(vd, vn);
  }
  // Narrowing conversions with round-to-odd (lower / upper half).
  void Fcvtxn(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtxn(vd, vn);
  }
  void Fcvtxn2(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtxn2(vd, vn);
  }
  // Floating-point to integer conversions. The suffix encodes the rounding
  // mode and signedness: a = to nearest with ties away, m = toward minus
  // infinity, n = to nearest with ties to even, p = toward plus infinity;
  // s = signed, u = unsigned.
  void Fcvtas(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtas(rd, vn);
  }
  void Fcvtau(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtau(rd, vn);
  }
  void Fcvtms(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtms(rd, vn);
  }
  void Fcvtmu(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtmu(rd, vn);
  }
  void Fcvtns(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtns(rd, vn);
  }
  void Fcvtnu(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtnu(rd, vn);
  }
  void Fcvtps(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtps(rd, vn);
  }
  void Fcvtpu(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtpu(rd, vn);
  }
  // Convert to signed integer, rounding toward zero; `fbits` gives an
  // optional fixed-point fraction width.
  void Fcvtzs(const Register& rd, const VRegister& vn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtzs(rd, vn, fbits);
  }
  // JavaScript-style double-to-int32 conversion.
  void Fjcvtzs(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fjcvtzs(rd, vn);
  }
  // As Fcvtzs, but unsigned.
  void Fcvtzu(const Register& rd, const VRegister& vn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtzu(rd, vn, fbits);
  }
  // Floating-point divide / max / min, including the "number" (NM) variants
  // that prefer a numeric operand over NaN.
  void Fdiv(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fdiv(vd, vn, vm);
  }
  void Fmax(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmax(vd, vn, vm);
  }
  void Fmaxnm(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmaxnm(vd, vn, vm);
  }
  void Fmin(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmin(vd, vn, vm);
  }
  void Fminnm(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fminnm(vd, vn, vm);
  }
  // Register-to-register floating-point move.
  void Fmov(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    // TODO: Use DiscardMoveMode to allow this move to be elided if vd.Is(vn).
    fmov(vd, vn);
  }
  // Move a general-purpose register's bits into a floating-point register.
  void Fmov(const VRegister& vd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    fmov(vd, rn);
  }
Fmov(const VRegister & vd,int index,const Register & rn)1586 void Fmov(const VRegister& vd, int index, const Register& rn) {
1587 VIXL_ASSERT(allow_macro_instructions_);
1588 SingleEmissionCheckScope guard(this);
1589 if (vd.Is1D() && (index == 0)) {
1590 mov(vd, index, rn);
1591 } else {
1592 fmov(vd, index, rn);
1593 }
1594 }
Fmov(const Register & rd,const VRegister & vn,int index)1595 void Fmov(const Register& rd, const VRegister& vn, int index) {
1596 VIXL_ASSERT(allow_macro_instructions_);
1597 SingleEmissionCheckScope guard(this);
1598 if (vn.Is1D() && (index == 0)) {
1599 mov(rd, vn, index);
1600 } else {
1601 fmov(rd, vn, index);
1602 }
1603 }
1604
  // Provide explicit double and float interfaces for FP immediate moves, rather
  // than relying on implicit C++ casts. This allows signalling NaNs to be
  // preserved when the immediate matches the format of vd. Most systems convert
  // signalling NaNs to quiet NaNs when converting between float and double.
  void Fmov(VRegister vd, double imm);
  void Fmov(VRegister vd, float imm);
  void Fmov(VRegister vd, const Float16 imm);
  // Provide a template to allow other types to be converted automatically.
  // Note that the conversion goes through double, so the signalling-NaN
  // preservation described above does not apply here.
  template <typename T>
  void Fmov(VRegister vd, T imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    Fmov(vd, static_cast<double>(imm));
  }
  // Move a floating-point register's bits into a general-purpose register.
  void Fmov(Register rd, VRegister vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fmov(rd, vn);
  }
  // Floating-point multiply and negated multiply.
  void Fmul(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmul(vd, vn, vm);
  }
  void Fnmul(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fnmul(vd, vn, vm);
  }
  // Fused multiply-accumulate family: vd = f(va, vn * vm), where the
  // variant selects addition/subtraction and negation.
  void Fmadd(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmadd(vd, vn, vm, va);
  }
  void Fmsub(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmsub(vd, vn, vm, va);
  }
  void Fnmadd(const VRegister& vd,
              const VRegister& vn,
              const VRegister& vm,
              const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fnmadd(vd, vn, vm, va);
  }
  void Fnmsub(const VRegister& vd,
              const VRegister& vn,
              const VRegister& vm,
              const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fnmsub(vd, vn, vm, va);
  }
Fsub(const VRegister & vd,const VRegister & vn,const VRegister & vm)1666 void Fsub(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
1667 VIXL_ASSERT(allow_macro_instructions_);
1668 SingleEmissionCheckScope guard(this);
1669 fsub(vd, vn, vm);
1670 }
Hint(SystemHint code)1671 void Hint(SystemHint code) {
1672 VIXL_ASSERT(allow_macro_instructions_);
1673 SingleEmissionCheckScope guard(this);
1674 hint(code);
1675 }
Hint(int imm7)1676 void Hint(int imm7) {
1677 VIXL_ASSERT(allow_macro_instructions_);
1678 SingleEmissionCheckScope guard(this);
1679 hint(imm7);
1680 }
Hlt(int code)1681 void Hlt(int code) {
1682 VIXL_ASSERT(allow_macro_instructions_);
1683 SingleEmissionCheckScope guard(this);
1684 hlt(code);
1685 }
Isb()1686 void Isb() {
1687 VIXL_ASSERT(allow_macro_instructions_);
1688 SingleEmissionCheckScope guard(this);
1689 isb();
1690 }
Ldar(const Register & rt,const MemOperand & src)1691 void Ldar(const Register& rt, const MemOperand& src) {
1692 VIXL_ASSERT(allow_macro_instructions_);
1693 SingleEmissionCheckScope guard(this);
1694 ldar(rt, src);
1695 }
Ldarb(const Register & rt,const MemOperand & src)1696 void Ldarb(const Register& rt, const MemOperand& src) {
1697 VIXL_ASSERT(allow_macro_instructions_);
1698 SingleEmissionCheckScope guard(this);
1699 ldarb(rt, src);
1700 }
Ldarh(const Register & rt,const MemOperand & src)1701 void Ldarh(const Register& rt, const MemOperand& src) {
1702 VIXL_ASSERT(allow_macro_instructions_);
1703 SingleEmissionCheckScope guard(this);
1704 ldarh(rt, src);
1705 }
Ldlar(const Register & rt,const MemOperand & src)1706 void Ldlar(const Register& rt, const MemOperand& src) {
1707 VIXL_ASSERT(allow_macro_instructions_);
1708 SingleEmissionCheckScope guard(this);
1709 ldlar(rt, src);
1710 }
Ldlarb(const Register & rt,const MemOperand & src)1711 void Ldlarb(const Register& rt, const MemOperand& src) {
1712 VIXL_ASSERT(allow_macro_instructions_);
1713 SingleEmissionCheckScope guard(this);
1714 ldlarb(rt, src);
1715 }
Ldlarh(const Register & rt,const MemOperand & src)1716 void Ldlarh(const Register& rt, const MemOperand& src) {
1717 VIXL_ASSERT(allow_macro_instructions_);
1718 SingleEmissionCheckScope guard(this);
1719 ldlarh(rt, src);
1720 }
Ldaxp(const Register & rt,const Register & rt2,const MemOperand & src)1721 void Ldaxp(const Register& rt, const Register& rt2, const MemOperand& src) {
1722 VIXL_ASSERT(allow_macro_instructions_);
1723 VIXL_ASSERT(!rt.Aliases(rt2));
1724 SingleEmissionCheckScope guard(this);
1725 ldaxp(rt, rt2, src);
1726 }
Ldaxr(const Register & rt,const MemOperand & src)1727 void Ldaxr(const Register& rt, const MemOperand& src) {
1728 VIXL_ASSERT(allow_macro_instructions_);
1729 SingleEmissionCheckScope guard(this);
1730 ldaxr(rt, src);
1731 }
Ldaxrb(const Register & rt,const MemOperand & src)1732 void Ldaxrb(const Register& rt, const MemOperand& src) {
1733 VIXL_ASSERT(allow_macro_instructions_);
1734 SingleEmissionCheckScope guard(this);
1735 ldaxrb(rt, src);
1736 }
Ldaxrh(const Register & rt,const MemOperand & src)1737 void Ldaxrh(const Register& rt, const MemOperand& src) {
1738 VIXL_ASSERT(allow_macro_instructions_);
1739 SingleEmissionCheckScope guard(this);
1740 ldaxrh(rt, src);
1741 }
1742
1743 // clang-format off
1744 #define COMPARE_AND_SWAP_SINGLE_MACRO_LIST(V) \
1745 V(cas, Cas) \
1746 V(casa, Casa) \
1747 V(casl, Casl) \
1748 V(casal, Casal) \
1749 V(casb, Casb) \
1750 V(casab, Casab) \
1751 V(caslb, Caslb) \
1752 V(casalb, Casalb) \
1753 V(cash, Cash) \
1754 V(casah, Casah) \
1755 V(caslh, Caslh) \
1756 V(casalh, Casalh)
1757 // clang-format on
1758
1759 #define DEFINE_MACRO_ASM_FUNC(ASM, MASM) \
1760 void MASM(const Register& rs, const Register& rt, const MemOperand& src) { \
1761 VIXL_ASSERT(allow_macro_instructions_); \
1762 SingleEmissionCheckScope guard(this); \
1763 ASM(rs, rt, src); \
1764 }
1765 COMPARE_AND_SWAP_SINGLE_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
1766 #undef DEFINE_MACRO_ASM_FUNC
1767
1768
1769 // clang-format off
1770 #define COMPARE_AND_SWAP_PAIR_MACRO_LIST(V) \
1771 V(casp, Casp) \
1772 V(caspa, Caspa) \
1773 V(caspl, Caspl) \
1774 V(caspal, Caspal)
1775 // clang-format on
1776
1777 #define DEFINE_MACRO_ASM_FUNC(ASM, MASM) \
1778 void MASM(const Register& rs, \
1779 const Register& rs2, \
1780 const Register& rt, \
1781 const Register& rt2, \
1782 const MemOperand& src) { \
1783 VIXL_ASSERT(allow_macro_instructions_); \
1784 SingleEmissionCheckScope guard(this); \
1785 ASM(rs, rs2, rt, rt2, src); \
1786 }
COMPARE_AND_SWAP_PAIR_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)1787 COMPARE_AND_SWAP_PAIR_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
1788 #undef DEFINE_MACRO_ASM_FUNC
1789
1790 // These macros generate all the variations of the atomic memory operations,
1791 // e.g. ldadd, ldadda, ldaddb, staddl, etc.
1792
1793 // clang-format off
1794 #define ATOMIC_MEMORY_SIMPLE_MACRO_LIST(V, DEF, MASM_PRE, ASM_PRE) \
1795 V(DEF, MASM_PRE##add, ASM_PRE##add) \
1796 V(DEF, MASM_PRE##clr, ASM_PRE##clr) \
1797 V(DEF, MASM_PRE##eor, ASM_PRE##eor) \
1798 V(DEF, MASM_PRE##set, ASM_PRE##set) \
1799 V(DEF, MASM_PRE##smax, ASM_PRE##smax) \
1800 V(DEF, MASM_PRE##smin, ASM_PRE##smin) \
1801 V(DEF, MASM_PRE##umax, ASM_PRE##umax) \
1802 V(DEF, MASM_PRE##umin, ASM_PRE##umin)
1803
1804 #define ATOMIC_MEMORY_STORE_MACRO_MODES(V, MASM, ASM) \
1805 V(MASM, ASM) \
1806 V(MASM##l, ASM##l) \
1807 V(MASM##b, ASM##b) \
1808 V(MASM##lb, ASM##lb) \
1809 V(MASM##h, ASM##h) \
1810 V(MASM##lh, ASM##lh)
1811
1812 #define ATOMIC_MEMORY_LOAD_MACRO_MODES(V, MASM, ASM) \
1813 ATOMIC_MEMORY_STORE_MACRO_MODES(V, MASM, ASM) \
1814 V(MASM##a, ASM##a) \
1815 V(MASM##al, ASM##al) \
1816 V(MASM##ab, ASM##ab) \
1817 V(MASM##alb, ASM##alb) \
1818 V(MASM##ah, ASM##ah) \
1819 V(MASM##alh, ASM##alh)
1820 // clang-format on
1821
1822 #define DEFINE_MACRO_LOAD_ASM_FUNC(MASM, ASM) \
1823 void MASM(const Register& rs, const Register& rt, const MemOperand& src) { \
1824 VIXL_ASSERT(allow_macro_instructions_); \
1825 SingleEmissionCheckScope guard(this); \
1826 ASM(rs, rt, src); \
1827 }
1828 #define DEFINE_MACRO_STORE_ASM_FUNC(MASM, ASM) \
1829 void MASM(const Register& rs, const MemOperand& src) { \
1830 VIXL_ASSERT(allow_macro_instructions_); \
1831 SingleEmissionCheckScope guard(this); \
1832 ASM(rs, src); \
1833 }
1834
1835 ATOMIC_MEMORY_SIMPLE_MACRO_LIST(ATOMIC_MEMORY_LOAD_MACRO_MODES,
1836 DEFINE_MACRO_LOAD_ASM_FUNC,
1837 Ld,
1838 ld)
1839 ATOMIC_MEMORY_SIMPLE_MACRO_LIST(ATOMIC_MEMORY_STORE_MACRO_MODES,
1840 DEFINE_MACRO_STORE_ASM_FUNC,
1841 St,
1842 st)
1843
1844 #define DEFINE_MACRO_SWP_ASM_FUNC(MASM, ASM) \
1845 void MASM(const Register& rs, const Register& rt, const MemOperand& src) { \
1846 VIXL_ASSERT(allow_macro_instructions_); \
1847 SingleEmissionCheckScope guard(this); \
1848 ASM(rs, rt, src); \
1849 }
1850
1851 ATOMIC_MEMORY_LOAD_MACRO_MODES(DEFINE_MACRO_SWP_ASM_FUNC, Swp, swp)
1852
1853 #undef DEFINE_MACRO_LOAD_ASM_FUNC
1854 #undef DEFINE_MACRO_STORE_ASM_FUNC
1855 #undef DEFINE_MACRO_SWP_ASM_FUNC
1856
1857 void Ldaprb(const Register& rt, const MemOperand& src) {
1858 VIXL_ASSERT(allow_macro_instructions_);
1859 SingleEmissionCheckScope guard(this);
1860 VIXL_ASSERT(src.IsImmediateOffset());
1861 if (src.GetOffset() == 0) {
1862 ldaprb(rt, src);
1863 } else {
1864 ldapurb(rt, src);
1865 }
1866 }
1867
Ldapursb(const Register & rt,const MemOperand & src)1868 void Ldapursb(const Register& rt, const MemOperand& src) {
1869 VIXL_ASSERT(allow_macro_instructions_);
1870 SingleEmissionCheckScope guard(this);
1871 ldapursb(rt, src);
1872 }
1873
Ldaprh(const Register & rt,const MemOperand & src)1874 void Ldaprh(const Register& rt, const MemOperand& src) {
1875 VIXL_ASSERT(allow_macro_instructions_);
1876 SingleEmissionCheckScope guard(this);
1877 VIXL_ASSERT(src.IsImmediateOffset());
1878 if (src.GetOffset() == 0) {
1879 ldaprh(rt, src);
1880 } else {
1881 ldapurh(rt, src);
1882 }
1883 }
1884
Ldapursh(const Register & rt,const MemOperand & src)1885 void Ldapursh(const Register& rt, const MemOperand& src) {
1886 VIXL_ASSERT(allow_macro_instructions_);
1887 SingleEmissionCheckScope guard(this);
1888 ldapursh(rt, src);
1889 }
1890
Ldapr(const Register & rt,const MemOperand & src)1891 void Ldapr(const Register& rt, const MemOperand& src) {
1892 VIXL_ASSERT(allow_macro_instructions_);
1893 SingleEmissionCheckScope guard(this);
1894 VIXL_ASSERT(src.IsImmediateOffset());
1895 if (src.GetOffset() == 0) {
1896 ldapr(rt, src);
1897 } else {
1898 ldapur(rt, src);
1899 }
1900 }
1901
Ldapursw(const Register & rt,const MemOperand & src)1902 void Ldapursw(const Register& rt, const MemOperand& src) {
1903 VIXL_ASSERT(allow_macro_instructions_);
1904 SingleEmissionCheckScope guard(this);
1905 ldapursw(rt, src);
1906 }
1907
Ldnp(const CPURegister & rt,const CPURegister & rt2,const MemOperand & src)1908 void Ldnp(const CPURegister& rt,
1909 const CPURegister& rt2,
1910 const MemOperand& src) {
1911 VIXL_ASSERT(allow_macro_instructions_);
1912 SingleEmissionCheckScope guard(this);
1913 ldnp(rt, rt2, src);
1914 }
1915 // Provide both double and float interfaces for FP immediate loads, rather
1916 // than relying on implicit C++ casts. This allows signalling NaNs to be
1917 // preserved when the immediate matches the format of fd. Most systems convert
1918 // signalling NaNs to quiet NaNs when converting between float and double.
Ldr(const VRegister & vt,double imm)1919 void Ldr(const VRegister& vt, double imm) {
1920 VIXL_ASSERT(allow_macro_instructions_);
1921 SingleEmissionCheckScope guard(this);
1922 RawLiteral* literal;
1923 if (vt.IsD()) {
1924 literal = new Literal<double>(imm,
1925 &literal_pool_,
1926 RawLiteral::kDeletedOnPlacementByPool);
1927 } else {
1928 literal = new Literal<float>(static_cast<float>(imm),
1929 &literal_pool_,
1930 RawLiteral::kDeletedOnPlacementByPool);
1931 }
1932 ldr(vt, literal);
1933 }
Ldr(const VRegister & vt,float imm)1934 void Ldr(const VRegister& vt, float imm) {
1935 VIXL_ASSERT(allow_macro_instructions_);
1936 SingleEmissionCheckScope guard(this);
1937 RawLiteral* literal;
1938 if (vt.IsS()) {
1939 literal = new Literal<float>(imm,
1940 &literal_pool_,
1941 RawLiteral::kDeletedOnPlacementByPool);
1942 } else {
1943 literal = new Literal<double>(static_cast<double>(imm),
1944 &literal_pool_,
1945 RawLiteral::kDeletedOnPlacementByPool);
1946 }
1947 ldr(vt, literal);
1948 }
Ldr(const VRegister & vt,uint64_t high64,uint64_t low64)1949 void Ldr(const VRegister& vt, uint64_t high64, uint64_t low64) {
1950 VIXL_ASSERT(allow_macro_instructions_);
1951 VIXL_ASSERT(vt.IsQ());
1952 SingleEmissionCheckScope guard(this);
1953 ldr(vt,
1954 new Literal<uint64_t>(high64,
1955 low64,
1956 &literal_pool_,
1957 RawLiteral::kDeletedOnPlacementByPool));
1958 }
Ldr(const Register & rt,uint64_t imm)1959 void Ldr(const Register& rt, uint64_t imm) {
1960 VIXL_ASSERT(allow_macro_instructions_);
1961 VIXL_ASSERT(!rt.IsZero());
1962 SingleEmissionCheckScope guard(this);
1963 RawLiteral* literal;
1964 if (rt.Is64Bits()) {
1965 literal = new Literal<uint64_t>(imm,
1966 &literal_pool_,
1967 RawLiteral::kDeletedOnPlacementByPool);
1968 } else {
1969 VIXL_ASSERT(rt.Is32Bits());
1970 VIXL_ASSERT(IsUint32(imm) || IsInt32(imm));
1971 literal = new Literal<uint32_t>(static_cast<uint32_t>(imm),
1972 &literal_pool_,
1973 RawLiteral::kDeletedOnPlacementByPool);
1974 }
1975 ldr(rt, literal);
1976 }
Ldrsw(const Register & rt,uint32_t imm)1977 void Ldrsw(const Register& rt, uint32_t imm) {
1978 VIXL_ASSERT(allow_macro_instructions_);
1979 VIXL_ASSERT(!rt.IsZero());
1980 SingleEmissionCheckScope guard(this);
1981 ldrsw(rt,
1982 new Literal<uint32_t>(imm,
1983 &literal_pool_,
1984 RawLiteral::kDeletedOnPlacementByPool));
1985 }
Ldr(const CPURegister & rt,RawLiteral * literal)1986 void Ldr(const CPURegister& rt, RawLiteral* literal) {
1987 VIXL_ASSERT(allow_macro_instructions_);
1988 SingleEmissionCheckScope guard(this);
1989 ldr(rt, literal);
1990 }
Ldrsw(const Register & rt,RawLiteral * literal)1991 void Ldrsw(const Register& rt, RawLiteral* literal) {
1992 VIXL_ASSERT(allow_macro_instructions_);
1993 SingleEmissionCheckScope guard(this);
1994 ldrsw(rt, literal);
1995 }
Ldxp(const Register & rt,const Register & rt2,const MemOperand & src)1996 void Ldxp(const Register& rt, const Register& rt2, const MemOperand& src) {
1997 VIXL_ASSERT(allow_macro_instructions_);
1998 VIXL_ASSERT(!rt.Aliases(rt2));
1999 SingleEmissionCheckScope guard(this);
2000 ldxp(rt, rt2, src);
2001 }
Ldxr(const Register & rt,const MemOperand & src)2002 void Ldxr(const Register& rt, const MemOperand& src) {
2003 VIXL_ASSERT(allow_macro_instructions_);
2004 SingleEmissionCheckScope guard(this);
2005 ldxr(rt, src);
2006 }
Ldxrb(const Register & rt,const MemOperand & src)2007 void Ldxrb(const Register& rt, const MemOperand& src) {
2008 VIXL_ASSERT(allow_macro_instructions_);
2009 SingleEmissionCheckScope guard(this);
2010 ldxrb(rt, src);
2011 }
Ldxrh(const Register & rt,const MemOperand & src)2012 void Ldxrh(const Register& rt, const MemOperand& src) {
2013 VIXL_ASSERT(allow_macro_instructions_);
2014 SingleEmissionCheckScope guard(this);
2015 ldxrh(rt, src);
2016 }
Lsl(const Register & rd,const Register & rn,unsigned shift)2017 void Lsl(const Register& rd, const Register& rn, unsigned shift) {
2018 VIXL_ASSERT(allow_macro_instructions_);
2019 VIXL_ASSERT(!rd.IsZero());
2020 VIXL_ASSERT(!rn.IsZero());
2021 SingleEmissionCheckScope guard(this);
2022 lsl(rd, rn, shift);
2023 }
Lsl(const Register & rd,const Register & rn,const Register & rm)2024 void Lsl(const Register& rd, const Register& rn, const Register& rm) {
2025 VIXL_ASSERT(allow_macro_instructions_);
2026 VIXL_ASSERT(!rd.IsZero());
2027 VIXL_ASSERT(!rn.IsZero());
2028 VIXL_ASSERT(!rm.IsZero());
2029 SingleEmissionCheckScope guard(this);
2030 lslv(rd, rn, rm);
2031 }
Lsr(const Register & rd,const Register & rn,unsigned shift)2032 void Lsr(const Register& rd, const Register& rn, unsigned shift) {
2033 VIXL_ASSERT(allow_macro_instructions_);
2034 VIXL_ASSERT(!rd.IsZero());
2035 VIXL_ASSERT(!rn.IsZero());
2036 SingleEmissionCheckScope guard(this);
2037 lsr(rd, rn, shift);
2038 }
Lsr(const Register & rd,const Register & rn,const Register & rm)2039 void Lsr(const Register& rd, const Register& rn, const Register& rm) {
2040 VIXL_ASSERT(allow_macro_instructions_);
2041 VIXL_ASSERT(!rd.IsZero());
2042 VIXL_ASSERT(!rn.IsZero());
2043 VIXL_ASSERT(!rm.IsZero());
2044 SingleEmissionCheckScope guard(this);
2045 lsrv(rd, rn, rm);
2046 }
Ldraa(const Register & xt,const MemOperand & src)2047 void Ldraa(const Register& xt, const MemOperand& src) {
2048 VIXL_ASSERT(allow_macro_instructions_);
2049 SingleEmissionCheckScope guard(this);
2050 ldraa(xt, src);
2051 }
Ldrab(const Register & xt,const MemOperand & src)2052 void Ldrab(const Register& xt, const MemOperand& src) {
2053 VIXL_ASSERT(allow_macro_instructions_);
2054 SingleEmissionCheckScope guard(this);
2055 ldrab(xt, src);
2056 }
Madd(const Register & rd,const Register & rn,const Register & rm,const Register & ra)2057 void Madd(const Register& rd,
2058 const Register& rn,
2059 const Register& rm,
2060 const Register& ra) {
2061 VIXL_ASSERT(allow_macro_instructions_);
2062 VIXL_ASSERT(!rd.IsZero());
2063 VIXL_ASSERT(!rn.IsZero());
2064 VIXL_ASSERT(!rm.IsZero());
2065 VIXL_ASSERT(!ra.IsZero());
2066 SingleEmissionCheckScope guard(this);
2067 madd(rd, rn, rm, ra);
2068 }
Mneg(const Register & rd,const Register & rn,const Register & rm)2069 void Mneg(const Register& rd, const Register& rn, const Register& rm) {
2070 VIXL_ASSERT(allow_macro_instructions_);
2071 VIXL_ASSERT(!rd.IsZero());
2072 VIXL_ASSERT(!rn.IsZero());
2073 VIXL_ASSERT(!rm.IsZero());
2074 SingleEmissionCheckScope guard(this);
2075 mneg(rd, rn, rm);
2076 }
2077 void Mov(const Register& rd,
2078 const Register& rn,
2079 DiscardMoveMode discard_mode = kDontDiscardForSameWReg) {
2080 VIXL_ASSERT(allow_macro_instructions_);
2081 // Emit a register move only if the registers are distinct, or if they are
2082 // not X registers.
2083 //
2084 // Note that mov(w0, w0) is not a no-op because it clears the top word of
2085 // x0. A flag is provided (kDiscardForSameWReg) if a move between the same W
2086 // registers is not required to clear the top word of the X register. In
2087 // this case, the instruction is discarded.
2088 //
2089 // If the sp is an operand, add #0 is emitted, otherwise, orr #0.
2090 if (!rd.Is(rn) ||
2091 (rd.Is32Bits() && (discard_mode == kDontDiscardForSameWReg))) {
2092 SingleEmissionCheckScope guard(this);
2093 mov(rd, rn);
2094 }
2095 }
2096 void Movk(const Register& rd, uint64_t imm, int shift = -1) {
2097 VIXL_ASSERT(allow_macro_instructions_);
2098 VIXL_ASSERT(!rd.IsZero());
2099 SingleEmissionCheckScope guard(this);
2100 movk(rd, imm, shift);
2101 }
Mrs(const Register & rt,SystemRegister sysreg)2102 void Mrs(const Register& rt, SystemRegister sysreg) {
2103 VIXL_ASSERT(allow_macro_instructions_);
2104 VIXL_ASSERT(!rt.IsZero());
2105 SingleEmissionCheckScope guard(this);
2106 mrs(rt, sysreg);
2107 }
Msr(SystemRegister sysreg,const Register & rt)2108 void Msr(SystemRegister sysreg, const Register& rt) {
2109 VIXL_ASSERT(allow_macro_instructions_);
2110 VIXL_ASSERT(!rt.IsZero());
2111 SingleEmissionCheckScope guard(this);
2112 msr(sysreg, rt);
2113 }
Cfinv()2114 void Cfinv() {
2115 VIXL_ASSERT(allow_macro_instructions_);
2116 SingleEmissionCheckScope guard(this);
2117 cfinv();
2118 }
Axflag()2119 void Axflag() {
2120 VIXL_ASSERT(allow_macro_instructions_);
2121 SingleEmissionCheckScope guard(this);
2122 axflag();
2123 }
Xaflag()2124 void Xaflag() {
2125 VIXL_ASSERT(allow_macro_instructions_);
2126 SingleEmissionCheckScope guard(this);
2127 xaflag();
2128 }
2129 void Sys(int op1, int crn, int crm, int op2, const Register& rt = xzr) {
2130 VIXL_ASSERT(allow_macro_instructions_);
2131 SingleEmissionCheckScope guard(this);
2132 sys(op1, crn, crm, op2, rt);
2133 }
Dc(DataCacheOp op,const Register & rt)2134 void Dc(DataCacheOp op, const Register& rt) {
2135 VIXL_ASSERT(allow_macro_instructions_);
2136 SingleEmissionCheckScope guard(this);
2137 dc(op, rt);
2138 }
Ic(InstructionCacheOp op,const Register & rt)2139 void Ic(InstructionCacheOp op, const Register& rt) {
2140 VIXL_ASSERT(allow_macro_instructions_);
2141 SingleEmissionCheckScope guard(this);
2142 ic(op, rt);
2143 }
Msub(const Register & rd,const Register & rn,const Register & rm,const Register & ra)2144 void Msub(const Register& rd,
2145 const Register& rn,
2146 const Register& rm,
2147 const Register& ra) {
2148 VIXL_ASSERT(allow_macro_instructions_);
2149 VIXL_ASSERT(!rd.IsZero());
2150 VIXL_ASSERT(!rn.IsZero());
2151 VIXL_ASSERT(!rm.IsZero());
2152 VIXL_ASSERT(!ra.IsZero());
2153 SingleEmissionCheckScope guard(this);
2154 msub(rd, rn, rm, ra);
2155 }
Mul(const Register & rd,const Register & rn,const Register & rm)2156 void Mul(const Register& rd, const Register& rn, const Register& rm) {
2157 VIXL_ASSERT(allow_macro_instructions_);
2158 VIXL_ASSERT(!rd.IsZero());
2159 VIXL_ASSERT(!rn.IsZero());
2160 VIXL_ASSERT(!rm.IsZero());
2161 SingleEmissionCheckScope guard(this);
2162 mul(rd, rn, rm);
2163 }
Nop()2164 void Nop() {
2165 VIXL_ASSERT(allow_macro_instructions_);
2166 SingleEmissionCheckScope guard(this);
2167 nop();
2168 }
Rbit(const Register & rd,const Register & rn)2169 void Rbit(const Register& rd, const Register& rn) {
2170 VIXL_ASSERT(allow_macro_instructions_);
2171 VIXL_ASSERT(!rd.IsZero());
2172 VIXL_ASSERT(!rn.IsZero());
2173 SingleEmissionCheckScope guard(this);
2174 rbit(rd, rn);
2175 }
2176 void Ret(const Register& xn = lr) {
2177 VIXL_ASSERT(allow_macro_instructions_);
2178 VIXL_ASSERT(!xn.IsZero());
2179 SingleEmissionCheckScope guard(this);
2180 ret(xn);
2181 }
Rev(const Register & rd,const Register & rn)2182 void Rev(const Register& rd, const Register& rn) {
2183 VIXL_ASSERT(allow_macro_instructions_);
2184 VIXL_ASSERT(!rd.IsZero());
2185 VIXL_ASSERT(!rn.IsZero());
2186 SingleEmissionCheckScope guard(this);
2187 rev(rd, rn);
2188 }
Rev16(const Register & rd,const Register & rn)2189 void Rev16(const Register& rd, const Register& rn) {
2190 VIXL_ASSERT(allow_macro_instructions_);
2191 VIXL_ASSERT(!rd.IsZero());
2192 VIXL_ASSERT(!rn.IsZero());
2193 SingleEmissionCheckScope guard(this);
2194 rev16(rd, rn);
2195 }
Rev32(const Register & rd,const Register & rn)2196 void Rev32(const Register& rd, const Register& rn) {
2197 VIXL_ASSERT(allow_macro_instructions_);
2198 VIXL_ASSERT(!rd.IsZero());
2199 VIXL_ASSERT(!rn.IsZero());
2200 SingleEmissionCheckScope guard(this);
2201 rev32(rd, rn);
2202 }
Rev64(const Register & rd,const Register & rn)2203 void Rev64(const Register& rd, const Register& rn) {
2204 VIXL_ASSERT(allow_macro_instructions_);
2205 VIXL_ASSERT(!rd.IsZero());
2206 VIXL_ASSERT(!rn.IsZero());
2207 SingleEmissionCheckScope guard(this);
2208 rev64(rd, rn);
2209 }
2210
2211 #define PAUTH_MASM_VARIATIONS(V) \
2212 V(Paci, paci) \
2213 V(Pacd, pacd) \
2214 V(Auti, auti) \
2215 V(Autd, autd)
2216
2217 #define DEFINE_MACRO_ASM_FUNCS(MASM_PRE, ASM_PRE) \
2218 void MASM_PRE##a(const Register& xd, const Register& xn) { \
2219 VIXL_ASSERT(allow_macro_instructions_); \
2220 SingleEmissionCheckScope guard(this); \
2221 ASM_PRE##a(xd, xn); \
2222 } \
2223 void MASM_PRE##za(const Register& xd) { \
2224 VIXL_ASSERT(allow_macro_instructions_); \
2225 SingleEmissionCheckScope guard(this); \
2226 ASM_PRE##za(xd); \
2227 } \
2228 void MASM_PRE##b(const Register& xd, const Register& xn) { \
2229 VIXL_ASSERT(allow_macro_instructions_); \
2230 SingleEmissionCheckScope guard(this); \
2231 ASM_PRE##b(xd, xn); \
2232 } \
2233 void MASM_PRE##zb(const Register& xd) { \
2234 VIXL_ASSERT(allow_macro_instructions_); \
2235 SingleEmissionCheckScope guard(this); \
2236 ASM_PRE##zb(xd); \
2237 }
2238
PAUTH_MASM_VARIATIONS(DEFINE_MACRO_ASM_FUNCS)2239 PAUTH_MASM_VARIATIONS(DEFINE_MACRO_ASM_FUNCS)
2240 #undef DEFINE_MACRO_ASM_FUNCS
2241
2242 void Pacga(const Register& xd, const Register& xn, const Register& xm) {
2243 VIXL_ASSERT(allow_macro_instructions_);
2244 SingleEmissionCheckScope guard(this);
2245 pacga(xd, xn, xm);
2246 }
2247
Xpaci(const Register & xd)2248 void Xpaci(const Register& xd) {
2249 VIXL_ASSERT(allow_macro_instructions_);
2250 SingleEmissionCheckScope guard(this);
2251 xpaci(xd);
2252 }
2253
Xpacd(const Register & xd)2254 void Xpacd(const Register& xd) {
2255 VIXL_ASSERT(allow_macro_instructions_);
2256 SingleEmissionCheckScope guard(this);
2257 xpacd(xd);
2258 }
Ror(const Register & rd,const Register & rs,unsigned shift)2259 void Ror(const Register& rd, const Register& rs, unsigned shift) {
2260 VIXL_ASSERT(allow_macro_instructions_);
2261 VIXL_ASSERT(!rd.IsZero());
2262 VIXL_ASSERT(!rs.IsZero());
2263 SingleEmissionCheckScope guard(this);
2264 ror(rd, rs, shift);
2265 }
Ror(const Register & rd,const Register & rn,const Register & rm)2266 void Ror(const Register& rd, const Register& rn, const Register& rm) {
2267 VIXL_ASSERT(allow_macro_instructions_);
2268 VIXL_ASSERT(!rd.IsZero());
2269 VIXL_ASSERT(!rn.IsZero());
2270 VIXL_ASSERT(!rm.IsZero());
2271 SingleEmissionCheckScope guard(this);
2272 rorv(rd, rn, rm);
2273 }
Sbfiz(const Register & rd,const Register & rn,unsigned lsb,unsigned width)2274 void Sbfiz(const Register& rd,
2275 const Register& rn,
2276 unsigned lsb,
2277 unsigned width) {
2278 VIXL_ASSERT(allow_macro_instructions_);
2279 VIXL_ASSERT(!rd.IsZero());
2280 VIXL_ASSERT(!rn.IsZero());
2281 SingleEmissionCheckScope guard(this);
2282 sbfiz(rd, rn, lsb, width);
2283 }
Sbfm(const Register & rd,const Register & rn,unsigned immr,unsigned imms)2284 void Sbfm(const Register& rd,
2285 const Register& rn,
2286 unsigned immr,
2287 unsigned imms) {
2288 VIXL_ASSERT(allow_macro_instructions_);
2289 VIXL_ASSERT(!rd.IsZero());
2290 VIXL_ASSERT(!rn.IsZero());
2291 SingleEmissionCheckScope guard(this);
2292 sbfm(rd, rn, immr, imms);
2293 }
Sbfx(const Register & rd,const Register & rn,unsigned lsb,unsigned width)2294 void Sbfx(const Register& rd,
2295 const Register& rn,
2296 unsigned lsb,
2297 unsigned width) {
2298 VIXL_ASSERT(allow_macro_instructions_);
2299 VIXL_ASSERT(!rd.IsZero());
2300 VIXL_ASSERT(!rn.IsZero());
2301 SingleEmissionCheckScope guard(this);
2302 sbfx(rd, rn, lsb, width);
2303 }
2304 void Scvtf(const VRegister& vd, const Register& rn, int fbits = 0) {
2305 VIXL_ASSERT(allow_macro_instructions_);
2306 VIXL_ASSERT(!rn.IsZero());
2307 SingleEmissionCheckScope guard(this);
2308 scvtf(vd, rn, fbits);
2309 }
Sdiv(const Register & rd,const Register & rn,const Register & rm)2310 void Sdiv(const Register& rd, const Register& rn, const Register& rm) {
2311 VIXL_ASSERT(allow_macro_instructions_);
2312 VIXL_ASSERT(!rd.IsZero());
2313 VIXL_ASSERT(!rn.IsZero());
2314 VIXL_ASSERT(!rm.IsZero());
2315 SingleEmissionCheckScope guard(this);
2316 sdiv(rd, rn, rm);
2317 }
Smaddl(const Register & rd,const Register & rn,const Register & rm,const Register & ra)2318 void Smaddl(const Register& rd,
2319 const Register& rn,
2320 const Register& rm,
2321 const Register& ra) {
2322 VIXL_ASSERT(allow_macro_instructions_);
2323 VIXL_ASSERT(!rd.IsZero());
2324 VIXL_ASSERT(!rn.IsZero());
2325 VIXL_ASSERT(!rm.IsZero());
2326 VIXL_ASSERT(!ra.IsZero());
2327 SingleEmissionCheckScope guard(this);
2328 smaddl(rd, rn, rm, ra);
2329 }
Smsubl(const Register & rd,const Register & rn,const Register & rm,const Register & ra)2330 void Smsubl(const Register& rd,
2331 const Register& rn,
2332 const Register& rm,
2333 const Register& ra) {
2334 VIXL_ASSERT(allow_macro_instructions_);
2335 VIXL_ASSERT(!rd.IsZero());
2336 VIXL_ASSERT(!rn.IsZero());
2337 VIXL_ASSERT(!rm.IsZero());
2338 VIXL_ASSERT(!ra.IsZero());
2339 SingleEmissionCheckScope guard(this);
2340 smsubl(rd, rn, rm, ra);
2341 }
Smull(const Register & rd,const Register & rn,const Register & rm)2342 void Smull(const Register& rd, const Register& rn, const Register& rm) {
2343 VIXL_ASSERT(allow_macro_instructions_);
2344 VIXL_ASSERT(!rd.IsZero());
2345 VIXL_ASSERT(!rn.IsZero());
2346 VIXL_ASSERT(!rm.IsZero());
2347 SingleEmissionCheckScope guard(this);
2348 smull(rd, rn, rm);
2349 }
Smulh(const Register & xd,const Register & xn,const Register & xm)2350 void Smulh(const Register& xd, const Register& xn, const Register& xm) {
2351 VIXL_ASSERT(allow_macro_instructions_);
2352 VIXL_ASSERT(!xd.IsZero());
2353 VIXL_ASSERT(!xn.IsZero());
2354 VIXL_ASSERT(!xm.IsZero());
2355 SingleEmissionCheckScope guard(this);
2356 smulh(xd, xn, xm);
2357 }
Stlr(const Register & rt,const MemOperand & dst)2358 void Stlr(const Register& rt, const MemOperand& dst) {
2359 VIXL_ASSERT(allow_macro_instructions_);
2360 SingleEmissionCheckScope guard(this);
2361 VIXL_ASSERT(dst.IsImmediateOffset());
2362 if (dst.GetOffset() == 0) {
2363 stlr(rt, dst);
2364 } else {
2365 stlur(rt, dst);
2366 }
2367 }
Stlrb(const Register & rt,const MemOperand & dst)2368 void Stlrb(const Register& rt, const MemOperand& dst) {
2369 VIXL_ASSERT(allow_macro_instructions_);
2370 SingleEmissionCheckScope guard(this);
2371 VIXL_ASSERT(dst.IsImmediateOffset());
2372 if (dst.GetOffset() == 0) {
2373 stlrb(rt, dst);
2374 } else {
2375 stlurb(rt, dst);
2376 }
2377 }
Stlrh(const Register & rt,const MemOperand & dst)2378 void Stlrh(const Register& rt, const MemOperand& dst) {
2379 VIXL_ASSERT(allow_macro_instructions_);
2380 SingleEmissionCheckScope guard(this);
2381 VIXL_ASSERT(dst.IsImmediateOffset());
2382 if (dst.GetOffset() == 0) {
2383 stlrh(rt, dst);
2384 } else {
2385 stlurh(rt, dst);
2386 }
2387 }
Stllr(const Register & rt,const MemOperand & dst)2388 void Stllr(const Register& rt, const MemOperand& dst) {
2389 VIXL_ASSERT(allow_macro_instructions_);
2390 SingleEmissionCheckScope guard(this);
2391 stllr(rt, dst);
2392 }
Stllrb(const Register & rt,const MemOperand & dst)2393 void Stllrb(const Register& rt, const MemOperand& dst) {
2394 VIXL_ASSERT(allow_macro_instructions_);
2395 SingleEmissionCheckScope guard(this);
2396 stllrb(rt, dst);
2397 }
Stllrh(const Register & rt,const MemOperand & dst)2398 void Stllrh(const Register& rt, const MemOperand& dst) {
2399 VIXL_ASSERT(allow_macro_instructions_);
2400 SingleEmissionCheckScope guard(this);
2401 stllrh(rt, dst);
2402 }
Stlxp(const Register & rs,const Register & rt,const Register & rt2,const MemOperand & dst)2403 void Stlxp(const Register& rs,
2404 const Register& rt,
2405 const Register& rt2,
2406 const MemOperand& dst) {
2407 VIXL_ASSERT(allow_macro_instructions_);
2408 VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
2409 VIXL_ASSERT(!rs.Aliases(rt));
2410 VIXL_ASSERT(!rs.Aliases(rt2));
2411 SingleEmissionCheckScope guard(this);
2412 stlxp(rs, rt, rt2, dst);
2413 }
Stlxr(const Register & rs,const Register & rt,const MemOperand & dst)2414 void Stlxr(const Register& rs, const Register& rt, const MemOperand& dst) {
2415 VIXL_ASSERT(allow_macro_instructions_);
2416 VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
2417 VIXL_ASSERT(!rs.Aliases(rt));
2418 SingleEmissionCheckScope guard(this);
2419 stlxr(rs, rt, dst);
2420 }
  // Store-release exclusive byte.
  // `rs` receives the exclusive-store status result; the asserts require it
  // to be distinct from the data register and the address base register.
  // NOTE(review): aliasing here is presumably architecturally unpredictable
  // (STXR-family constraints) — confirm against the Arm ARM.
  void Stlxrb(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stlxrb(rs, rt, dst);
  }
  // Store-release exclusive half-word. Same `rs` aliasing constraints as
  // Stlxrb.
  void Stlxrh(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stlxrh(rs, rt, dst);
  }
  // Store pair of registers with a non-temporal hint. No status register, so
  // no aliasing restrictions are asserted.
  void Stnp(const CPURegister& rt,
            const CPURegister& rt2,
            const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    stnp(rt, rt2, dst);
  }
  // Store exclusive pair. `rs` must not alias either data register or the
  // address base register.
  void Stxp(const Register& rs,
            const Register& rt,
            const Register& rt2,
            const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    VIXL_ASSERT(!rs.Aliases(rt2));
    SingleEmissionCheckScope guard(this);
    stxp(rs, rt, rt2, dst);
  }
  // Store exclusive register (word or doubleword).
  void Stxr(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stxr(rs, rt, dst);
  }
  // Store exclusive byte.
  void Stxrb(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stxrb(rs, rt, dst);
  }
  // Store exclusive half-word.
  void Stxrh(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stxrh(rs, rt, dst);
  }
  // Supervisor call with the given immediate `code`.
  void Svc(int code) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    svc(code);
  }
  // Sign-extend byte. Neither operand may be the zero register (asserted):
  // these are aliases of SBFM, whose encoding uses the stack pointer slot.
  void Sxtb(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    sxtb(rd, rn);
  }
  // Sign-extend half-word.
  void Sxth(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    sxth(rd, rn);
  }
  // Sign-extend word.
  void Sxtw(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    sxtw(rd, rn);
  }
  // NEON table lookup, one table register: out-of-range indices produce zero.
  void Tbl(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    tbl(vd, vn, vm);
  }
  // Table lookup with a two-register table.
  void Tbl(const VRegister& vd,
           const VRegister& vn,
           const VRegister& vn2,
           const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    tbl(vd, vn, vn2, vm);
  }
  // Table lookup with a three-register table.
  void Tbl(const VRegister& vd,
           const VRegister& vn,
           const VRegister& vn2,
           const VRegister& vn3,
           const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    tbl(vd, vn, vn2, vn3, vm);
  }
  // Table lookup with a four-register table.
  void Tbl(const VRegister& vd,
           const VRegister& vn,
           const VRegister& vn2,
           const VRegister& vn3,
           const VRegister& vn4,
           const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    tbl(vd, vn, vn2, vn3, vn4, vm);
  }
  // NEON table lookup extension, one table register: out-of-range indices
  // leave the destination element unchanged.
  void Tbx(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    tbx(vd, vn, vm);
  }
  // Table lookup extension with a two-register table.
  void Tbx(const VRegister& vd,
           const VRegister& vn,
           const VRegister& vn2,
           const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    tbx(vd, vn, vn2, vm);
  }
  // Table lookup extension with a three-register table.
  void Tbx(const VRegister& vd,
           const VRegister& vn,
           const VRegister& vn2,
           const VRegister& vn3,
           const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    tbx(vd, vn, vn2, vn3, vm);
  }
  // Table lookup extension with a four-register table.
  void Tbx(const VRegister& vd,
           const VRegister& vn,
           const VRegister& vn2,
           const VRegister& vn3,
           const VRegister& vn4,
           const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    tbx(vd, vn, vn2, vn3, vn4, vm);
  }
2564 void Tbnz(const Register& rt, unsigned bit_pos, Label* label);
2565 void Tbz(const Register& rt, unsigned bit_pos, Label* label);
  // Unsigned bitfield insert in zeros: copy `width` bits of `rn` into `rd`
  // starting at bit `lsb`, zeroing the rest.
  void Ubfiz(const Register& rd,
             const Register& rn,
             unsigned lsb,
             unsigned width) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    ubfiz(rd, rn, lsb, width);
  }
  // Unsigned bitfield move (raw UBFM form with immr/imms encoding fields).
  void Ubfm(const Register& rd,
            const Register& rn,
            unsigned immr,
            unsigned imms) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    ubfm(rd, rn, immr, imms);
  }
  // Unsigned bitfield extract: `width` bits of `rn` from bit `lsb`, zero
  // extended into `rd`.
  void Ubfx(const Register& rd,
            const Register& rn,
            unsigned lsb,
            unsigned width) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    ubfx(rd, rn, lsb, width);
  }
  // Unsigned integer to floating-point convert, with `fbits` fractional bits
  // for the fixed-point form (0 selects plain integer convert).
  void Ucvtf(const VRegister& vd, const Register& rn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    ucvtf(vd, rn, fbits);
  }
  // Unsigned divide: rd = rn / rm.
  void Udiv(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    udiv(rd, rn, rm);
  }
  // Unsigned multiply-add long: rd = ra + (rn * rm), 32x32 -> 64.
  void Umaddl(const Register& rd,
              const Register& rn,
              const Register& rm,
              const Register& ra) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    VIXL_ASSERT(!ra.IsZero());
    SingleEmissionCheckScope guard(this);
    umaddl(rd, rn, rm, ra);
  }
  // Unsigned multiply long: rd = rn * rm, 32x32 -> 64.
  void Umull(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    umull(rd, rn, rm);
  }
  // Unsigned multiply high: xd = upper 64 bits of xn * xm.
  void Umulh(const Register& xd, const Register& xn, const Register& xm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!xd.IsZero());
    VIXL_ASSERT(!xn.IsZero());
    VIXL_ASSERT(!xm.IsZero());
    SingleEmissionCheckScope guard(this);
    umulh(xd, xn, xm);
  }
  // Unsigned multiply-subtract long: rd = ra - (rn * rm), 32x32 -> 64.
  void Umsubl(const Register& rd,
              const Register& rn,
              const Register& rm,
              const Register& ra) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    VIXL_ASSERT(!ra.IsZero());
    SingleEmissionCheckScope guard(this);
    umsubl(rd, rn, rm, ra);
  }
Unreachable()2650 void Unreachable() {
2651 VIXL_ASSERT(allow_macro_instructions_);
2652 SingleEmissionCheckScope guard(this);
2653 if (generate_simulator_code_) {
2654 hlt(kUnreachableOpcode);
2655 } else {
2656 // Use the architecturally-defined UDF instruction to abort on hardware,
2657 // because using HLT and BRK tends to make the process difficult to debug.
2658 udf(kUnreachableOpcode);
2659 }
2660 }
  // Zero-extend byte. Neither operand may be the zero register (asserted):
  // these are aliases of UBFM, whose encoding uses the stack pointer slot.
  void Uxtb(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    uxtb(rd, rn);
  }
  // Zero-extend half-word.
  void Uxth(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    uxth(rd, rn);
  }
  // Zero-extend word.
  void Uxtw(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    uxtw(rd, rn);
  }
2682
  // NEON 3 vector register instructions.
  //
  // Each V(asm, Masm) entry below expands (via DEFINE_MACRO_ASM_FUNC) to a
  // macro-assembler method `Masm(vd, vn, vm)` that checks macro instructions
  // are allowed, opens a SingleEmissionCheckScope and emits the raw
  // assembler instruction `asm`.
#define NEON_3VREG_MACRO_LIST(V) \
  V(add, Add)                    \
  V(addhn, Addhn)                \
  V(addhn2, Addhn2)              \
  V(addp, Addp)                  \
  V(and_, And)                   \
  V(bic, Bic)                    \
  V(bif, Bif)                    \
  V(bit, Bit)                    \
  V(bsl, Bsl)                    \
  V(cmeq, Cmeq)                  \
  V(cmge, Cmge)                  \
  V(cmgt, Cmgt)                  \
  V(cmhi, Cmhi)                  \
  V(cmhs, Cmhs)                  \
  V(cmtst, Cmtst)                \
  V(eor, Eor)                    \
  V(fabd, Fabd)                  \
  V(facge, Facge)                \
  V(facgt, Facgt)                \
  V(faddp, Faddp)                \
  V(fcmeq, Fcmeq)                \
  V(fcmge, Fcmge)                \
  V(fcmgt, Fcmgt)                \
  V(fmaxnmp, Fmaxnmp)            \
  V(fmaxp, Fmaxp)                \
  V(fminnmp, Fminnmp)            \
  V(fminp, Fminp)                \
  V(fmla, Fmla)                  \
  V(fmlal, Fmlal)                \
  V(fmlal2, Fmlal2)              \
  V(fmls, Fmls)                  \
  V(fmlsl, Fmlsl)                \
  V(fmlsl2, Fmlsl2)              \
  V(fmulx, Fmulx)                \
  V(frecps, Frecps)              \
  V(frsqrts, Frsqrts)            \
  V(mla, Mla)                    \
  V(mls, Mls)                    \
  V(mul, Mul)                    \
  V(orn, Orn)                    \
  V(orr, Orr)                    \
  V(pmul, Pmul)                  \
  V(pmull, Pmull)                \
  V(pmull2, Pmull2)              \
  V(raddhn, Raddhn)              \
  V(raddhn2, Raddhn2)            \
  V(rsubhn, Rsubhn)              \
  V(rsubhn2, Rsubhn2)            \
  V(saba, Saba)                  \
  V(sabal, Sabal)                \
  V(sabal2, Sabal2)              \
  V(sabd, Sabd)                  \
  V(sabdl, Sabdl)                \
  V(sabdl2, Sabdl2)              \
  V(saddl, Saddl)                \
  V(saddl2, Saddl2)              \
  V(saddw, Saddw)                \
  V(saddw2, Saddw2)              \
  V(shadd, Shadd)                \
  V(shsub, Shsub)                \
  V(smax, Smax)                  \
  V(smaxp, Smaxp)                \
  V(smin, Smin)                  \
  V(sminp, Sminp)                \
  V(smlal, Smlal)                \
  V(smlal2, Smlal2)              \
  V(smlsl, Smlsl)                \
  V(smlsl2, Smlsl2)              \
  V(smull, Smull)                \
  V(smull2, Smull2)              \
  V(sqadd, Sqadd)                \
  V(sqdmlal, Sqdmlal)            \
  V(sqdmlal2, Sqdmlal2)          \
  V(sqdmlsl, Sqdmlsl)            \
  V(sqdmlsl2, Sqdmlsl2)          \
  V(sqdmulh, Sqdmulh)            \
  V(sqdmull, Sqdmull)            \
  V(sqdmull2, Sqdmull2)          \
  V(sqrdmulh, Sqrdmulh)          \
  V(sdot, Sdot)                  \
  V(sqrdmlah, Sqrdmlah)          \
  V(udot, Udot)                  \
  V(sqrdmlsh, Sqrdmlsh)          \
  V(sqrshl, Sqrshl)              \
  V(sqshl, Sqshl)                \
  V(sqsub, Sqsub)                \
  V(srhadd, Srhadd)              \
  V(srshl, Srshl)                \
  V(sshl, Sshl)                  \
  V(ssubl, Ssubl)                \
  V(ssubl2, Ssubl2)              \
  V(ssubw, Ssubw)                \
  V(ssubw2, Ssubw2)              \
  V(sub, Sub)                    \
  V(subhn, Subhn)                \
  V(subhn2, Subhn2)              \
  V(trn1, Trn1)                  \
  V(trn2, Trn2)                  \
  V(uaba, Uaba)                  \
  V(uabal, Uabal)                \
  V(uabal2, Uabal2)              \
  V(uabd, Uabd)                  \
  V(uabdl, Uabdl)                \
  V(uabdl2, Uabdl2)              \
  V(uaddl, Uaddl)                \
  V(uaddl2, Uaddl2)              \
  V(uaddw, Uaddw)                \
  V(uaddw2, Uaddw2)              \
  V(uhadd, Uhadd)                \
  V(uhsub, Uhsub)                \
  V(umax, Umax)                  \
  V(umaxp, Umaxp)                \
  V(umin, Umin)                  \
  V(uminp, Uminp)                \
  V(umlal, Umlal)                \
  V(umlal2, Umlal2)              \
  V(umlsl, Umlsl)                \
  V(umlsl2, Umlsl2)              \
  V(umull, Umull)                \
  V(umull2, Umull2)              \
  V(uqadd, Uqadd)                \
  V(uqrshl, Uqrshl)              \
  V(uqshl, Uqshl)                \
  V(uqsub, Uqsub)                \
  V(urhadd, Urhadd)              \
  V(urshl, Urshl)                \
  V(ushl, Ushl)                  \
  V(usubl, Usubl)                \
  V(usubl2, Usubl2)              \
  V(usubw, Usubw)                \
  V(usubw2, Usubw2)              \
  V(uzp1, Uzp1)                  \
  V(uzp2, Uzp2)                  \
  V(zip1, Zip1)                  \
  V(zip2, Zip2)                  \
  V(smmla, Smmla)                \
  V(ummla, Ummla)                \
  V(usmmla, Usmmla)              \
  V(usdot, Usdot)

  // Generator: one three-operand vector method per list entry.
#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)                                     \
  void MASM(const VRegister& vd, const VRegister& vn, const VRegister& vm) { \
    VIXL_ASSERT(allow_macro_instructions_);                                  \
    SingleEmissionCheckScope guard(this);                                    \
    ASM(vd, vn, vm);                                                         \
  }
  NEON_3VREG_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
2833
  // NEON 2 vector register instructions.
  //
  // Each V(asm, Masm) entry expands to a method `Masm(vd, vn)` wrapping the
  // raw emitter `asm` with the usual assert + single-emission scope.
#define NEON_2VREG_MACRO_LIST(V) \
  V(abs, Abs)                    \
  V(addp, Addp)                  \
  V(addv, Addv)                  \
  V(cls, Cls)                    \
  V(clz, Clz)                    \
  V(cnt, Cnt)                    \
  V(fabs, Fabs)                  \
  V(faddp, Faddp)                \
  V(fcvtas, Fcvtas)              \
  V(fcvtau, Fcvtau)              \
  V(fcvtms, Fcvtms)              \
  V(fcvtmu, Fcvtmu)              \
  V(fcvtns, Fcvtns)              \
  V(fcvtnu, Fcvtnu)              \
  V(fcvtps, Fcvtps)              \
  V(fcvtpu, Fcvtpu)              \
  V(fmaxnmp, Fmaxnmp)            \
  V(fmaxnmv, Fmaxnmv)            \
  V(fmaxp, Fmaxp)                \
  V(fmaxv, Fmaxv)                \
  V(fminnmp, Fminnmp)            \
  V(fminnmv, Fminnmv)            \
  V(fminp, Fminp)                \
  V(fminv, Fminv)                \
  V(fneg, Fneg)                  \
  V(frecpe, Frecpe)              \
  V(frecpx, Frecpx)              \
  V(frint32x, Frint32x)          \
  V(frint32z, Frint32z)          \
  V(frint64x, Frint64x)          \
  V(frint64z, Frint64z)          \
  V(frinta, Frinta)              \
  V(frinti, Frinti)              \
  V(frintm, Frintm)              \
  V(frintn, Frintn)              \
  V(frintp, Frintp)              \
  V(frintx, Frintx)              \
  V(frintz, Frintz)              \
  V(frsqrte, Frsqrte)            \
  V(fsqrt, Fsqrt)                \
  V(mov, Mov)                    \
  V(mvn, Mvn)                    \
  V(neg, Neg)                    \
  V(not_, Not)                   \
  V(rbit, Rbit)                  \
  V(rev16, Rev16)                \
  V(rev32, Rev32)                \
  V(rev64, Rev64)                \
  V(sadalp, Sadalp)              \
  V(saddlp, Saddlp)              \
  V(saddlv, Saddlv)              \
  V(smaxv, Smaxv)                \
  V(sminv, Sminv)                \
  V(sqabs, Sqabs)                \
  V(sqneg, Sqneg)                \
  V(sqxtn, Sqxtn)                \
  V(sqxtn2, Sqxtn2)              \
  V(sqxtun, Sqxtun)              \
  V(sqxtun2, Sqxtun2)            \
  V(suqadd, Suqadd)              \
  V(sxtl, Sxtl)                  \
  V(sxtl2, Sxtl2)                \
  V(uadalp, Uadalp)              \
  V(uaddlp, Uaddlp)              \
  V(uaddlv, Uaddlv)              \
  V(umaxv, Umaxv)                \
  V(uminv, Uminv)                \
  V(uqxtn, Uqxtn)                \
  V(uqxtn2, Uqxtn2)              \
  V(urecpe, Urecpe)              \
  V(ursqrte, Ursqrte)            \
  V(usqadd, Usqadd)              \
  V(uxtl, Uxtl)                  \
  V(uxtl2, Uxtl2)                \
  V(xtn, Xtn)                    \
  V(xtn2, Xtn2)

  // Generator: one two-operand vector method per list entry.
#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)                \
  void MASM(const VRegister& vd, const VRegister& vn) { \
    VIXL_ASSERT(allow_macro_instructions_);             \
    SingleEmissionCheckScope guard(this);               \
    ASM(vd, vn);                                        \
  }
  NEON_2VREG_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
2921
  // NEON 2 vector register with immediate instructions.
  //
  // Floating-point compares against an immediate; each entry expands to a
  // method `Masm(vd, vn, imm)` with the usual assert + emission scope.
#define NEON_2VREG_FPIMM_MACRO_LIST(V) \
  V(fcmeq, Fcmeq)                      \
  V(fcmge, Fcmge)                      \
  V(fcmgt, Fcmgt)                      \
  V(fcmle, Fcmle)                      \
  V(fcmlt, Fcmlt)

#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)                            \
  void MASM(const VRegister& vd, const VRegister& vn, double imm) { \
    VIXL_ASSERT(allow_macro_instructions_);                         \
    SingleEmissionCheckScope guard(this);                           \
    ASM(vd, vn, imm);                                               \
  }
  NEON_2VREG_FPIMM_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
2938
  // NEON by element instructions.
  //
  // Each entry expands to a method `Masm(vd, vn, vm, vm_index)` where the
  // second source operand is a single lane of `vm` selected by `vm_index`.
#define NEON_BYELEMENT_MACRO_LIST(V) \
  V(fmul, Fmul)                      \
  V(fmla, Fmla)                      \
  V(fmlal, Fmlal)                    \
  V(fmlal2, Fmlal2)                  \
  V(fmls, Fmls)                      \
  V(fmlsl, Fmlsl)                    \
  V(fmlsl2, Fmlsl2)                  \
  V(fmulx, Fmulx)                    \
  V(mul, Mul)                        \
  V(mla, Mla)                        \
  V(mls, Mls)                        \
  V(sqdmulh, Sqdmulh)                \
  V(sqrdmulh, Sqrdmulh)              \
  V(sdot, Sdot)                      \
  V(sqrdmlah, Sqrdmlah)              \
  V(udot, Udot)                      \
  V(sqrdmlsh, Sqrdmlsh)              \
  V(sqdmull, Sqdmull)                \
  V(sqdmull2, Sqdmull2)              \
  V(sqdmlal, Sqdmlal)                \
  V(sqdmlal2, Sqdmlal2)              \
  V(sqdmlsl, Sqdmlsl)                \
  V(sqdmlsl2, Sqdmlsl2)              \
  V(smull, Smull)                    \
  V(smull2, Smull2)                  \
  V(smlal, Smlal)                    \
  V(smlal2, Smlal2)                  \
  V(smlsl, Smlsl)                    \
  V(smlsl2, Smlsl2)                  \
  V(umull, Umull)                    \
  V(umull2, Umull2)                  \
  V(umlal, Umlal)                    \
  V(umlal2, Umlal2)                  \
  V(umlsl, Umlsl)                    \
  V(umlsl2, Umlsl2)                  \
  V(sudot, Sudot)                    \
  V(usdot, Usdot)


#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)    \
  void MASM(const VRegister& vd,            \
            const VRegister& vn,            \
            const VRegister& vm,            \
            int vm_index) {                 \
    VIXL_ASSERT(allow_macro_instructions_); \
    SingleEmissionCheckScope guard(this);   \
    ASM(vd, vn, vm, vm_index);              \
  }
  NEON_BYELEMENT_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
2991
  // NEON shift-by-immediate instructions: each entry expands to a method
  // `Masm(vd, vn, shift)` taking an immediate shift amount.
#define NEON_2VREG_SHIFT_MACRO_LIST(V) \
  V(rshrn, Rshrn)                      \
  V(rshrn2, Rshrn2)                    \
  V(shl, Shl)                          \
  V(shll, Shll)                        \
  V(shll2, Shll2)                      \
  V(shrn, Shrn)                        \
  V(shrn2, Shrn2)                      \
  V(sli, Sli)                          \
  V(sqrshrn, Sqrshrn)                  \
  V(sqrshrn2, Sqrshrn2)                \
  V(sqrshrun, Sqrshrun)                \
  V(sqrshrun2, Sqrshrun2)              \
  V(sqshl, Sqshl)                      \
  V(sqshlu, Sqshlu)                    \
  V(sqshrn, Sqshrn)                    \
  V(sqshrn2, Sqshrn2)                  \
  V(sqshrun, Sqshrun)                  \
  V(sqshrun2, Sqshrun2)                \
  V(sri, Sri)                          \
  V(srshr, Srshr)                      \
  V(srsra, Srsra)                      \
  V(sshr, Sshr)                        \
  V(ssra, Ssra)                        \
  V(uqrshrn, Uqrshrn)                  \
  V(uqrshrn2, Uqrshrn2)                \
  V(uqshl, Uqshl)                      \
  V(uqshrn, Uqshrn)                    \
  V(uqshrn2, Uqshrn2)                  \
  V(urshr, Urshr)                      \
  V(ursra, Ursra)                      \
  V(ushr, Ushr)                        \
  V(usra, Usra)

#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)                          \
  void MASM(const VRegister& vd, const VRegister& vn, int shift) { \
    VIXL_ASSERT(allow_macro_instructions_);                       \
    SingleEmissionCheckScope guard(this);                         \
    ASM(vd, vn, shift);                                           \
  }
  NEON_2VREG_SHIFT_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
3034
  // NEON long left-shift instructions. SSHLL/USHLL cannot encode a shift
  // equal to the source lane size; that case is emitted as the dedicated
  // SHLL/SHLL2 form instead (first macro argument), otherwise the signed or
  // unsigned long shift (second argument) is used.
#define NEON_2VREG_SHIFT_LONG_MACRO_LIST(V) \
  V(shll, sshll, Sshll)                     \
  V(shll, ushll, Ushll)                     \
  V(shll2, sshll2, Sshll2)                  \
  V(shll2, ushll2, Ushll2)

#define DEFINE_MACRO_ASM_FUNC(ASM1, ASM2, MASM)                        \
  void MASM(const VRegister& vd, const VRegister& vn, int shift) {     \
    VIXL_ASSERT(allow_macro_instructions_);                            \
    SingleEmissionCheckScope guard(this);                              \
    if (vn.GetLaneSizeInBits() == static_cast<unsigned>(shift)) {      \
      ASM1(vd, vn, shift);                                             \
    } else {                                                           \
      ASM2(vd, vn, shift);                                             \
    }                                                                  \
  }
  NEON_2VREG_SHIFT_LONG_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
3053
  // SVE 3 vector register instructions.
  //
  // These predicated SVE operations are architecturally destructive
  // (zd = op(pg, zd, zm)). All listed operations are commutative, so when
  // `zd` aliases the second source the operands can simply be swapped; when
  // `zd` aliases neither source, `zn` is first copied into `zd` via a
  // MovprfxHelperScope before the destructive form is emitted.
#define SVE_3VREG_COMMUTATIVE_MACRO_LIST(V) \
  V(add, Add)                               \
  V(and_, And)                              \
  V(bic, Bic)                               \
  V(eor, Eor)                               \
  V(mul, Mul)                               \
  V(orr, Orr)                               \
  V(sabd, Sabd)                             \
  V(shadd, Shadd)                           \
  V(smax, Smax)                             \
  V(smin, Smin)                             \
  V(smulh, Smulh)                           \
  V(sqadd, Sqadd)                           \
  V(srhadd, Srhadd)                         \
  V(uabd, Uabd)                             \
  V(uhadd, Uhadd)                           \
  V(umax, Umax)                             \
  V(umin, Umin)                             \
  V(umulh, Umulh)                           \
  V(uqadd, Uqadd)                           \
  V(urhadd, Urhadd)

#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)          \
  void MASM(const ZRegister& zd,                  \
            const PRegisterM& pg,                 \
            const ZRegister& zn,                  \
            const ZRegister& zm) {                \
    VIXL_ASSERT(allow_macro_instructions_);       \
    if (zd.Aliases(zn)) {                         \
      SingleEmissionCheckScope guard(this);       \
      ASM(zd, pg, zd, zm);                        \
    } else if (zd.Aliases(zm)) {                  \
      SingleEmissionCheckScope guard(this);       \
      ASM(zd, pg, zd, zn);                        \
    } else {                                      \
      MovprfxHelperScope guard(this, zd, pg, zn); \
      ASM(zd, pg, zd, zm);                        \
    }                                             \
  }
  SVE_3VREG_COMMUTATIVE_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
3096
  // Vector bit clear with shifted byte immediate.
  void Bic(const VRegister& vd, const int imm8, const int left_shift = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    bic(vd, imm8, left_shift);
  }
  // Vector compare equal to immediate.
  void Cmeq(const VRegister& vd, const VRegister& vn, int imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    cmeq(vd, vn, imm);
  }
  // Vector compare signed greater-or-equal to immediate.
  void Cmge(const VRegister& vd, const VRegister& vn, int imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    cmge(vd, vn, imm);
  }
  // Vector compare signed greater-than immediate.
  void Cmgt(const VRegister& vd, const VRegister& vn, int imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    cmgt(vd, vn, imm);
  }
  // Vector compare signed less-or-equal to immediate.
  void Cmle(const VRegister& vd, const VRegister& vn, int imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    cmle(vd, vn, imm);
  }
  // Vector compare signed less-than immediate.
  void Cmlt(const VRegister& vd, const VRegister& vn, int imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    cmlt(vd, vn, imm);
  }
  // Duplicate the lane `vn[index]` across all lanes of `vd`.
  void Dup(const VRegister& vd, const VRegister& vn, int index) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    dup(vd, vn, index);
  }
  // Duplicate a general-purpose register value across all lanes of `vd`.
  void Dup(const VRegister& vd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    dup(vd, rn);
  }
  // Vector extract: concatenate `vn:vm` and take bytes starting at `index`.
  void Ext(const VRegister& vd,
           const VRegister& vn,
           const VRegister& vm,
           int index) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ext(vd, vn, vm, index);
  }
  // Floating-point complex add with rotation `rot` (degrees).
  void Fcadd(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             int rot) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcadd(vd, vn, vm, rot);
  }
  // Floating-point complex multiply-accumulate, by-element form: the second
  // source is the complex lane pair of `vm` selected by `vm_index`.
  void Fcmla(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             int vm_index,
             int rot) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcmla(vd, vn, vm, vm_index, rot);
  }
  // Floating-point complex multiply-accumulate, vector form.
  void Fcmla(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             int rot) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcmla(vd, vn, vm, rot);
  }
  // Insert lane `vn[vn_index]` into lane `vd[vd_index]`.
  void Ins(const VRegister& vd,
           int vd_index,
           const VRegister& vn,
           int vn_index) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ins(vd, vd_index, vn, vn_index);
  }
  // Insert a general-purpose register value into lane `vd[vd_index]`.
  void Ins(const VRegister& vd, int vd_index, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ins(vd, vd_index, rn);
  }
  // NEON structure loads. The Ld<n> overloads load <n>-element structures
  // into one to four registers; the `lane` forms load a single structure
  // into one lane; the Ld<n>r forms load one structure and replicate it to
  // all lanes of every register.

  // Load multiple 1-element structures into one register.
  void Ld1(const VRegister& vt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld1(vt, src);
  }
  // Load multiple 1-element structures into two registers.
  void Ld1(const VRegister& vt, const VRegister& vt2, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld1(vt, vt2, src);
  }
  // Load multiple 1-element structures into three registers.
  void Ld1(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld1(vt, vt2, vt3, src);
  }
  // Load multiple 1-element structures into four registers.
  void Ld1(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           const VRegister& vt4,
           const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld1(vt, vt2, vt3, vt4, src);
  }
  // Load a single 1-element structure into one lane.
  void Ld1(const VRegister& vt, int lane, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld1(vt, lane, src);
  }
  // Load a single 1-element structure and replicate to all lanes.
  void Ld1r(const VRegister& vt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld1r(vt, src);
  }
  // Load multiple 2-element structures, de-interleaved into two registers.
  void Ld2(const VRegister& vt, const VRegister& vt2, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld2(vt, vt2, src);
  }
  // Load a single 2-element structure into one lane of two registers.
  void Ld2(const VRegister& vt,
           const VRegister& vt2,
           int lane,
           const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld2(vt, vt2, lane, src);
  }
  // Load a single 2-element structure and replicate to all lanes.
  void Ld2r(const VRegister& vt, const VRegister& vt2, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld2r(vt, vt2, src);
  }
  // Load multiple 3-element structures, de-interleaved into three registers.
  void Ld3(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld3(vt, vt2, vt3, src);
  }
  // Load a single 3-element structure into one lane of three registers.
  void Ld3(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           int lane,
           const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld3(vt, vt2, vt3, lane, src);
  }
  // Load a single 3-element structure and replicate to all lanes.
  void Ld3r(const VRegister& vt,
            const VRegister& vt2,
            const VRegister& vt3,
            const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld3r(vt, vt2, vt3, src);
  }
  // Load multiple 4-element structures, de-interleaved into four registers.
  void Ld4(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           const VRegister& vt4,
           const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld4(vt, vt2, vt3, vt4, src);
  }
  // Load a single 4-element structure into one lane of four registers.
  void Ld4(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           const VRegister& vt4,
           int lane,
           const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld4(vt, vt2, vt3, vt4, lane, src);
  }
  // Load a single 4-element structure and replicate to all lanes.
  void Ld4r(const VRegister& vt,
            const VRegister& vt2,
            const VRegister& vt3,
            const VRegister& vt4,
            const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ld4r(vt, vt2, vt3, vt4, src);
  }
  // Move lane `vn[vn_index]` into lane `vd[vd_index]`.
  void Mov(const VRegister& vd,
           int vd_index,
           const VRegister& vn,
           int vn_index) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    mov(vd, vd_index, vn, vn_index);
  }
  // Move lane `vn[index]` into the scalar register `vd`.
  void Mov(const VRegister& vd, const VRegister& vn, int index) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    mov(vd, vn, index);
  }
  // Move a general-purpose register value into lane `vd[vd_index]`.
  void Mov(const VRegister& vd, int vd_index, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    mov(vd, vd_index, rn);
  }
  // Move lane `vn[vn_index]` into a general-purpose register.
  void Mov(const Register& rd, const VRegister& vn, int vn_index) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    mov(rd, vn, vn_index);
  }
3314 void Movi(const VRegister& vd,
3315 uint64_t imm,
3316 Shift shift = LSL,
3317 int shift_amount = 0);
3318 void Movi(const VRegister& vd, uint64_t hi, uint64_t lo);
  // Vector move inverted shifted immediate.
  void Mvni(const VRegister& vd,
            const int imm8,
            Shift shift = LSL,
            const int shift_amount = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    mvni(vd, imm8, shift, shift_amount);
  }
  // Vector bitwise OR with shifted byte immediate.
  void Orr(const VRegister& vd, const int imm8, const int left_shift = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    orr(vd, imm8, left_shift);
  }
  // Signed integer (or fixed-point, when fbits > 0) to floating-point
  // convert, vector form.
  void Scvtf(const VRegister& vd, const VRegister& vn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    scvtf(vd, vn, fbits);
  }
  // Unsigned integer (or fixed-point) to floating-point convert, vector form.
  void Ucvtf(const VRegister& vd, const VRegister& vn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ucvtf(vd, vn, fbits);
  }
  // Floating-point to signed integer (or fixed-point) convert, round toward
  // zero, vector form.
  void Fcvtzs(const VRegister& vd, const VRegister& vn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtzs(vd, vn, fbits);
  }
  // Floating-point to unsigned integer (or fixed-point) convert, round
  // toward zero, vector form.
  void Fcvtzu(const VRegister& vd, const VRegister& vn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtzu(vd, vn, fbits);
  }
St1(const VRegister & vt,const MemOperand & dst)3352 void St1(const VRegister& vt, const MemOperand& dst) {
3353 VIXL_ASSERT(allow_macro_instructions_);
3354 SingleEmissionCheckScope guard(this);
3355 st1(vt, dst);
3356 }
St1(const VRegister & vt,const VRegister & vt2,const MemOperand & dst)3357 void St1(const VRegister& vt, const VRegister& vt2, const MemOperand& dst) {
3358 VIXL_ASSERT(allow_macro_instructions_);
3359 SingleEmissionCheckScope guard(this);
3360 st1(vt, vt2, dst);
3361 }
St1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & dst)3362 void St1(const VRegister& vt,
3363 const VRegister& vt2,
3364 const VRegister& vt3,
3365 const MemOperand& dst) {
3366 VIXL_ASSERT(allow_macro_instructions_);
3367 SingleEmissionCheckScope guard(this);
3368 st1(vt, vt2, vt3, dst);
3369 }
St1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & dst)3370 void St1(const VRegister& vt,
3371 const VRegister& vt2,
3372 const VRegister& vt3,
3373 const VRegister& vt4,
3374 const MemOperand& dst) {
3375 VIXL_ASSERT(allow_macro_instructions_);
3376 SingleEmissionCheckScope guard(this);
3377 st1(vt, vt2, vt3, vt4, dst);
3378 }
  // Store a single lane of `vt` to memory (`st1`, single-structure form).
  void St1(const VRegister& vt, int lane, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    st1(vt, lane, dst);
  }
  // Store two-element structures from two registers (`st2`).
  void St2(const VRegister& vt, const VRegister& vt2, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    st2(vt, vt2, dst);
  }
  // Store three-element structures from three registers (`st3`).
  void St3(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    st3(vt, vt2, vt3, dst);
  }
  // Store four-element structures from four registers (`st4`).
  void St4(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           const VRegister& vt4,
           const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    st4(vt, vt2, vt3, vt4, dst);
  }
  // Store a two-element structure from one lane of two registers (`st2`).
  void St2(const VRegister& vt,
           const VRegister& vt2,
           int lane,
           const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    st2(vt, vt2, lane, dst);
  }
  // Store a three-element structure from one lane of three registers (`st3`).
  void St3(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           int lane,
           const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    st3(vt, vt2, vt3, lane, dst);
  }
  // Store a four-element structure from one lane of four registers (`st4`).
  void St4(const VRegister& vt,
           const VRegister& vt2,
           const VRegister& vt3,
           const VRegister& vt4,
           int lane,
           const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    st4(vt, vt2, vt3, vt4, lane, dst);
  }
  // Signed move of vector lane `vn_index` to general-purpose `rd` (`smov`).
  void Smov(const Register& rd, const VRegister& vn, int vn_index) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    smov(rd, vn, vn_index);
  }
  // Unsigned move of vector lane `vn_index` to general-purpose `rd` (`umov`).
  void Umov(const Register& rd, const VRegister& vn, int vn_index) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    umov(rd, vn, vn_index);
  }
  // Emit `crc32b` (CRC32 accumulate, byte operand).
  void Crc32b(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    crc32b(rd, rn, rm);
  }
  // Emit `crc32h` (CRC32 accumulate, halfword operand).
  void Crc32h(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    crc32h(rd, rn, rm);
  }
  // Emit `crc32w` (CRC32 accumulate, word operand).
  void Crc32w(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    crc32w(rd, rn, rm);
  }
  // Emit `crc32x` (CRC32 accumulate, doubleword operand).
  void Crc32x(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    crc32x(rd, rn, rm);
  }
  // Emit `crc32cb` (CRC32C accumulate, byte operand).
  void Crc32cb(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    crc32cb(rd, rn, rm);
  }
  // Emit `crc32ch` (CRC32C accumulate, halfword operand).
  void Crc32ch(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    crc32ch(rd, rn, rm);
  }
  // Emit `crc32cw` (CRC32C accumulate, word operand).
  void Crc32cw(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    crc32cw(rd, rn, rm);
  }
  // Emit `crc32cx` (CRC32C accumulate, doubleword operand).
  void Crc32cx(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    crc32cx(rd, rn, rm);
  }
3483
3484 // Scalable Vector Extensions.
  // SVE absolute value, merging predication (`abs`).
  void Abs(const ZRegister& zd, const PRegisterM& pg, const ZRegister& zn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    abs(zd, pg, zn);
  }
  // SVE unpredicated vector add (`add`).
  void Add(const ZRegister& zd, const ZRegister& zn, const ZRegister& zm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    add(zd, zn, zm);
  }
  // SVE add of an immediate. Delegates to AddSubHelper, which handles
  // immediates that cannot be encoded directly (no emission scope here —
  // the helper may emit more than one instruction).
  void Add(const ZRegister& zd, const ZRegister& zn, IntegerOperand imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    AddSubHelper(kAddImmediate, zd, zn, imm);
  }
  // Add a multiple of the SVE predicate-length (Addpl) or vector-length
  // (Addvl) to `xn`. Declared here; defined out of line — the multiplier may
  // require more than a single instruction to synthesise.
  void Addpl(const Register& xd, const Register& xn, int64_t multiplier);
  void Addvl(const Register& xd, const Register& xn, int64_t multiplier);
  // Note that unlike the core ISA, SVE's `adr` is not PC-relative.
  // It computes a vector of addresses from a base and offset vector.
  void Adr(const ZRegister& zd, const SVEMemOperand& addr) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    adr(zd, addr);
  }
  // SVE predicate AND, zeroing predication (`and`).
  void And(const PRegisterWithLaneSize& pd,
           const PRegisterZ& pg,
           const PRegisterWithLaneSize& pn,
           const PRegisterWithLaneSize& pm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    // `and_` because `and` is a reserved alternative token in C++.
    and_(pd, pg, pn, pm);
  }
  // SVE bitwise AND with an immediate. Only immediates encodable as a
  // "logical immediate" bit pattern are supported so far.
  void And(const ZRegister& zd, const ZRegister& zn, uint64_t imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    if (IsImmLogical(imm, zd.GetLaneSizeInBits())) {
      and_(zd, zn, imm);
    } else {
      // TODO: Synthesise the immediate once 'Mov' is implemented.
      VIXL_UNIMPLEMENTED();
    }
  }
  // SVE unpredicated bitwise AND of two vectors. The instruction is
  // lane-size-agnostic, so the operands are cast to D-sized lanes for
  // encoding; the caller's lane sizes need only agree with each other.
  void And(const ZRegister& zd, const ZRegister& zn, const ZRegister& zm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(AreSameLaneSize(zd, zn, zm));
    SingleEmissionCheckScope guard(this);
    and_(zd.VnD(), zn.VnD(), zm.VnD());
  }
  // SVE predicate AND, setting condition flags (`ands`).
  void Ands(const PRegisterWithLaneSize& pd,
            const PRegisterZ& pg,
            const PRegisterWithLaneSize& pn,
            const PRegisterWithLaneSize& pm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ands(pd, pg, pn, pm);
  }
  // SVE bitwise AND reduction of active lanes to a scalar (`andv`).
  void Andv(const VRegister& vd, const PRegister& pg, const ZRegister& zn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    andv(vd, pg, zn);
  }
  // SVE predicated arithmetic shift right by immediate. The underlying
  // instruction is destructive (zd is both source and destination), so
  // MovprfxHelperScope moves zn into zd first when they differ.
  void Asr(const ZRegister& zd,
           const PRegisterM& pg,
           const ZRegister& zn,
           int shift) {
    VIXL_ASSERT(allow_macro_instructions_);
    MovprfxHelperScope guard(this, zd, pg, zn);
    asr(zd, pg, zd, shift);
  }
  // SVE predicated arithmetic shift right by vector. Declared here;
  // defined out of line.
  void Asr(const ZRegister& zd,
           const PRegisterM& pg,
           const ZRegister& zn,
           const ZRegister& zm);
  // SVE unpredicated arithmetic shift right by immediate (`asr`).
  void Asr(const ZRegister& zd, const ZRegister& zn, int shift) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    asr(zd, zn, shift);
  }
  // SVE unpredicated arithmetic shift right by vector (`asr`, wide form).
  void Asr(const ZRegister& zd, const ZRegister& zn, const ZRegister& zm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    asr(zd, zn, zm);
  }
  // SVE arithmetic shift right for divide by power of two (`asrd`), rounding
  // toward zero. Destructive instruction, so MovprfxHelperScope moves zn
  // into zd first when they differ.
  void Asrd(const ZRegister& zd,
            const PRegisterM& pg,
            const ZRegister& zn,
            int shift) {
    VIXL_ASSERT(allow_macro_instructions_);
    MovprfxHelperScope guard(this, zd, pg, zn);
    asrd(zd, pg, zd, shift);
  }
  // SVE predicate AND-NOT (`bic`), zeroing predication: pd = pn & ~pm.
  void Bic(const PRegisterWithLaneSize& pd,
           const PRegisterZ& pg,
           const PRegisterWithLaneSize& pn,
           const PRegisterWithLaneSize& pm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    bic(pd, pg, pn, pm);
  }
  // SVE unpredicated vector AND-NOT: zd = zn & ~zm. Lane-size-agnostic, so
  // operands are cast to D-sized lanes for encoding; the caller's lane sizes
  // need only agree with each other.
  void Bic(const ZRegister& zd, const ZRegister& zn, const ZRegister& zm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(AreSameLaneSize(zd, zn, zm));
    SingleEmissionCheckScope guard(this);
    bic(zd.VnD(), zn.VnD(), zm.VnD());
  }
Bic(const ZRegister & zd,const ZRegister & zn,uint64_t imm)3588