1 // Copyright 2015, VIXL authors
2 // All rights reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are met:
6 //
7 // * Redistributions of source code must retain the above copyright notice,
8 // this list of conditions and the following disclaimer.
9 // * Redistributions in binary form must reproduce the above copyright notice,
10 // this list of conditions and the following disclaimer in the documentation
11 // and/or other materials provided with the distribution.
12 // * Neither the name of ARM Limited nor the names of its contributors may be
13 // used to endorse or promote products derived from this software without
14 // specific prior written permission.
15 //
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20 // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23 // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26
27 #ifndef VIXL_AARCH64_MACRO_ASSEMBLER_AARCH64_H_
28 #define VIXL_AARCH64_MACRO_ASSEMBLER_AARCH64_H_
29
30 #include <algorithm>
31 #include <limits>
32
33 #include "../code-generation-scopes-vixl.h"
34 #include "../globals-vixl.h"
35 #include "../macro-assembler-interface.h"
36
37 #include "assembler-aarch64.h"
38 // Required for runtime call support.
39 // TODO: Break this dependency. We should be able to separate out the necessary
40 // parts so that we don't need to include the whole simulator header.
41 #include "simulator-aarch64.h"
42 // Required in order to generate debugging instructions for the simulator. This
43 // is needed regardless of whether the simulator is included or not, since
44 // generating simulator specific instructions is controlled at runtime.
45 #include "simulator-constants-aarch64.h"
46
47
// X-macro list of single-register load/store macro-assembler helpers, used to
// declare and define the macro variants in one place:
//   V(macro name, register type, register argument name, load/store opcode)
#define LS_MACRO_LIST(V)                                     \
  V(Ldrb, Register&, rt, LDRB_w)                             \
  V(Strb, Register&, rt, STRB_w)                             \
  V(Ldrsb, Register&, rt, rt.Is64Bits() ? LDRSB_x : LDRSB_w) \
  V(Ldrh, Register&, rt, LDRH_w)                             \
  V(Strh, Register&, rt, STRH_w)                             \
  V(Ldrsh, Register&, rt, rt.Is64Bits() ? LDRSH_x : LDRSH_w) \
  V(Ldr, CPURegister&, rt, LoadOpFor(rt))                    \
  V(Str, CPURegister&, rt, StoreOpFor(rt))                   \
  V(Ldrsw, Register&, rt, LDRSW_x)
58
59
// As LS_MACRO_LIST, but for load/store pair helpers:
//   V(macro name, register type, first register, second register, opcode)
#define LSPAIR_MACRO_LIST(V)                              \
  V(Ldp, CPURegister&, rt, rt2, LoadPairOpFor(rt, rt2))   \
  V(Stp, CPURegister&, rt, rt2, StorePairOpFor(rt, rt2))  \
  V(Ldpsw, Register&, rt, rt2, LDPSW_x)
64
65 namespace vixl {
66 namespace aarch64 {
67
68 // Forward declaration
69 class MacroAssembler;
70 class UseScratchRegisterScope;
71
72 class Pool {
73 public:
Pool(MacroAssembler * masm)74 explicit Pool(MacroAssembler* masm)
75 : checkpoint_(kNoCheckpointRequired), masm_(masm) {
76 Reset();
77 }
78
Reset()79 void Reset() {
80 checkpoint_ = kNoCheckpointRequired;
81 monitor_ = 0;
82 }
83
Block()84 void Block() { monitor_++; }
85 void Release();
IsBlocked()86 bool IsBlocked() const { return monitor_ != 0; }
87
88 static const ptrdiff_t kNoCheckpointRequired = PTRDIFF_MAX;
89
90 void SetNextCheckpoint(ptrdiff_t checkpoint);
GetCheckpoint()91 ptrdiff_t GetCheckpoint() const { return checkpoint_; }
92 VIXL_DEPRECATED("GetCheckpoint", ptrdiff_t checkpoint() const) {
93 return GetCheckpoint();
94 }
95
96 enum EmitOption { kBranchRequired, kNoBranchRequired };
97
98 protected:
99 // Next buffer offset at which a check is required for this pool.
100 ptrdiff_t checkpoint_;
101 // Indicates whether the emission of this pool is blocked.
102 int monitor_;
103 // The MacroAssembler using this pool.
104 MacroAssembler* masm_;
105 };
106
107
// Pool of literal constants that are accumulated and then emitted as a block
// within range of their first use.
class LiteralPool : public Pool {
 public:
  explicit LiteralPool(MacroAssembler* masm);
  ~LiteralPool() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION;
  void Reset();

  // Register a literal for emission with this pool.
  void AddEntry(RawLiteral* literal);
  bool IsEmpty() const { return entries_.empty(); }
  // Current pool size, including the pool header (see the inline definition
  // below).
  size_t GetSize() const;
  VIXL_DEPRECATED("GetSize", size_t Size() const) { return GetSize(); }

  // Worst-case size, additionally accounting for a branch over the pool.
  size_t GetMaxSize() const;
  VIXL_DEPRECATED("GetMaxSize", size_t MaxSize() const) { return GetMaxSize(); }

  // Worst-case combined size of the other pools managed by the same
  // MacroAssembler.
  size_t GetOtherPoolsMaxSize() const;
  VIXL_DEPRECATED("GetOtherPoolsMaxSize", size_t OtherPoolsMaxSize() const) {
    return GetOtherPoolsMaxSize();
  }

  // Emit the pool if generating `amount` more bytes would otherwise push a
  // pending literal out of range.
  void CheckEmitFor(size_t amount, EmitOption option = kBranchRequired);
  // Check whether we need to emit the literal pool in order to be able to
  // safely emit a branch with a given range.
  void CheckEmitForBranch(size_t range);
  void Emit(EmitOption option = kNoBranchRequired);

  void SetNextRecommendedCheckpoint(ptrdiff_t offset);
  ptrdiff_t GetNextRecommendedCheckpoint();
  VIXL_DEPRECATED("GetNextRecommendedCheckpoint",
                  ptrdiff_t NextRecommendedCheckpoint()) {
    return GetNextRecommendedCheckpoint();
  }

  // Record a literal use, tracking the earliest position (`first_use_`).
  void UpdateFirstUse(ptrdiff_t use_position);

  // Hand `literal` over to the pool; it is freed when the pool is destroyed.
  void DeleteOnDestruction(RawLiteral* literal) {
    deleted_on_destruction_.push_back(literal);
  }

  // Recommended not exact since the pool can be blocked for short periods.
  static const ptrdiff_t kRecommendedLiteralPoolRange = 128 * KBytes;

 private:
  // Literals waiting to be emitted.
  std::vector<RawLiteral*> entries_;
  // Accumulated size of the entries, excluding the pool header.
  size_t size_;
  // Buffer offset of the earliest literal use still pending emission.
  ptrdiff_t first_use_;
  // The parent class `Pool` provides a `checkpoint_`, which is the buffer
  // offset before which a check *must* occur. This recommended checkpoint
  // indicates when we would like to start emitting the constant pool. The
  // MacroAssembler can, but does not have to, check the buffer when the
  // checkpoint is reached.
  ptrdiff_t recommended_checkpoint_;

  // Literals owned by the pool, freed on destruction.
  std::vector<RawLiteral*> deleted_on_destruction_;
};
162
163
GetSize()164 inline size_t LiteralPool::GetSize() const {
165 // Account for the pool header.
166 return size_ + kInstructionSize;
167 }
168
169
GetMaxSize()170 inline size_t LiteralPool::GetMaxSize() const {
171 // Account for the potential branch over the pool.
172 return GetSize() + kInstructionSize;
173 }
174
175
GetNextRecommendedCheckpoint()176 inline ptrdiff_t LiteralPool::GetNextRecommendedCheckpoint() {
177 return first_use_ + kRecommendedLiteralPoolRange;
178 }
179
180
// Pool of veneers: out-of-line branches emitted so that limited-range forward
// branches (conditional, compare-and-branch, test-and-branch) can still reach
// targets that end up beyond their immediate range.
class VeneerPool : public Pool {
 public:
  explicit VeneerPool(MacroAssembler* masm) : Pool(masm) {}

  void Reset();

  // Blocking nests, as in the base class.
  void Block() { monitor_++; }
  void Release();
  bool IsBlocked() const { return monitor_ != 0; }
  bool IsEmpty() const { return unresolved_branches_.IsEmpty(); }

  // Information about one unresolved (forward) branch being tracked.
  class BranchInfo {
   public:
    BranchInfo()
        : first_unreacheable_pc_(0),
          pc_offset_(0),
          label_(NULL),
          branch_type_(UnknownBranchType) {}
    BranchInfo(ptrdiff_t offset, Label* label, ImmBranchType branch_type)
        : pc_offset_(offset), label_(label), branch_type_(branch_type) {
      first_unreacheable_pc_ =
          pc_offset_ + Instruction::GetImmBranchForwardRange(branch_type_);
    }

    // Sanity check used by the comparison operators below.
    static bool IsValidComparison(const BranchInfo& branch_1,
                                  const BranchInfo& branch_2) {
      // BranchInfo are always compared against other objects with
      // the same branch type.
      if (branch_1.branch_type_ != branch_2.branch_type_) {
        return false;
      }
      // Since we should never have two branch infos with the same offsets, it
      // first looks like we should check that offsets are different. However
      // the operators may also be used to *search* for a branch info in the
      // set.
      bool same_offsets = (branch_1.pc_offset_ == branch_2.pc_offset_);
      return (!same_offsets || ((branch_1.label_ == branch_2.label_) &&
                                (branch_1.first_unreacheable_pc_ ==
                                 branch_2.first_unreacheable_pc_)));
    }

    // We must provide comparison operators to work with InvalSet.
    bool operator==(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ == other.pc_offset_;
    }
    bool operator<(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ < other.pc_offset_;
    }
    bool operator<=(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ <= other.pc_offset_;
    }
    bool operator>(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ > other.pc_offset_;
    }

    // First instruction position that is not reachable by the branch using a
    // positive branch offset.
    ptrdiff_t first_unreacheable_pc_;
    // Offset of the branch in the code generation buffer.
    ptrdiff_t pc_offset_;
    // The label branched to.
    Label* label_;
    ImmBranchType branch_type_;
  };

  // Only branch types with limited range need veneers; unconditional and
  // unknown branch types are excluded.
  bool BranchTypeUsesVeneers(ImmBranchType type) {
    return (type != UnknownBranchType) && (type != UncondBranchType);
  }

  // Start / stop tracking an unresolved forward branch to `label`.
  void RegisterUnresolvedBranch(ptrdiff_t branch_pos,
                                Label* label,
                                ImmBranchType branch_type);
  void DeleteUnresolvedBranchInfoForLabel(Label* label);

  // Whether generating `amount` more bytes would take a tracked branch out of
  // range, so veneers must be emitted first.
  bool ShouldEmitVeneer(int64_t first_unreacheable_pc, size_t amount);
  bool ShouldEmitVeneers(size_t amount) {
    return ShouldEmitVeneer(unresolved_branches_.GetFirstLimit(), amount);
  }

  void CheckEmitFor(size_t amount, EmitOption option = kBranchRequired);
  void Emit(EmitOption option, size_t margin);

  // The code size generated for a veneer. Currently one branch instruction.
  // This is for code size checking purposes, and can be extended in the future
  // for example if we decide to add nops between the veneers.
  static const int kVeneerCodeSize = 1 * kInstructionSize;
  // The maximum size of code other than veneers that can be generated when
  // emitting a veneer pool. Currently there can be an additional branch to jump
  // over the pool.
  static const int kPoolNonVeneerCodeSize = 1 * kInstructionSize;

  void UpdateNextCheckPoint() { SetNextCheckpoint(GetNextCheckPoint()); }

  int GetNumberOfPotentialVeneers() const {
    return static_cast<int>(unresolved_branches_.GetSize());
  }
  VIXL_DEPRECATED("GetNumberOfPotentialVeneers",
                  int NumberOfPotentialVeneers() const) {
    return GetNumberOfPotentialVeneers();
  }

  // Worst case: one veneer per tracked branch, plus the branch over the pool.
  size_t GetMaxSize() const {
    return kPoolNonVeneerCodeSize +
           unresolved_branches_.GetSize() * kVeneerCodeSize;
  }
  VIXL_DEPRECATED("GetMaxSize", size_t MaxSize() const) { return GetMaxSize(); }

  size_t GetOtherPoolsMaxSize() const;
  VIXL_DEPRECATED("GetOtherPoolsMaxSize", size_t OtherPoolsMaxSize() const) {
    return GetOtherPoolsMaxSize();
  }

  // Parameters for the InvalSet instantiation used to store BranchInfo.
  static const int kNPreallocatedInfos = 4;
  static const ptrdiff_t kInvalidOffset = PTRDIFF_MAX;
  static const size_t kReclaimFrom = 128;
  static const size_t kReclaimFactor = 16;

 private:
  typedef InvalSet<BranchInfo,
                   kNPreallocatedInfos,
                   ptrdiff_t,
                   kInvalidOffset,
                   kReclaimFrom,
                   kReclaimFactor>
      BranchInfoTypedSetBase;
  typedef InvalSetIterator<BranchInfoTypedSetBase> BranchInfoTypedSetIterBase;

  // Set of BranchInfo objects that all share one branch type.
  class BranchInfoTypedSet : public BranchInfoTypedSetBase {
   public:
    BranchInfoTypedSet() : BranchInfoTypedSetBase() {}

    // Minimum key in the set (the nearest "first unreachable pc"), or
    // kInvalidOffset if the set is empty.
    ptrdiff_t GetFirstLimit() {
      if (empty()) {
        return kInvalidOffset;
      }
      return GetMinElementKey();
    }
    VIXL_DEPRECATED("GetFirstLimit", ptrdiff_t FirstLimit()) {
      return GetFirstLimit();
    }
  };

  class BranchInfoTypedSetIterator : public BranchInfoTypedSetIterBase {
   public:
    BranchInfoTypedSetIterator() : BranchInfoTypedSetIterBase(NULL) {}
    explicit BranchInfoTypedSetIterator(BranchInfoTypedSet* typed_set)
        : BranchInfoTypedSetIterBase(typed_set) {}

    // TODO: Remove these and use the STL-like interface instead.
    using BranchInfoTypedSetIterBase::Advance;
    using BranchInfoTypedSetIterBase::Current;
  };

  // Aggregate of one BranchInfoTypedSet per tracked branch type.
  class BranchInfoSet {
   public:
    void insert(BranchInfo branch_info) {
      ImmBranchType type = branch_info.branch_type_;
      VIXL_ASSERT(IsValidBranchType(type));
      typed_set_[BranchIndexFromType(type)].insert(branch_info);
    }

    void erase(BranchInfo branch_info) {
      if (IsValidBranchType(branch_info.branch_type_)) {
        int index =
            BranchInfoSet::BranchIndexFromType(branch_info.branch_type_);
        typed_set_[index].erase(branch_info);
      }
    }

    // Total number of tracked branches across all branch types.
    size_t GetSize() const {
      size_t res = 0;
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        res += typed_set_[i].size();
      }
      return res;
    }
    VIXL_DEPRECATED("GetSize", size_t size() const) { return GetSize(); }

    bool IsEmpty() const {
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        if (!typed_set_[i].empty()) {
          return false;
        }
      }
      return true;
    }
    VIXL_DEPRECATED("IsEmpty", bool empty() const) { return IsEmpty(); }

    // Minimum of the per-type limits; kInvalidOffset when nothing is tracked.
    ptrdiff_t GetFirstLimit() {
      ptrdiff_t res = kInvalidOffset;
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        res = std::min(res, typed_set_[i].GetFirstLimit());
      }
      return res;
    }
    VIXL_DEPRECATED("GetFirstLimit", ptrdiff_t FirstLimit()) {
      return GetFirstLimit();
    }

    void Reset() {
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        typed_set_[i].clear();
      }
    }

    // Bidirectional mapping between tracked branch types and indices into
    // typed_set_.
    static ImmBranchType BranchTypeFromIndex(int index) {
      switch (index) {
        case 0:
          return CondBranchType;
        case 1:
          return CompareBranchType;
        case 2:
          return TestBranchType;
        default:
          VIXL_UNREACHABLE();
          return UnknownBranchType;
      }
    }
    static int BranchIndexFromType(ImmBranchType branch_type) {
      switch (branch_type) {
        case CondBranchType:
          return 0;
        case CompareBranchType:
          return 1;
        case TestBranchType:
          return 2;
        default:
          VIXL_UNREACHABLE();
          return 0;
      }
    }

    bool IsValidBranchType(ImmBranchType branch_type) {
      return (branch_type != UnknownBranchType) &&
             (branch_type != UncondBranchType);
    }

   private:
    static const int kNumberOfTrackedBranchTypes = 3;
    BranchInfoTypedSet typed_set_[kNumberOfTrackedBranchTypes];

    friend class VeneerPool;
    friend class BranchInfoSetIterator;
  };

  // Iterates over all BranchInfo entries in a BranchInfoSet, exhausting one
  // branch type before moving on to the next.
  class BranchInfoSetIterator {
   public:
    explicit BranchInfoSetIterator(BranchInfoSet* set) : set_(set) {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        // NOTE(review): placement-new re-initialises the default-constructed
        // sub-iterators in place without destroying them first; presumably the
        // iterator type tolerates this - confirm before changing.
        new (&sub_iterator_[i])
            BranchInfoTypedSetIterator(&(set_->typed_set_[i]));
      }
    }

    // Return the current entry: the first non-exhausted sub-iterator's
    // element. Must not be called when Done().
    VeneerPool::BranchInfo* Current() {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          return sub_iterator_[i].Current();
        }
      }
      VIXL_UNREACHABLE();
      return NULL;
    }

    void Advance() {
      VIXL_ASSERT(!Done());
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          sub_iterator_[i].Advance();
          return;
        }
      }
      VIXL_UNREACHABLE();
    }

    bool Done() const {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) return false;
      }
      return true;
    }

    // Skip all remaining entries of the current branch type.
    void AdvanceToNextType() {
      VIXL_ASSERT(!Done());
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          sub_iterator_[i].Finish();
          return;
        }
      }
      VIXL_UNREACHABLE();
    }

    void DeleteCurrentAndAdvance() {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          sub_iterator_[i].DeleteCurrentAndAdvance();
          return;
        }
      }
    }

   private:
    BranchInfoSet* set_;
    BranchInfoTypedSetIterator
        sub_iterator_[BranchInfoSet::kNumberOfTrackedBranchTypes];
  };

  // Next buffer offset at which this pool must be checked, or
  // kNoCheckpointRequired when no branches are tracked.
  ptrdiff_t GetNextCheckPoint() {
    if (unresolved_branches_.IsEmpty()) {
      return kNoCheckpointRequired;
    } else {
      return unresolved_branches_.GetFirstLimit();
    }
  }
  VIXL_DEPRECATED("GetNextCheckPoint", ptrdiff_t NextCheckPoint()) {
    return GetNextCheckPoint();
  }

  // Information about unresolved (forward) branches.
  BranchInfoSet unresolved_branches_;
};
507
508
509 // Helper for common Emission checks.
510 // The macro-instruction maps to a single instruction.
511 class SingleEmissionCheckScope : public EmissionCheckScope {
512 public:
SingleEmissionCheckScope(MacroAssemblerInterface * masm)513 explicit SingleEmissionCheckScope(MacroAssemblerInterface* masm)
514 : EmissionCheckScope(masm, kInstructionSize) {}
515 };
516
517
518 // The macro instruction is a "typical" macro-instruction. Typical macro-
519 // instruction only emit a few instructions, a few being defined as 8 here.
520 class MacroEmissionCheckScope : public EmissionCheckScope {
521 public:
MacroEmissionCheckScope(MacroAssemblerInterface * masm)522 explicit MacroEmissionCheckScope(MacroAssemblerInterface* masm)
523 : EmissionCheckScope(masm, kTypicalMacroInstructionMaxSize) {}
524
525 private:
526 static const size_t kTypicalMacroInstructionMaxSize = 8 * kInstructionSize;
527 };
528
529
530 // This scope simplifies the handling of the SVE `movprfx` instruction.
531 //
532 // If dst.Aliases(src):
533 // - Start an ExactAssemblyScope(masm, kInstructionSize).
534 // Otherwise:
535 // - Start an ExactAssemblyScope(masm, 2 * kInstructionSize).
536 // - Generate a suitable `movprfx` instruction.
537 //
538 // In both cases, the ExactAssemblyScope is left with enough remaining space for
539 // exactly one destructive instruction.
540 class MovprfxHelperScope : public ExactAssemblyScope {
541 public:
542 inline MovprfxHelperScope(MacroAssembler* masm,
543 const ZRegister& dst,
544 const ZRegister& src);
545
546 inline MovprfxHelperScope(MacroAssembler* masm,
547 const ZRegister& dst,
548 const PRegister& pg,
549 const ZRegister& src);
550
551 // TODO: Implement constructors that examine _all_ sources. If `dst` aliases
552 // any other source register, we can't use `movprfx`. This isn't obviously
553 // useful, but the MacroAssembler should not generate invalid code for it.
554 // Valid behaviour can be implemented using `mov`.
555 //
556 // The best way to handle this in an instruction-agnostic way is probably to
557 // use variadic templates.
558
559 private:
ShouldGenerateMovprfx(const ZRegister & dst,const ZRegister & src)560 inline bool ShouldGenerateMovprfx(const ZRegister& dst,
561 const ZRegister& src) {
562 VIXL_ASSERT(AreSameLaneSize(dst, src));
563 return !dst.Aliases(src);
564 }
565
ShouldGenerateMovprfx(const ZRegister & dst,const PRegister & pg,const ZRegister & src)566 inline bool ShouldGenerateMovprfx(const ZRegister& dst,
567 const PRegister& pg,
568 const ZRegister& src) {
569 VIXL_ASSERT(pg.IsMerging() || pg.IsZeroing());
570 // We need to emit movprfx in two cases:
571 // 1. To give a predicated merging unary instruction zeroing predication.
572 // 2. To make destructive instructions constructive.
573 //
574 // There are no predicated zeroing instructions that can take movprfx, so we
575 // will never generate an unnecessary movprfx with this logic.
576 return pg.IsZeroing() || ShouldGenerateMovprfx(dst, src);
577 }
578 };
579
580
// Branch selectors accepted by the generic branch-generation macros.
enum BranchType {
  // Copies of architectural conditions.
  // The associated conditions can be used in place of those, the code will
  // take care of reinterpreting them with the correct type.
  integer_eq = eq,
  integer_ne = ne,
  integer_hs = hs,
  integer_lo = lo,
  integer_mi = mi,
  integer_pl = pl,
  integer_vs = vs,
  integer_vc = vc,
  integer_hi = hi,
  integer_ls = ls,
  integer_ge = ge,
  integer_lt = lt,
  integer_gt = gt,
  integer_le = le,
  integer_al = al,
  integer_nv = nv,

  // These two are *different* from the architectural codes al and nv.
  // 'always' is used to generate unconditional branches.
  // 'never' is used to not generate a branch (generally as the inverse
  // branch type of 'always').
  always,
  never,
  // cbz and cbnz
  reg_zero,
  reg_not_zero,
  // tbz and tbnz
  reg_bit_clear,
  reg_bit_set,

  // Aliases.
  kBranchTypeFirstCondition = eq,
  kBranchTypeLastCondition = nv,
  kBranchTypeFirstUsingReg = reg_zero,
  kBranchTypeFirstUsingBit = reg_bit_clear,

  // SVE branch conditions (aliases of the architectural conditions above).
  integer_none = eq,
  integer_any = ne,
  integer_nlast = cs,
  integer_last = cc,
  integer_first = mi,
  integer_nfrst = pl,
  integer_pmore = hi,
  integer_plast = ls,
  integer_tcont = ge,
  integer_tstop = lt
};
633
634
// Policy accepted by Mov(const Register&, const Operand&, DiscardMoveMode):
// whether a move between identical W registers may be discarded — presumably
// to preserve (or not) the implicit clearing of the top 32 bits; verify
// against the Mov implementation.
enum DiscardMoveMode { kDontDiscardForSameWReg, kDiscardForSameWReg };
636
637 // The macro assembler supports moving automatically pre-shifted immediates for
638 // arithmetic and logical instructions, and then applying a post shift in the
639 // instruction to undo the modification, in order to reduce the code emitted for
640 // an operation. For example:
641 //
642 // Add(x0, x0, 0x1f7de) => movz x16, 0xfbef; add x0, x0, x16, lsl #1.
643 //
644 // This optimisation can be only partially applied when the stack pointer is an
645 // operand or destination, so this enumeration is used to control the shift.
// Pre-shift policy for MoveImmediateForShiftedOp (see the explanation above).
enum PreShiftImmMode {
  kNoShift,          // Don't pre-shift.
  kLimitShiftForSP,  // Limit pre-shift for add/sub extend use.
  kAnyShift          // Allow any pre-shift.
};
651
// Controls which NaN is selected when both inputs of an FP min/max-style
// macro are NaN.
enum FPMacroNaNPropagationOption {
  // The default option. This generates a run-time error in macros that respect
  // this option.
  NoFPMacroNaNPropagationSelected,
  // For example, Fmin(result, NaN(a), NaN(b)) always selects NaN(a) if NaN(a)
  // and NaN(b) are both quiet, or both are signalling, at the cost of extra
  // code generation in some cases.
  StrictNaNPropagation,
  // For example, Fmin(result, NaN(a), NaN(b)) selects either NaN, but using
  // the fewest instructions.
  FastNaNPropagation
};
664
665 class MacroAssembler : public Assembler, public MacroAssemblerInterface {
666 public:
667 explicit MacroAssembler(
668 PositionIndependentCodeOption pic = PositionIndependentCode);
669 MacroAssembler(size_t capacity,
670 PositionIndependentCodeOption pic = PositionIndependentCode);
671 MacroAssembler(byte* buffer,
672 size_t capacity,
673 PositionIndependentCodeOption pic = PositionIndependentCode);
674 ~MacroAssembler();
675
676 enum FinalizeOption {
677 kFallThrough, // There may be more code to execute after calling Finalize.
678 kUnreachable // Anything generated after calling Finalize is unreachable.
679 };
680
AsAssemblerBase()681 virtual vixl::internal::AssemblerBase* AsAssemblerBase() VIXL_OVERRIDE {
682 return this;
683 }
684
685 // TODO(pools): implement these functions.
EmitPoolHeader()686 virtual void EmitPoolHeader() VIXL_OVERRIDE {}
EmitPoolFooter()687 virtual void EmitPoolFooter() VIXL_OVERRIDE {}
EmitPaddingBytes(int n)688 virtual void EmitPaddingBytes(int n) VIXL_OVERRIDE { USE(n); }
EmitNopBytes(int n)689 virtual void EmitNopBytes(int n) VIXL_OVERRIDE { USE(n); }
690
691 // Start generating code from the beginning of the buffer, discarding any code
692 // and data that has already been emitted into the buffer.
693 //
694 // In order to avoid any accidental transfer of state, Reset ASSERTs that the
695 // constant pool is not blocked.
696 void Reset();
697
698 // Finalize a code buffer of generated instructions. This function must be
699 // called before executing or copying code from the buffer. By default,
700 // anything generated after this should not be reachable (the last instruction
701 // generated is an unconditional branch). If you need to generate more code,
702 // then set `option` to kFallThrough.
703 void FinalizeCode(FinalizeOption option = kUnreachable);
704
705
706 // Constant generation helpers.
707 // These functions return the number of instructions required to move the
708 // immediate into the destination register. Also, if the masm pointer is
709 // non-null, it generates the code to do so.
710 // The two features are implemented using one function to avoid duplication of
711 // the logic.
712 // The function can be used to evaluate the cost of synthesizing an
713 // instruction using 'mov immediate' instructions. A user might prefer loading
714 // a constant using the literal pool instead of using multiple 'mov immediate'
715 // instructions.
716 static int MoveImmediateHelper(MacroAssembler* masm,
717 const Register& rd,
718 uint64_t imm);
719 static bool OneInstrMoveImmediateHelper(MacroAssembler* masm,
720 const Register& dst,
721 uint64_t imm);
722
723
724 // Logical macros.
725 void And(const Register& rd, const Register& rn, const Operand& operand);
726 void Ands(const Register& rd, const Register& rn, const Operand& operand);
727 void Bic(const Register& rd, const Register& rn, const Operand& operand);
728 void Bics(const Register& rd, const Register& rn, const Operand& operand);
729 void Orr(const Register& rd, const Register& rn, const Operand& operand);
730 void Orn(const Register& rd, const Register& rn, const Operand& operand);
731 void Eor(const Register& rd, const Register& rn, const Operand& operand);
732 void Eon(const Register& rd, const Register& rn, const Operand& operand);
733 void Tst(const Register& rn, const Operand& operand);
734 void LogicalMacro(const Register& rd,
735 const Register& rn,
736 const Operand& operand,
737 LogicalOp op);
738
739 // Add and sub macros.
740 void Add(const Register& rd,
741 const Register& rn,
742 const Operand& operand,
743 FlagsUpdate S = LeaveFlags);
744 void Adds(const Register& rd, const Register& rn, const Operand& operand);
745 void Sub(const Register& rd,
746 const Register& rn,
747 const Operand& operand,
748 FlagsUpdate S = LeaveFlags);
749 void Subs(const Register& rd, const Register& rn, const Operand& operand);
750 void Cmn(const Register& rn, const Operand& operand);
751 void Cmp(const Register& rn, const Operand& operand);
752 void Neg(const Register& rd, const Operand& operand);
753 void Negs(const Register& rd, const Operand& operand);
754
755 void AddSubMacro(const Register& rd,
756 const Register& rn,
757 const Operand& operand,
758 FlagsUpdate S,
759 AddSubOp op);
760
761 // Add/sub with carry macros.
762 void Adc(const Register& rd, const Register& rn, const Operand& operand);
763 void Adcs(const Register& rd, const Register& rn, const Operand& operand);
764 void Sbc(const Register& rd, const Register& rn, const Operand& operand);
765 void Sbcs(const Register& rd, const Register& rn, const Operand& operand);
766 void Ngc(const Register& rd, const Operand& operand);
767 void Ngcs(const Register& rd, const Operand& operand);
768 void AddSubWithCarryMacro(const Register& rd,
769 const Register& rn,
770 const Operand& operand,
771 FlagsUpdate S,
772 AddSubWithCarryOp op);
773
774 void Rmif(const Register& xn, unsigned shift, StatusFlags flags);
775 void Setf8(const Register& wn);
776 void Setf16(const Register& wn);
777
778 // Move macros.
779 void Mov(const Register& rd, uint64_t imm);
780 void Mov(const Register& rd,
781 const Operand& operand,
782 DiscardMoveMode discard_mode = kDontDiscardForSameWReg);
Mvn(const Register & rd,uint64_t imm)783 void Mvn(const Register& rd, uint64_t imm) {
784 Mov(rd, (rd.GetSizeInBits() == kXRegSize) ? ~imm : (~imm & kWRegMask));
785 }
786 void Mvn(const Register& rd, const Operand& operand);
787
788 // Try to move an immediate into the destination register in a single
789 // instruction. Returns true for success, and updates the contents of dst.
790 // Returns false, otherwise.
791 bool TryOneInstrMoveImmediate(const Register& dst, uint64_t imm);
792
793 // Move an immediate into register dst, and return an Operand object for
794 // use with a subsequent instruction that accepts a shift. The value moved
795 // into dst is not necessarily equal to imm; it may have had a shifting
796 // operation applied to it that will be subsequently undone by the shift
797 // applied in the Operand.
798 Operand MoveImmediateForShiftedOp(const Register& dst,
799 uint64_t imm,
800 PreShiftImmMode mode);
801
802 void Move(const GenericOperand& dst, const GenericOperand& src);
803
804 // Synthesises the address represented by a MemOperand into a register.
805 void ComputeAddress(const Register& dst, const MemOperand& mem_op);
806
807 // Conditional macros.
808 void Ccmp(const Register& rn,
809 const Operand& operand,
810 StatusFlags nzcv,
811 Condition cond);
812 void Ccmn(const Register& rn,
813 const Operand& operand,
814 StatusFlags nzcv,
815 Condition cond);
816 void ConditionalCompareMacro(const Register& rn,
817 const Operand& operand,
818 StatusFlags nzcv,
819 Condition cond,
820 ConditionalCompareOp op);
821
822 // On return, the boolean values pointed to will indicate whether `left` and
823 // `right` should be synthesised in a temporary register.
GetCselSynthesisInformation(const Register & rd,const Operand & left,const Operand & right,bool * should_synthesise_left,bool * should_synthesise_right)824 static void GetCselSynthesisInformation(const Register& rd,
825 const Operand& left,
826 const Operand& right,
827 bool* should_synthesise_left,
828 bool* should_synthesise_right) {
829 // Note that the helper does not need to look at the condition.
830 CselHelper(NULL,
831 rd,
832 left,
833 right,
834 eq,
835 should_synthesise_left,
836 should_synthesise_right);
837 }
838
Csel(const Register & rd,const Operand & left,const Operand & right,Condition cond)839 void Csel(const Register& rd,
840 const Operand& left,
841 const Operand& right,
842 Condition cond) {
843 CselHelper(this, rd, left, right, cond);
844 }
845
846 // Load/store macros.
847 #define DECLARE_FUNCTION(FN, REGTYPE, REG, OP) \
848 void FN(const REGTYPE REG, const MemOperand& addr);
849 LS_MACRO_LIST(DECLARE_FUNCTION)
850 #undef DECLARE_FUNCTION
851
852 void LoadStoreMacro(const CPURegister& rt,
853 const MemOperand& addr,
854 LoadStoreOp op);
855
856 #define DECLARE_FUNCTION(FN, REGTYPE, REG, REG2, OP) \
857 void FN(const REGTYPE REG, const REGTYPE REG2, const MemOperand& addr);
858 LSPAIR_MACRO_LIST(DECLARE_FUNCTION)
859 #undef DECLARE_FUNCTION
860
861 void LoadStorePairMacro(const CPURegister& rt,
862 const CPURegister& rt2,
863 const MemOperand& addr,
864 LoadStorePairOp op);
865
866 void Prfm(PrefetchOperation op, const MemOperand& addr);
867
868 // Push or pop up to 4 registers of the same width to or from the stack,
869 // using the current stack pointer as set by SetStackPointer.
870 //
871 // If an argument register is 'NoReg', all further arguments are also assumed
872 // to be 'NoReg', and are thus not pushed or popped.
873 //
874 // Arguments are ordered such that "Push(a, b);" is functionally equivalent
875 // to "Push(a); Push(b);".
876 //
877 // It is valid to push the same register more than once, and there is no
878 // restriction on the order in which registers are specified.
879 //
880 // It is not valid to pop into the same register more than once in one
881 // operation, not even into the zero register.
882 //
883 // If the current stack pointer (as set by SetStackPointer) is sp, then it
884 // must be aligned to 16 bytes on entry and the total size of the specified
885 // registers must also be a multiple of 16 bytes.
886 //
887 // Even if the current stack pointer is not the system stack pointer (sp),
888 // Push (and derived methods) will still modify the system stack pointer in
889 // order to comply with ABI rules about accessing memory below the system
890 // stack pointer.
891 //
892 // Other than the registers passed into Pop, the stack pointer and (possibly)
893 // the system stack pointer, these methods do not modify any other registers.
894 void Push(const CPURegister& src0,
895 const CPURegister& src1 = NoReg,
896 const CPURegister& src2 = NoReg,
897 const CPURegister& src3 = NoReg);
898 void Pop(const CPURegister& dst0,
899 const CPURegister& dst1 = NoReg,
900 const CPURegister& dst2 = NoReg,
901 const CPURegister& dst3 = NoReg);
902
903 // Alternative forms of Push and Pop, taking a RegList or CPURegList that
904 // specifies the registers that are to be pushed or popped. Higher-numbered
905 // registers are associated with higher memory addresses (as in the A32 push
906 // and pop instructions).
907 //
908 // (Push|Pop)SizeRegList allow you to specify the register size as a
909 // parameter. Only kXRegSize, kWRegSize, kDRegSize and kSRegSize are
910 // supported.
911 //
912 // Otherwise, (Push|Pop)(CPU|X|W|D|S)RegList is preferred.
913 void PushCPURegList(CPURegList registers);
914 void PopCPURegList(CPURegList registers);
915
916 void PushSizeRegList(
917 RegList registers,
918 unsigned reg_size,
919 CPURegister::RegisterType type = CPURegister::kRegister) {
920 PushCPURegList(CPURegList(type, reg_size, registers));
921 }
922 void PopSizeRegList(RegList registers,
923 unsigned reg_size,
924 CPURegister::RegisterType type = CPURegister::kRegister) {
925 PopCPURegList(CPURegList(type, reg_size, registers));
926 }
PushXRegList(RegList regs)927 void PushXRegList(RegList regs) { PushSizeRegList(regs, kXRegSize); }
PopXRegList(RegList regs)928 void PopXRegList(RegList regs) { PopSizeRegList(regs, kXRegSize); }
PushWRegList(RegList regs)929 void PushWRegList(RegList regs) { PushSizeRegList(regs, kWRegSize); }
PopWRegList(RegList regs)930 void PopWRegList(RegList regs) { PopSizeRegList(regs, kWRegSize); }
PushDRegList(RegList regs)931 void PushDRegList(RegList regs) {
932 PushSizeRegList(regs, kDRegSize, CPURegister::kVRegister);
933 }
PopDRegList(RegList regs)934 void PopDRegList(RegList regs) {
935 PopSizeRegList(regs, kDRegSize, CPURegister::kVRegister);
936 }
PushSRegList(RegList regs)937 void PushSRegList(RegList regs) {
938 PushSizeRegList(regs, kSRegSize, CPURegister::kVRegister);
939 }
PopSRegList(RegList regs)940 void PopSRegList(RegList regs) {
941 PopSizeRegList(regs, kSRegSize, CPURegister::kVRegister);
942 }
943
944 // Push the specified register 'count' times.
945 void PushMultipleTimes(int count, Register src);
946
947 // Poke 'src' onto the stack. The offset is in bytes.
948 //
949 // If the current stack pointer (as set by SetStackPointer) is sp, then sp
950 // must be aligned to 16 bytes.
951 void Poke(const Register& src, const Operand& offset);
952
953 // Peek at a value on the stack, and put it in 'dst'. The offset is in bytes.
954 //
955 // If the current stack pointer (as set by SetStackPointer) is sp, then sp
956 // must be aligned to 16 bytes.
957 void Peek(const Register& dst, const Operand& offset);
958
959 // Alternative forms of Peek and Poke, taking a RegList or CPURegList that
960 // specifies the registers that are to be pushed or popped. Higher-numbered
961 // registers are associated with higher memory addresses.
962 //
963 // (Peek|Poke)SizeRegList allow you to specify the register size as a
964 // parameter. Only kXRegSize, kWRegSize, kDRegSize and kSRegSize are
965 // supported.
966 //
967 // Otherwise, (Peek|Poke)(CPU|X|W|D|S)RegList is preferred.
PeekCPURegList(CPURegList registers,int64_t offset)968 void PeekCPURegList(CPURegList registers, int64_t offset) {
969 LoadCPURegList(registers, MemOperand(StackPointer(), offset));
970 }
PokeCPURegList(CPURegList registers,int64_t offset)971 void PokeCPURegList(CPURegList registers, int64_t offset) {
972 StoreCPURegList(registers, MemOperand(StackPointer(), offset));
973 }
974
975 void PeekSizeRegList(
976 RegList registers,
977 int64_t offset,
978 unsigned reg_size,
979 CPURegister::RegisterType type = CPURegister::kRegister) {
980 PeekCPURegList(CPURegList(type, reg_size, registers), offset);
981 }
982 void PokeSizeRegList(
983 RegList registers,
984 int64_t offset,
985 unsigned reg_size,
986 CPURegister::RegisterType type = CPURegister::kRegister) {
987 PokeCPURegList(CPURegList(type, reg_size, registers), offset);
988 }
PeekXRegList(RegList regs,int64_t offset)989 void PeekXRegList(RegList regs, int64_t offset) {
990 PeekSizeRegList(regs, offset, kXRegSize);
991 }
PokeXRegList(RegList regs,int64_t offset)992 void PokeXRegList(RegList regs, int64_t offset) {
993 PokeSizeRegList(regs, offset, kXRegSize);
994 }
PeekWRegList(RegList regs,int64_t offset)995 void PeekWRegList(RegList regs, int64_t offset) {
996 PeekSizeRegList(regs, offset, kWRegSize);
997 }
PokeWRegList(RegList regs,int64_t offset)998 void PokeWRegList(RegList regs, int64_t offset) {
999 PokeSizeRegList(regs, offset, kWRegSize);
1000 }
PeekDRegList(RegList regs,int64_t offset)1001 void PeekDRegList(RegList regs, int64_t offset) {
1002 PeekSizeRegList(regs, offset, kDRegSize, CPURegister::kVRegister);
1003 }
PokeDRegList(RegList regs,int64_t offset)1004 void PokeDRegList(RegList regs, int64_t offset) {
1005 PokeSizeRegList(regs, offset, kDRegSize, CPURegister::kVRegister);
1006 }
PeekSRegList(RegList regs,int64_t offset)1007 void PeekSRegList(RegList regs, int64_t offset) {
1008 PeekSizeRegList(regs, offset, kSRegSize, CPURegister::kVRegister);
1009 }
PokeSRegList(RegList regs,int64_t offset)1010 void PokeSRegList(RegList regs, int64_t offset) {
1011 PokeSizeRegList(regs, offset, kSRegSize, CPURegister::kVRegister);
1012 }
1013
1014
1015 // Claim or drop stack space without actually accessing memory.
1016 //
1017 // If the current stack pointer (as set by SetStackPointer) is sp, then it
1018 // must be aligned to 16 bytes and the size claimed or dropped must be a
1019 // multiple of 16 bytes.
1020 void Claim(const Operand& size);
1021 void Drop(const Operand& size);
1022
1023 // As above, but for multiples of the SVE vector length.
ClaimVL(int64_t multiplier)1024 void ClaimVL(int64_t multiplier) {
1025 // We never need to worry about sp alignment because the VL is always a
1026 // multiple of 16.
1027 VIXL_STATIC_ASSERT((kZRegMinSizeInBytes % 16) == 0);
1028 VIXL_ASSERT(multiplier >= 0);
1029 Addvl(sp, sp, -multiplier);
1030 }
DropVL(int64_t multiplier)1031 void DropVL(int64_t multiplier) {
1032 VIXL_STATIC_ASSERT((kZRegMinSizeInBytes % 16) == 0);
1033 VIXL_ASSERT(multiplier >= 0);
1034 Addvl(sp, sp, multiplier);
1035 }
1036
1037 // Preserve the callee-saved registers (as defined by AAPCS64).
1038 //
1039 // Higher-numbered registers are pushed before lower-numbered registers, and
1040 // thus get higher addresses.
1041 // Floating-point registers are pushed before general-purpose registers, and
1042 // thus get higher addresses.
1043 //
1044 // This method must not be called unless StackPointer() is sp, and it is
1045 // aligned to 16 bytes.
1046 void PushCalleeSavedRegisters();
1047
1048 // Restore the callee-saved registers (as defined by AAPCS64).
1049 //
1050 // Higher-numbered registers are popped after lower-numbered registers, and
1051 // thus come from higher addresses.
1052 // Floating-point registers are popped after general-purpose registers, and
1053 // thus come from higher addresses.
1054 //
1055 // This method must not be called unless StackPointer() is sp, and it is
1056 // aligned to 16 bytes.
1057 void PopCalleeSavedRegisters();
1058
1059 void LoadCPURegList(CPURegList registers, const MemOperand& src);
1060 void StoreCPURegList(CPURegList registers, const MemOperand& dst);
1061
1062 // Remaining instructions are simple pass-through calls to the assembler.
Adr(const Register & rd,Label * label)1063 void Adr(const Register& rd, Label* label) {
1064 VIXL_ASSERT(allow_macro_instructions_);
1065 VIXL_ASSERT(!rd.IsZero());
1066 SingleEmissionCheckScope guard(this);
1067 adr(rd, label);
1068 }
Adrp(const Register & rd,Label * label)1069 void Adrp(const Register& rd, Label* label) {
1070 VIXL_ASSERT(allow_macro_instructions_);
1071 VIXL_ASSERT(!rd.IsZero());
1072 SingleEmissionCheckScope guard(this);
1073 adrp(rd, label);
1074 }
Asr(const Register & rd,const Register & rn,unsigned shift)1075 void Asr(const Register& rd, const Register& rn, unsigned shift) {
1076 VIXL_ASSERT(allow_macro_instructions_);
1077 VIXL_ASSERT(!rd.IsZero());
1078 VIXL_ASSERT(!rn.IsZero());
1079 SingleEmissionCheckScope guard(this);
1080 asr(rd, rn, shift);
1081 }
Asr(const Register & rd,const Register & rn,const Register & rm)1082 void Asr(const Register& rd, const Register& rn, const Register& rm) {
1083 VIXL_ASSERT(allow_macro_instructions_);
1084 VIXL_ASSERT(!rd.IsZero());
1085 VIXL_ASSERT(!rn.IsZero());
1086 VIXL_ASSERT(!rm.IsZero());
1087 SingleEmissionCheckScope guard(this);
1088 asrv(rd, rn, rm);
1089 }
1090
1091 // Branch type inversion relies on these relations.
1092 VIXL_STATIC_ASSERT((reg_zero == (reg_not_zero ^ 1)) &&
1093 (reg_bit_clear == (reg_bit_set ^ 1)) &&
1094 (always == (never ^ 1)));
1095
InvertBranchType(BranchType type)1096 BranchType InvertBranchType(BranchType type) {
1097 if (kBranchTypeFirstCondition <= type && type <= kBranchTypeLastCondition) {
1098 return static_cast<BranchType>(
1099 InvertCondition(static_cast<Condition>(type)));
1100 } else {
1101 return static_cast<BranchType>(type ^ 1);
1102 }
1103 }
1104
1105 void B(Label* label, BranchType type, Register reg = NoReg, int bit = -1);
1106
1107 void B(Label* label);
1108 void B(Label* label, Condition cond);
B(Condition cond,Label * label)1109 void B(Condition cond, Label* label) { B(label, cond); }
Bfm(const Register & rd,const Register & rn,unsigned immr,unsigned imms)1110 void Bfm(const Register& rd,
1111 const Register& rn,
1112 unsigned immr,
1113 unsigned imms) {
1114 VIXL_ASSERT(allow_macro_instructions_);
1115 VIXL_ASSERT(!rd.IsZero());
1116 VIXL_ASSERT(!rn.IsZero());
1117 SingleEmissionCheckScope guard(this);
1118 bfm(rd, rn, immr, imms);
1119 }
Bfi(const Register & rd,const Register & rn,unsigned lsb,unsigned width)1120 void Bfi(const Register& rd,
1121 const Register& rn,
1122 unsigned lsb,
1123 unsigned width) {
1124 VIXL_ASSERT(allow_macro_instructions_);
1125 VIXL_ASSERT(!rd.IsZero());
1126 VIXL_ASSERT(!rn.IsZero());
1127 SingleEmissionCheckScope guard(this);
1128 bfi(rd, rn, lsb, width);
1129 }
Bfc(const Register & rd,unsigned lsb,unsigned width)1130 void Bfc(const Register& rd, unsigned lsb, unsigned width) {
1131 VIXL_ASSERT(allow_macro_instructions_);
1132 VIXL_ASSERT(!rd.IsZero());
1133 SingleEmissionCheckScope guard(this);
1134 bfc(rd, lsb, width);
1135 }
Bfxil(const Register & rd,const Register & rn,unsigned lsb,unsigned width)1136 void Bfxil(const Register& rd,
1137 const Register& rn,
1138 unsigned lsb,
1139 unsigned width) {
1140 VIXL_ASSERT(allow_macro_instructions_);
1141 VIXL_ASSERT(!rd.IsZero());
1142 VIXL_ASSERT(!rn.IsZero());
1143 SingleEmissionCheckScope guard(this);
1144 bfxil(rd, rn, lsb, width);
1145 }
1146 void Bind(Label* label, BranchTargetIdentifier id = EmitBTI_none);
1147 // Bind a label to a specified offset from the start of the buffer.
1148 void BindToOffset(Label* label, ptrdiff_t offset);
Bl(Label * label)1149 void Bl(Label* label) {
1150 VIXL_ASSERT(allow_macro_instructions_);
1151 SingleEmissionCheckScope guard(this);
1152 bl(label);
1153 }
Blr(const Register & xn)1154 void Blr(const Register& xn) {
1155 VIXL_ASSERT(allow_macro_instructions_);
1156 VIXL_ASSERT(!xn.IsZero());
1157 SingleEmissionCheckScope guard(this);
1158 blr(xn);
1159 }
Br(const Register & xn)1160 void Br(const Register& xn) {
1161 VIXL_ASSERT(allow_macro_instructions_);
1162 VIXL_ASSERT(!xn.IsZero());
1163 SingleEmissionCheckScope guard(this);
1164 br(xn);
1165 }
Braaz(const Register & xn)1166 void Braaz(const Register& xn) {
1167 VIXL_ASSERT(allow_macro_instructions_);
1168 SingleEmissionCheckScope guard(this);
1169 braaz(xn);
1170 }
Brabz(const Register & xn)1171 void Brabz(const Register& xn) {
1172 VIXL_ASSERT(allow_macro_instructions_);
1173 SingleEmissionCheckScope guard(this);
1174 brabz(xn);
1175 }
Blraaz(const Register & xn)1176 void Blraaz(const Register& xn) {
1177 VIXL_ASSERT(allow_macro_instructions_);
1178 SingleEmissionCheckScope guard(this);
1179 blraaz(xn);
1180 }
Blrabz(const Register & xn)1181 void Blrabz(const Register& xn) {
1182 VIXL_ASSERT(allow_macro_instructions_);
1183 SingleEmissionCheckScope guard(this);
1184 blrabz(xn);
1185 }
Retaa()1186 void Retaa() {
1187 VIXL_ASSERT(allow_macro_instructions_);
1188 SingleEmissionCheckScope guard(this);
1189 retaa();
1190 }
Retab()1191 void Retab() {
1192 VIXL_ASSERT(allow_macro_instructions_);
1193 SingleEmissionCheckScope guard(this);
1194 retab();
1195 }
Braa(const Register & xn,const Register & xm)1196 void Braa(const Register& xn, const Register& xm) {
1197 VIXL_ASSERT(allow_macro_instructions_);
1198 SingleEmissionCheckScope guard(this);
1199 braa(xn, xm);
1200 }
Brab(const Register & xn,const Register & xm)1201 void Brab(const Register& xn, const Register& xm) {
1202 VIXL_ASSERT(allow_macro_instructions_);
1203 SingleEmissionCheckScope guard(this);
1204 brab(xn, xm);
1205 }
Blraa(const Register & xn,const Register & xm)1206 void Blraa(const Register& xn, const Register& xm) {
1207 VIXL_ASSERT(allow_macro_instructions_);
1208 SingleEmissionCheckScope guard(this);
1209 blraa(xn, xm);
1210 }
Blrab(const Register & xn,const Register & xm)1211 void Blrab(const Register& xn, const Register& xm) {
1212 VIXL_ASSERT(allow_macro_instructions_);
1213 SingleEmissionCheckScope guard(this);
1214 blrab(xn, xm);
1215 }
1216 void Brk(int code = 0) {
1217 VIXL_ASSERT(allow_macro_instructions_);
1218 SingleEmissionCheckScope guard(this);
1219 brk(code);
1220 }
1221 void Cbnz(const Register& rt, Label* label);
1222 void Cbz(const Register& rt, Label* label);
Cinc(const Register & rd,const Register & rn,Condition cond)1223 void Cinc(const Register& rd, const Register& rn, Condition cond) {
1224 VIXL_ASSERT(allow_macro_instructions_);
1225 VIXL_ASSERT(!rd.IsZero());
1226 VIXL_ASSERT(!rn.IsZero());
1227 SingleEmissionCheckScope guard(this);
1228 cinc(rd, rn, cond);
1229 }
Cinv(const Register & rd,const Register & rn,Condition cond)1230 void Cinv(const Register& rd, const Register& rn, Condition cond) {
1231 VIXL_ASSERT(allow_macro_instructions_);
1232 VIXL_ASSERT(!rd.IsZero());
1233 VIXL_ASSERT(!rn.IsZero());
1234 SingleEmissionCheckScope guard(this);
1235 cinv(rd, rn, cond);
1236 }
1237
1238 #define PAUTH_SYSTEM_MODES(V) \
1239 V(az) \
1240 V(bz) \
1241 V(asp) \
1242 V(bsp)
1243
1244 #define DEFINE_MACRO_ASM_FUNCS(SUFFIX) \
1245 void Paci##SUFFIX() { \
1246 VIXL_ASSERT(allow_macro_instructions_); \
1247 SingleEmissionCheckScope guard(this); \
1248 paci##SUFFIX(); \
1249 } \
1250 void Auti##SUFFIX() { \
1251 VIXL_ASSERT(allow_macro_instructions_); \
1252 SingleEmissionCheckScope guard(this); \
1253 auti##SUFFIX(); \
1254 }
1255
PAUTH_SYSTEM_MODES(DEFINE_MACRO_ASM_FUNCS)1256 PAUTH_SYSTEM_MODES(DEFINE_MACRO_ASM_FUNCS)
1257 #undef DEFINE_MACRO_ASM_FUNCS
1258
1259 // The 1716 pac and aut instructions encourage people to use x16 and x17
1260 // directly, perhaps without realising that this is forbidden. For example:
1261 //
1262 // UseScratchRegisterScope temps(&masm);
1263 // Register temp = temps.AcquireX(); // temp will be x16
1264 // __ Mov(x17, ptr);
1265 // __ Mov(x16, modifier); // Will override temp!
1266 // __ Pacia1716();
1267 //
1268 // To work around this issue, you must exclude x16 and x17 from the scratch
1269 // register list. You may need to replace them with other registers:
1270 //
1271 // UseScratchRegisterScope temps(&masm);
1272 // temps.Exclude(x16, x17);
1273 // temps.Include(x10, x11);
1274 // __ Mov(x17, ptr);
1275 // __ Mov(x16, modifier);
1276 // __ Pacia1716();
1277 void Pacia1716() {
1278 VIXL_ASSERT(allow_macro_instructions_);
1279 VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x16));
1280 VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x17));
1281 SingleEmissionCheckScope guard(this);
1282 pacia1716();
1283 }
Pacib1716()1284 void Pacib1716() {
1285 VIXL_ASSERT(allow_macro_instructions_);
1286 VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x16));
1287 VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x17));
1288 SingleEmissionCheckScope guard(this);
1289 pacib1716();
1290 }
Autia1716()1291 void Autia1716() {
1292 VIXL_ASSERT(allow_macro_instructions_);
1293 VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x16));
1294 VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x17));
1295 SingleEmissionCheckScope guard(this);
1296 autia1716();
1297 }
Autib1716()1298 void Autib1716() {
1299 VIXL_ASSERT(allow_macro_instructions_);
1300 VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x16));
1301 VIXL_ASSERT(!GetScratchRegisterList()->IncludesAliasOf(x17));
1302 SingleEmissionCheckScope guard(this);
1303 autib1716();
1304 }
Xpaclri()1305 void Xpaclri() {
1306 VIXL_ASSERT(allow_macro_instructions_);
1307 SingleEmissionCheckScope guard(this);
1308 xpaclri();
1309 }
Clrex()1310 void Clrex() {
1311 VIXL_ASSERT(allow_macro_instructions_);
1312 SingleEmissionCheckScope guard(this);
1313 clrex();
1314 }
Cls(const Register & rd,const Register & rn)1315 void Cls(const Register& rd, const Register& rn) {
1316 VIXL_ASSERT(allow_macro_instructions_);
1317 VIXL_ASSERT(!rd.IsZero());
1318 VIXL_ASSERT(!rn.IsZero());
1319 SingleEmissionCheckScope guard(this);
1320 cls(rd, rn);
1321 }
Clz(const Register & rd,const Register & rn)1322 void Clz(const Register& rd, const Register& rn) {
1323 VIXL_ASSERT(allow_macro_instructions_);
1324 VIXL_ASSERT(!rd.IsZero());
1325 VIXL_ASSERT(!rn.IsZero());
1326 SingleEmissionCheckScope guard(this);
1327 clz(rd, rn);
1328 }
Cneg(const Register & rd,const Register & rn,Condition cond)1329 void Cneg(const Register& rd, const Register& rn, Condition cond) {
1330 VIXL_ASSERT(allow_macro_instructions_);
1331 VIXL_ASSERT(!rd.IsZero());
1332 VIXL_ASSERT(!rn.IsZero());
1333 SingleEmissionCheckScope guard(this);
1334 cneg(rd, rn, cond);
1335 }
Esb()1336 void Esb() {
1337 VIXL_ASSERT(allow_macro_instructions_);
1338 SingleEmissionCheckScope guard(this);
1339 esb();
1340 }
Csdb()1341 void Csdb() {
1342 VIXL_ASSERT(allow_macro_instructions_);
1343 SingleEmissionCheckScope guard(this);
1344 csdb();
1345 }
Cset(const Register & rd,Condition cond)1346 void Cset(const Register& rd, Condition cond) {
1347 VIXL_ASSERT(allow_macro_instructions_);
1348 VIXL_ASSERT(!rd.IsZero());
1349 SingleEmissionCheckScope guard(this);
1350 cset(rd, cond);
1351 }
Csetm(const Register & rd,Condition cond)1352 void Csetm(const Register& rd, Condition cond) {
1353 VIXL_ASSERT(allow_macro_instructions_);
1354 VIXL_ASSERT(!rd.IsZero());
1355 SingleEmissionCheckScope guard(this);
1356 csetm(rd, cond);
1357 }
Csinc(const Register & rd,const Register & rn,const Register & rm,Condition cond)1358 void Csinc(const Register& rd,
1359 const Register& rn,
1360 const Register& rm,
1361 Condition cond) {
1362 VIXL_ASSERT(allow_macro_instructions_);
1363 VIXL_ASSERT(!rd.IsZero());
1364 VIXL_ASSERT((cond != al) && (cond != nv));
1365 SingleEmissionCheckScope guard(this);
1366 csinc(rd, rn, rm, cond);
1367 }
Csinv(const Register & rd,const Register & rn,const Register & rm,Condition cond)1368 void Csinv(const Register& rd,
1369 const Register& rn,
1370 const Register& rm,
1371 Condition cond) {
1372 VIXL_ASSERT(allow_macro_instructions_);
1373 VIXL_ASSERT(!rd.IsZero());
1374 VIXL_ASSERT((cond != al) && (cond != nv));
1375 SingleEmissionCheckScope guard(this);
1376 csinv(rd, rn, rm, cond);
1377 }
Csneg(const Register & rd,const Register & rn,const Register & rm,Condition cond)1378 void Csneg(const Register& rd,
1379 const Register& rn,
1380 const Register& rm,
1381 Condition cond) {
1382 VIXL_ASSERT(allow_macro_instructions_);
1383 VIXL_ASSERT(!rd.IsZero());
1384 VIXL_ASSERT((cond != al) && (cond != nv));
1385 SingleEmissionCheckScope guard(this);
1386 csneg(rd, rn, rm, cond);
1387 }
Dmb(BarrierDomain domain,BarrierType type)1388 void Dmb(BarrierDomain domain, BarrierType type) {
1389 VIXL_ASSERT(allow_macro_instructions_);
1390 SingleEmissionCheckScope guard(this);
1391 dmb(domain, type);
1392 }
Dsb(BarrierDomain domain,BarrierType type)1393 void Dsb(BarrierDomain domain, BarrierType type) {
1394 VIXL_ASSERT(allow_macro_instructions_);
1395 SingleEmissionCheckScope guard(this);
1396 dsb(domain, type);
1397 }
Extr(const Register & rd,const Register & rn,const Register & rm,unsigned lsb)1398 void Extr(const Register& rd,
1399 const Register& rn,
1400 const Register& rm,
1401 unsigned lsb) {
1402 VIXL_ASSERT(allow_macro_instructions_);
1403 VIXL_ASSERT(!rd.IsZero());
1404 VIXL_ASSERT(!rn.IsZero());
1405 VIXL_ASSERT(!rm.IsZero());
1406 SingleEmissionCheckScope guard(this);
1407 extr(rd, rn, rm, lsb);
1408 }
Fadd(const VRegister & vd,const VRegister & vn,const VRegister & vm)1409 void Fadd(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
1410 VIXL_ASSERT(allow_macro_instructions_);
1411 SingleEmissionCheckScope guard(this);
1412 fadd(vd, vn, vm);
1413 }
1414 void Fccmp(const VRegister& vn,
1415 const VRegister& vm,
1416 StatusFlags nzcv,
1417 Condition cond,
1418 FPTrapFlags trap = DisableTrap) {
1419 VIXL_ASSERT(allow_macro_instructions_);
1420 VIXL_ASSERT((cond != al) && (cond != nv));
1421 SingleEmissionCheckScope guard(this);
1422 FPCCompareMacro(vn, vm, nzcv, cond, trap);
1423 }
Fccmpe(const VRegister & vn,const VRegister & vm,StatusFlags nzcv,Condition cond)1424 void Fccmpe(const VRegister& vn,
1425 const VRegister& vm,
1426 StatusFlags nzcv,
1427 Condition cond) {
1428 Fccmp(vn, vm, nzcv, cond, EnableTrap);
1429 }
1430 void Fcmp(const VRegister& vn,
1431 const VRegister& vm,
1432 FPTrapFlags trap = DisableTrap) {
1433 VIXL_ASSERT(allow_macro_instructions_);
1434 SingleEmissionCheckScope guard(this);
1435 FPCompareMacro(vn, vm, trap);
1436 }
1437 void Fcmp(const VRegister& vn, double value, FPTrapFlags trap = DisableTrap);
1438 void Fcmpe(const VRegister& vn, double value);
Fcmpe(const VRegister & vn,const VRegister & vm)1439 void Fcmpe(const VRegister& vn, const VRegister& vm) {
1440 Fcmp(vn, vm, EnableTrap);
1441 }
Fcsel(const VRegister & vd,const VRegister & vn,const VRegister & vm,Condition cond)1442 void Fcsel(const VRegister& vd,
1443 const VRegister& vn,
1444 const VRegister& vm,
1445 Condition cond) {
1446 VIXL_ASSERT(allow_macro_instructions_);
1447 VIXL_ASSERT((cond != al) && (cond != nv));
1448 SingleEmissionCheckScope guard(this);
1449 fcsel(vd, vn, vm, cond);
1450 }
Fcvt(const VRegister & vd,const VRegister & vn)1451 void Fcvt(const VRegister& vd, const VRegister& vn) {
1452 VIXL_ASSERT(allow_macro_instructions_);
1453 SingleEmissionCheckScope guard(this);
1454 fcvt(vd, vn);
1455 }
Fcvtl(const VRegister & vd,const VRegister & vn)1456 void Fcvtl(const VRegister& vd, const VRegister& vn) {
1457 VIXL_ASSERT(allow_macro_instructions_);
1458 SingleEmissionCheckScope guard(this);
1459 fcvtl(vd, vn);
1460 }
Fcvtl2(const VRegister & vd,const VRegister & vn)1461 void Fcvtl2(const VRegister& vd, const VRegister& vn) {
1462 VIXL_ASSERT(allow_macro_instructions_);
1463 SingleEmissionCheckScope guard(this);
1464 fcvtl2(vd, vn);
1465 }
Fcvtn(const VRegister & vd,const VRegister & vn)1466 void Fcvtn(const VRegister& vd, const VRegister& vn) {
1467 VIXL_ASSERT(allow_macro_instructions_);
1468 SingleEmissionCheckScope guard(this);
1469 fcvtn(vd, vn);
1470 }
Fcvtn2(const VRegister & vd,const VRegister & vn)1471 void Fcvtn2(const VRegister& vd, const VRegister& vn) {
1472 VIXL_ASSERT(allow_macro_instructions_);
1473 SingleEmissionCheckScope guard(this);
1474 fcvtn2(vd, vn);
1475 }
Fcvtxn(const VRegister & vd,const VRegister & vn)1476 void Fcvtxn(const VRegister& vd, const VRegister& vn) {
1477 VIXL_ASSERT(allow_macro_instructions_);
1478 SingleEmissionCheckScope guard(this);
1479 fcvtxn(vd, vn);
1480 }
Fcvtxn2(const VRegister & vd,const VRegister & vn)1481 void Fcvtxn2(const VRegister& vd, const VRegister& vn) {
1482 VIXL_ASSERT(allow_macro_instructions_);
1483 SingleEmissionCheckScope guard(this);
1484 fcvtxn2(vd, vn);
1485 }
Fcvtas(const Register & rd,const VRegister & vn)1486 void Fcvtas(const Register& rd, const VRegister& vn) {
1487 VIXL_ASSERT(allow_macro_instructions_);
1488 VIXL_ASSERT(!rd.IsZero());
1489 SingleEmissionCheckScope guard(this);
1490 fcvtas(rd, vn);
1491 }
Fcvtau(const Register & rd,const VRegister & vn)1492 void Fcvtau(const Register& rd, const VRegister& vn) {
1493 VIXL_ASSERT(allow_macro_instructions_);
1494 VIXL_ASSERT(!rd.IsZero());
1495 SingleEmissionCheckScope guard(this);
1496 fcvtau(rd, vn);
1497 }
Fcvtms(const Register & rd,const VRegister & vn)1498 void Fcvtms(const Register& rd, const VRegister& vn) {
1499 VIXL_ASSERT(allow_macro_instructions_);
1500 VIXL_ASSERT(!rd.IsZero());
1501 SingleEmissionCheckScope guard(this);
1502 fcvtms(rd, vn);
1503 }
Fcvtmu(const Register & rd,const VRegister & vn)1504 void Fcvtmu(const Register& rd, const VRegister& vn) {
1505 VIXL_ASSERT(allow_macro_instructions_);
1506 VIXL_ASSERT(!rd.IsZero());
1507 SingleEmissionCheckScope guard(this);
1508 fcvtmu(rd, vn);
1509 }
Fcvtns(const Register & rd,const VRegister & vn)1510 void Fcvtns(const Register& rd, const VRegister& vn) {
1511 VIXL_ASSERT(allow_macro_instructions_);
1512 VIXL_ASSERT(!rd.IsZero());
1513 SingleEmissionCheckScope guard(this);
1514 fcvtns(rd, vn);
1515 }
Fcvtnu(const Register & rd,const VRegister & vn)1516 void Fcvtnu(const Register& rd, const VRegister& vn) {
1517 VIXL_ASSERT(allow_macro_instructions_);
1518 VIXL_ASSERT(!rd.IsZero());
1519 SingleEmissionCheckScope guard(this);
1520 fcvtnu(rd, vn);
1521 }
Fcvtps(const Register & rd,const VRegister & vn)1522 void Fcvtps(const Register& rd, const VRegister& vn) {
1523 VIXL_ASSERT(allow_macro_instructions_);
1524 VIXL_ASSERT(!rd.IsZero());
1525 SingleEmissionCheckScope guard(this);
1526 fcvtps(rd, vn);
1527 }
Fcvtpu(const Register & rd,const VRegister & vn)1528 void Fcvtpu(const Register& rd, const VRegister& vn) {
1529 VIXL_ASSERT(allow_macro_instructions_);
1530 VIXL_ASSERT(!rd.IsZero());
1531 SingleEmissionCheckScope guard(this);
1532 fcvtpu(rd, vn);
1533 }
1534 void Fcvtzs(const Register& rd, const VRegister& vn, int fbits = 0) {
1535 VIXL_ASSERT(allow_macro_instructions_);
1536 VIXL_ASSERT(!rd.IsZero());
1537 SingleEmissionCheckScope guard(this);
1538 fcvtzs(rd, vn, fbits);
1539 }
Fjcvtzs(const Register & rd,const VRegister & vn)1540 void Fjcvtzs(const Register& rd, const VRegister& vn) {
1541 VIXL_ASSERT(allow_macro_instructions_);
1542 VIXL_ASSERT(!rd.IsZero());
1543 SingleEmissionCheckScope guard(this);
1544 fjcvtzs(rd, vn);
1545 }
1546 void Fcvtzu(const Register& rd, const VRegister& vn, int fbits = 0) {
1547 VIXL_ASSERT(allow_macro_instructions_);
1548 VIXL_ASSERT(!rd.IsZero());
1549 SingleEmissionCheckScope guard(this);
1550 fcvtzu(rd, vn, fbits);
1551 }
Fdiv(const VRegister & vd,const VRegister & vn,const VRegister & vm)1552 void Fdiv(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
1553 VIXL_ASSERT(allow_macro_instructions_);
1554 SingleEmissionCheckScope guard(this);
1555 fdiv(vd, vn, vm);
1556 }
Fmax(const VRegister & vd,const VRegister & vn,const VRegister & vm)1557 void Fmax(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
1558 VIXL_ASSERT(allow_macro_instructions_);
1559 SingleEmissionCheckScope guard(this);
1560 fmax(vd, vn, vm);
1561 }
Fmaxnm(const VRegister & vd,const VRegister & vn,const VRegister & vm)1562 void Fmaxnm(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
1563 VIXL_ASSERT(allow_macro_instructions_);
1564 SingleEmissionCheckScope guard(this);
1565 fmaxnm(vd, vn, vm);
1566 }
Fmin(const VRegister & vd,const VRegister & vn,const VRegister & vm)1567 void Fmin(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
1568 VIXL_ASSERT(allow_macro_instructions_);
1569 SingleEmissionCheckScope guard(this);
1570 fmin(vd, vn, vm);
1571 }
Fminnm(const VRegister & vd,const VRegister & vn,const VRegister & vm)1572 void Fminnm(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
1573 VIXL_ASSERT(allow_macro_instructions_);
1574 SingleEmissionCheckScope guard(this);
1575 fminnm(vd, vn, vm);
1576 }
Fmov(const VRegister & vd,const VRegister & vn)1577 void Fmov(const VRegister& vd, const VRegister& vn) {
1578 VIXL_ASSERT(allow_macro_instructions_);
1579 SingleEmissionCheckScope guard(this);
1580 // TODO: Use DiscardMoveMode to allow this move to be elided if vd.Is(vn).
1581 fmov(vd, vn);
1582 }
Fmov(const VRegister & vd,const Register & rn)1583 void Fmov(const VRegister& vd, const Register& rn) {
1584 VIXL_ASSERT(allow_macro_instructions_);
1585 VIXL_ASSERT(!rn.IsZero());
1586 SingleEmissionCheckScope guard(this);
1587 fmov(vd, rn);
1588 }
Fmov(const VRegister & vd,int index,const Register & rn)1589 void Fmov(const VRegister& vd, int index, const Register& rn) {
1590 VIXL_ASSERT(allow_macro_instructions_);
1591 SingleEmissionCheckScope guard(this);
1592 if (vd.Is1D() && (index == 0)) {
1593 mov(vd, index, rn);
1594 } else {
1595 fmov(vd, index, rn);
1596 }
1597 }
Fmov(const Register & rd,const VRegister & vn,int index)1598 void Fmov(const Register& rd, const VRegister& vn, int index) {
1599 VIXL_ASSERT(allow_macro_instructions_);
1600 SingleEmissionCheckScope guard(this);
1601 if (vn.Is1D() && (index == 0)) {
1602 mov(rd, vn, index);
1603 } else {
1604 fmov(rd, vn, index);
1605 }
1606 }
1607
1608 // Provide explicit double and float interfaces for FP immediate moves, rather
1609 // than relying on implicit C++ casts. This allows signalling NaNs to be
1610 // preserved when the immediate matches the format of vd. Most systems convert
1611 // signalling NaNs to quiet NaNs when converting between float and double.
1612 void Fmov(VRegister vd, double imm);
1613 void Fmov(VRegister vd, float imm);
1614 void Fmov(VRegister vd, const Float16 imm);
1615 // Provide a template to allow other types to be converted automatically.
1616 template <typename T>
Fmov(VRegister vd,T imm)1617 void Fmov(VRegister vd, T imm) {
1618 VIXL_ASSERT(allow_macro_instructions_);
1619 Fmov(vd, static_cast<double>(imm));
1620 }
Fmov(Register rd,VRegister vn)1621 void Fmov(Register rd, VRegister vn) {
1622 VIXL_ASSERT(allow_macro_instructions_);
1623 VIXL_ASSERT(!rd.IsZero());
1624 SingleEmissionCheckScope guard(this);
1625 fmov(rd, vn);
1626 }
  // Floating-point arithmetic macros (FMUL/FNMUL/FSUB and the fused
  // FMADD/FMSUB/FNMADD/FNMSUB family). Each wrapper checks that macro
  // instructions are allowed and then emits exactly one instruction.
  void Fmul(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmul(vd, vn, vm);
  }
  void Fnmul(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fnmul(vd, vn, vm);
  }
  void Fmadd(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmadd(vd, vn, vm, va);
  }
  void Fmsub(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmsub(vd, vn, vm, va);
  }
  void Fnmadd(const VRegister& vd,
              const VRegister& vn,
              const VRegister& vm,
              const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fnmadd(vd, vn, vm, va);
  }
  void Fnmsub(const VRegister& vd,
              const VRegister& vn,
              const VRegister& vm,
              const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fnmsub(vd, vn, vm, va);
  }
  void Fsub(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fsub(vd, vn, vm);
  }
  // Emit a hint instruction from the SystemHint enumeration.
  void Hint(SystemHint code) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    hint(code);
  }
  // Raw 7-bit immediate form, for hint encodings without an enumerator.
  void Hint(int imm7) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    hint(imm7);
  }
  // Halting debug breakpoint.
  void Hlt(int code) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    hlt(code);
  }
  // Instruction synchronization barrier.
  void Isb() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    isb();
  }
  // Load-acquire (LDAR*), LoadLOAcquire (LDLAR*) and load-acquire-exclusive
  // (LDAXR*/LDAXP) wrappers. Each emits exactly one instruction.
  void Ldar(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldar(rt, src);
  }
  void Ldarb(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldarb(rt, src);
  }
  void Ldarh(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldarh(rt, src);
  }
  void Ldlar(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldlar(rt, src);
  }
  void Ldlarb(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldlarb(rt, src);
  }
  void Ldlarh(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldlarh(rt, src);
  }
  // Load-acquire-exclusive pair; rt and rt2 must be distinct registers.
  void Ldaxp(const Register& rt, const Register& rt2, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rt.Aliases(rt2));
    SingleEmissionCheckScope guard(this);
    ldaxp(rt, rt2, src);
  }
  void Ldaxr(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldaxr(rt, src);
  }
  void Ldaxrb(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldaxrb(rt, src);
  }
  void Ldaxrh(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldaxrh(rt, src);
  }
1745
  // Compare-and-swap macro instructions: plain, acquire, release and
  // acquire-release orderings, each in word/doubleword, byte and halfword
  // forms.
  // clang-format off
#define COMPARE_AND_SWAP_SINGLE_MACRO_LIST(V) \
  V(cas, Cas)                                 \
  V(casa, Casa)                               \
  V(casl, Casl)                               \
  V(casal, Casal)                             \
  V(casb, Casb)                               \
  V(casab, Casab)                             \
  V(caslb, Caslb)                             \
  V(casalb, Casalb)                           \
  V(cash, Cash)                               \
  V(casah, Casah)                             \
  V(caslh, Caslh)                             \
  V(casalh, Casalh)
  // clang-format on

  // Define one macro-assembler wrapper per variant above; each emits exactly
  // one instruction.
#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)                                     \
  void MASM(const Register& rs, const Register& rt, const MemOperand& src) { \
    VIXL_ASSERT(allow_macro_instructions_);                                  \
    SingleEmissionCheckScope guard(this);                                    \
    ASM(rs, rt, src);                                                        \
  }
  COMPARE_AND_SWAP_SINGLE_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
1770
1771
  // Compare-and-swap pair macro instructions (CASP family), in plain,
  // acquire, release and acquire-release orderings.
  // clang-format off
#define COMPARE_AND_SWAP_PAIR_MACRO_LIST(V) \
  V(casp, Casp)                             \
  V(caspa, Caspa)                           \
  V(caspl, Caspl)                           \
  V(caspal, Caspal)
  // clang-format on

  // Pair form: two comparison registers (rs, rs2) and two transfer registers
  // (rt, rt2). Each wrapper emits exactly one instruction.
#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)    \
  void MASM(const Register& rs,             \
            const Register& rs2,            \
            const Register& rt,             \
            const Register& rt2,            \
            const MemOperand& src) {        \
    VIXL_ASSERT(allow_macro_instructions_); \
    SingleEmissionCheckScope guard(this);   \
    ASM(rs, rs2, rt, rt2, src);             \
  }
  COMPARE_AND_SWAP_PAIR_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
1792
  // These macros generate all the variations of the atomic memory operations,
  // e.g. ldadd, ldadda, ldaddb, staddl, etc.
  // Eight base operations are crossed with ordering suffixes (none/a/l/al)
  // and size suffixes (none/b/h). Load forms take a result register rt;
  // store forms take only the operand register rs.

  // clang-format off
#define ATOMIC_MEMORY_SIMPLE_MACRO_LIST(V, DEF, MASM_PRE, ASM_PRE) \
  V(DEF, MASM_PRE##add, ASM_PRE##add)                              \
  V(DEF, MASM_PRE##clr, ASM_PRE##clr)                              \
  V(DEF, MASM_PRE##eor, ASM_PRE##eor)                              \
  V(DEF, MASM_PRE##set, ASM_PRE##set)                              \
  V(DEF, MASM_PRE##smax, ASM_PRE##smax)                            \
  V(DEF, MASM_PRE##smin, ASM_PRE##smin)                            \
  V(DEF, MASM_PRE##umax, ASM_PRE##umax)                            \
  V(DEF, MASM_PRE##umin, ASM_PRE##umin)

  // Store modes: plain and release orderings, word/byte/halfword sizes.
#define ATOMIC_MEMORY_STORE_MACRO_MODES(V, MASM, ASM) \
  V(MASM, ASM)                                        \
  V(MASM##l, ASM##l)                                  \
  V(MASM##b, ASM##b)                                  \
  V(MASM##lb, ASM##lb)                                \
  V(MASM##h, ASM##h)                                  \
  V(MASM##lh, ASM##lh)

  // Load modes additionally include acquire and acquire-release orderings.
#define ATOMIC_MEMORY_LOAD_MACRO_MODES(V, MASM, ASM) \
  ATOMIC_MEMORY_STORE_MACRO_MODES(V, MASM, ASM)      \
  V(MASM##a, ASM##a)                                 \
  V(MASM##al, ASM##al)                               \
  V(MASM##ab, ASM##ab)                               \
  V(MASM##alb, ASM##alb)                             \
  V(MASM##ah, ASM##ah)                               \
  V(MASM##alh, ASM##alh)
  // clang-format on

#define DEFINE_MACRO_LOAD_ASM_FUNC(MASM, ASM)                                \
  void MASM(const Register& rs, const Register& rt, const MemOperand& src) { \
    VIXL_ASSERT(allow_macro_instructions_);                                  \
    SingleEmissionCheckScope guard(this);                                    \
    ASM(rs, rt, src);                                                        \
  }
#define DEFINE_MACRO_STORE_ASM_FUNC(MASM, ASM)           \
  void MASM(const Register& rs, const MemOperand& src) { \
    VIXL_ASSERT(allow_macro_instructions_);              \
    SingleEmissionCheckScope guard(this);                \
    ASM(rs, src);                                        \
  }

  ATOMIC_MEMORY_SIMPLE_MACRO_LIST(ATOMIC_MEMORY_LOAD_MACRO_MODES,
                                  DEFINE_MACRO_LOAD_ASM_FUNC,
                                  Ld,
                                  ld)
  ATOMIC_MEMORY_SIMPLE_MACRO_LIST(ATOMIC_MEMORY_STORE_MACRO_MODES,
                                  DEFINE_MACRO_STORE_ASM_FUNC,
                                  St,
                                  st)

  // Swap (SWP) supports the full set of load orderings.
#define DEFINE_MACRO_SWP_ASM_FUNC(MASM, ASM)                                 \
  void MASM(const Register& rs, const Register& rt, const MemOperand& src) { \
    VIXL_ASSERT(allow_macro_instructions_);                                  \
    SingleEmissionCheckScope guard(this);                                    \
    ASM(rs, rt, src);                                                        \
  }

  ATOMIC_MEMORY_LOAD_MACRO_MODES(DEFINE_MACRO_SWP_ASM_FUNC, Swp, swp)

#undef DEFINE_MACRO_LOAD_ASM_FUNC
#undef DEFINE_MACRO_STORE_ASM_FUNC
#undef DEFINE_MACRO_SWP_ASM_FUNC
1859
1860 void Ldaprb(const Register& rt, const MemOperand& src) {
1861 VIXL_ASSERT(allow_macro_instructions_);
1862 SingleEmissionCheckScope guard(this);
1863 VIXL_ASSERT(src.IsImmediateOffset());
1864 if (src.GetOffset() == 0) {
1865 ldaprb(rt, src);
1866 } else {
1867 ldapurb(rt, src);
1868 }
1869 }
1870
  // RCpc load-acquire wrappers. Ldapr and Ldaprh fall back to the unscaled
  // LDAPUR/LDAPURH encodings when the immediate offset is non-zero; the
  // Ldapurs* wrappers emit the sign-extending unscaled variants directly.
  void Ldapursb(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldapursb(rt, src);
  }

  void Ldaprh(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    VIXL_ASSERT(src.IsImmediateOffset());
    if (src.GetOffset() == 0) {
      ldaprh(rt, src);
    } else {
      ldapurh(rt, src);
    }
  }

  void Ldapursh(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldapursh(rt, src);
  }

  void Ldapr(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    VIXL_ASSERT(src.IsImmediateOffset());
    if (src.GetOffset() == 0) {
      ldapr(rt, src);
    } else {
      ldapur(rt, src);
    }
  }

  void Ldapursw(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldapursw(rt, src);
  }
1910
  // Load pair with non-temporal hint.
  void Ldnp(const CPURegister& rt,
            const CPURegister& rt2,
            const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldnp(rt, rt2, src);
  }
1918 // Provide both double and float interfaces for FP immediate loads, rather
1919 // than relying on implicit C++ casts. This allows signalling NaNs to be
1920 // preserved when the immediate matches the format of fd. Most systems convert
1921 // signalling NaNs to quiet NaNs when converting between float and double.
  // Load an FP immediate into vt via the literal pool, converting the value
  // to vt's format if it does not match. The literal is deleted automatically
  // once the pool has placed it.
  void Ldr(const VRegister& vt, double imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    RawLiteral* literal;
    if (vt.IsD()) {
      literal = new Literal<double>(imm,
                                    &literal_pool_,
                                    RawLiteral::kDeletedOnPlacementByPool);
    } else {
      literal = new Literal<float>(static_cast<float>(imm),
                                   &literal_pool_,
                                   RawLiteral::kDeletedOnPlacementByPool);
    }
    ldr(vt, literal);
  }
  // Single-precision counterpart of the overload above.
  void Ldr(const VRegister& vt, float imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    RawLiteral* literal;
    if (vt.IsS()) {
      literal = new Literal<float>(imm,
                                   &literal_pool_,
                                   RawLiteral::kDeletedOnPlacementByPool);
    } else {
      literal = new Literal<double>(static_cast<double>(imm),
                                    &literal_pool_,
                                    RawLiteral::kDeletedOnPlacementByPool);
    }
    ldr(vt, literal);
  }
  // Load a 128-bit immediate into a Q register via the literal pool.
  void Ldr(const VRegister& vt, uint64_t high64, uint64_t low64) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(vt.IsQ());
    SingleEmissionCheckScope guard(this);
    ldr(vt,
        new Literal<uint64_t>(high64,
                              low64,
                              &literal_pool_,
                              RawLiteral::kDeletedOnPlacementByPool));
  }
  // Load an integer immediate into rt via the literal pool. A 64-bit literal
  // is used for X registers; for W registers the value must fit in 32 bits
  // (as a signed or unsigned quantity) and a 32-bit literal is used.
  void Ldr(const Register& rt, uint64_t imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rt.IsZero());
    SingleEmissionCheckScope guard(this);
    RawLiteral* literal;
    if (rt.Is64Bits()) {
      literal = new Literal<uint64_t>(imm,
                                      &literal_pool_,
                                      RawLiteral::kDeletedOnPlacementByPool);
    } else {
      VIXL_ASSERT(rt.Is32Bits());
      VIXL_ASSERT(IsUint32(imm) || IsInt32(imm));
      literal = new Literal<uint32_t>(static_cast<uint32_t>(imm),
                                      &literal_pool_,
                                      RawLiteral::kDeletedOnPlacementByPool);
    }
    ldr(rt, literal);
  }
  // Load-sign-extend-word of a 32-bit immediate placed in the literal pool.
  void Ldrsw(const Register& rt, uint32_t imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rt.IsZero());
    SingleEmissionCheckScope guard(this);
    ldrsw(rt,
          new Literal<uint32_t>(imm,
                                &literal_pool_,
                                RawLiteral::kDeletedOnPlacementByPool));
  }
  // Load from a caller-managed literal.
  void Ldr(const CPURegister& rt, RawLiteral* literal) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldr(rt, literal);
  }
  // Load-sign-extend-word from a caller-managed literal.
  void Ldrsw(const Register& rt, RawLiteral* literal) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldrsw(rt, literal);
  }
  // Load-exclusive wrappers (LDXP/LDXR/LDXRB/LDXRH).
  // For the pair form, rt and rt2 must be distinct registers.
  void Ldxp(const Register& rt, const Register& rt2, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rt.Aliases(rt2));
    SingleEmissionCheckScope guard(this);
    ldxp(rt, rt2, src);
  }
  void Ldxr(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldxr(rt, src);
  }
  void Ldxrb(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldxrb(rt, src);
  }
  void Ldxrh(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldxrh(rt, src);
  }
  // Logical shift left by immediate.
  void Lsl(const Register& rd, const Register& rn, unsigned shift) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    lsl(rd, rn, shift);
  }
  // Logical shift left by register; emitted as the variable-shift LSLV form.
  void Lsl(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    lslv(rd, rn, rm);
  }
  // Logical shift right by immediate.
  void Lsr(const Register& rd, const Register& rn, unsigned shift) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    lsr(rd, rn, shift);
  }
  // Logical shift right by register; emitted as the variable-shift LSRV form.
  void Lsr(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    lsrv(rd, rn, rm);
  }
  // Pointer-authenticated loads: authenticate the base address with key A
  // (LDRAA) or key B (LDRAB) before loading.
  void Ldraa(const Register& xt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldraa(xt, src);
  }
  void Ldrab(const Register& xt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldrab(xt, src);
  }
  // Multiply-add: rd = ra + rn * rm. No operand may be the zero register.
  void Madd(const Register& rd,
            const Register& rn,
            const Register& rm,
            const Register& ra) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    VIXL_ASSERT(!ra.IsZero());
    SingleEmissionCheckScope guard(this);
    madd(rd, rn, rm, ra);
  }
  // Multiply-negate: rd = -(rn * rm).
  void Mneg(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    mneg(rd, rn, rm);
  }
2080 void Mov(const Register& rd,
2081 const Register& rn,
2082 DiscardMoveMode discard_mode = kDontDiscardForSameWReg) {
2083 VIXL_ASSERT(allow_macro_instructions_);
2084 // Emit a register move only if the registers are distinct, or if they are
2085 // not X registers.
2086 //
2087 // Note that mov(w0, w0) is not a no-op because it clears the top word of
2088 // x0. A flag is provided (kDiscardForSameWReg) if a move between the same W
2089 // registers is not required to clear the top word of the X register. In
2090 // this case, the instruction is discarded.
2091 //
2092 // If the sp is an operand, add #0 is emitted, otherwise, orr #0.
2093 if (!rd.Is(rn) ||
2094 (rd.Is32Bits() && (discard_mode == kDontDiscardForSameWReg))) {
2095 SingleEmissionCheckScope guard(this);
2096 mov(rd, rn);
2097 }
2098 }
  // Move-keep: insert a 16-bit immediate into rd without clearing the other
  // bits. The default shift of -1 is interpreted by the assembler's movk.
  void Movk(const Register& rd, uint64_t imm, int shift = -1) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    movk(rd, imm, shift);
  }
  // Read a system register into rt.
  void Mrs(const Register& rt, SystemRegister sysreg) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rt.IsZero());
    SingleEmissionCheckScope guard(this);
    mrs(rt, sysreg);
  }
  // Write rt to a system register.
  void Msr(SystemRegister sysreg, const Register& rt) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rt.IsZero());
    SingleEmissionCheckScope guard(this);
    msr(sysreg, rt);
  }
  // Invert the carry flag.
  void Cfinv() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    cfinv();
  }
  // Flag-format conversion instructions (ARMv8.5 flag manipulation).
  void Axflag() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    axflag();
  }
  void Xaflag() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    xaflag();
  }
  // Generic system instruction; rt defaults to the zero register.
  void Sys(int op1, int crn, int crm, int op2, const Register& rt = xzr) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    sys(op1, crn, crm, op2, rt);
  }
  // Data cache maintenance operation.
  void Dc(DataCacheOp op, const Register& rt) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    dc(op, rt);
  }
  // Instruction cache maintenance operation.
  void Ic(InstructionCacheOp op, const Register& rt) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ic(op, rt);
  }
  // Multiply-subtract: rd = ra - rn * rm. No operand may be the zero register.
  void Msub(const Register& rd,
            const Register& rn,
            const Register& rm,
            const Register& ra) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    VIXL_ASSERT(!ra.IsZero());
    SingleEmissionCheckScope guard(this);
    msub(rd, rn, rm, ra);
  }
  void Mul(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    mul(rd, rn, rm);
  }
  void Nop() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    nop();
  }
  // Reverse the bit order of rn.
  void Rbit(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    rbit(rd, rn);
  }
  // Return to the address in xn (the link register by default).
  void Ret(const Register& xn = lr) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!xn.IsZero());
    SingleEmissionCheckScope guard(this);
    ret(xn);
  }
  // Byte-reversal family: whole register, and within each halfword, word or
  // doubleword respectively.
  void Rev(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    rev(rd, rn);
  }
  void Rev16(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    rev16(rd, rn);
  }
  void Rev32(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    rev32(rd, rn);
  }
  void Rev64(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    rev64(rd, rn);
  }
2213
  // Pointer Authentication macros. Paci*/Pacd* sign an address with an
  // instruction or data key; Auti*/Autd* authenticate one. For each prefix,
  // the A-key and B-key forms are generated, together with the z-variants
  // that use a zero modifier.
#define PAUTH_MASM_VARIATIONS(V) \
  V(Paci, paci)                  \
  V(Pacd, pacd)                  \
  V(Auti, auti)                  \
  V(Autd, autd)

#define DEFINE_MACRO_ASM_FUNCS(MASM_PRE, ASM_PRE)            \
  void MASM_PRE##a(const Register& xd, const Register& xn) { \
    VIXL_ASSERT(allow_macro_instructions_);                  \
    SingleEmissionCheckScope guard(this);                    \
    ASM_PRE##a(xd, xn);                                      \
  }                                                          \
  void MASM_PRE##za(const Register& xd) {                    \
    VIXL_ASSERT(allow_macro_instructions_);                  \
    SingleEmissionCheckScope guard(this);                    \
    ASM_PRE##za(xd);                                         \
  }                                                          \
  void MASM_PRE##b(const Register& xd, const Register& xn) { \
    VIXL_ASSERT(allow_macro_instructions_);                  \
    SingleEmissionCheckScope guard(this);                    \
    ASM_PRE##b(xd, xn);                                      \
  }                                                          \
  void MASM_PRE##zb(const Register& xd) {                    \
    VIXL_ASSERT(allow_macro_instructions_);                  \
    SingleEmissionCheckScope guard(this);                    \
    ASM_PRE##zb(xd);                                         \
  }

  PAUTH_MASM_VARIATIONS(DEFINE_MACRO_ASM_FUNCS)
#undef DEFINE_MACRO_ASM_FUNCS
2244
  // Compute a generic pointer-authentication code for xn, using the modifier
  // in xm, into xd.
  void Pacga(const Register& xd, const Register& xn, const Register& xm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    pacga(xd, xn, xm);
  }

  // Strip the authentication code from an instruction (Xpaci) or data
  // (Xpacd) address.
  void Xpaci(const Register& xd) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    xpaci(xd);
  }

  void Xpacd(const Register& xd) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    xpacd(xd);
  }
  // Rotate right by immediate.
  void Ror(const Register& rd, const Register& rs, unsigned shift) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rs.IsZero());
    SingleEmissionCheckScope guard(this);
    ror(rd, rs, shift);
  }
  // Rotate right by register; emitted as the variable-shift RORV form.
  void Ror(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    rorv(rd, rn, rm);
  }
  // Signed bitfield insert-in-zero.
  void Sbfiz(const Register& rd,
             const Register& rn,
             unsigned lsb,
             unsigned width) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    sbfiz(rd, rn, lsb, width);
  }
  // Signed bitfield move, with raw immr/imms bitfield parameters.
  void Sbfm(const Register& rd,
            const Register& rn,
            unsigned immr,
            unsigned imms) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    sbfm(rd, rn, immr, imms);
  }
  // Signed bitfield extract.
  void Sbfx(const Register& rd,
            const Register& rn,
            unsigned lsb,
            unsigned width) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    sbfx(rd, rn, lsb, width);
  }
  // Signed integer to floating-point conversion, with an optional fixed-point
  // fraction-bit count (fbits == 0 means a plain integer conversion).
  void Scvtf(const VRegister& vd, const Register& rn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    scvtf(vd, rn, fbits);
  }
  // Signed divide.
  void Sdiv(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    sdiv(rd, rn, rm);
  }
  // Signed widening multiply-accumulate family. No operand may be the zero
  // register.
  void Smaddl(const Register& rd,
              const Register& rn,
              const Register& rm,
              const Register& ra) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    VIXL_ASSERT(!ra.IsZero());
    SingleEmissionCheckScope guard(this);
    smaddl(rd, rn, rm, ra);
  }
  void Smsubl(const Register& rd,
              const Register& rn,
              const Register& rm,
              const Register& ra) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    VIXL_ASSERT(!ra.IsZero());
    SingleEmissionCheckScope guard(this);
    smsubl(rd, rn, rm, ra);
  }
  void Smull(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    smull(rd, rn, rm);
  }
  // Signed multiply returning the high 64 bits of the 128-bit product.
  void Smulh(const Register& xd, const Register& xn, const Register& xm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!xd.IsZero());
    VIXL_ASSERT(!xn.IsZero());
    VIXL_ASSERT(!xm.IsZero());
    SingleEmissionCheckScope guard(this);
    smulh(xd, xn, xm);
  }
  // Store-release wrappers. STLR/STLRB/STLRH only encode a zero immediate
  // offset; for a non-zero offset the unscaled store-release (STLUR*) form is
  // emitted instead. The Stllr* wrappers emit StoreLORelease instructions.
  void Stlr(const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    VIXL_ASSERT(dst.IsImmediateOffset());
    if (dst.GetOffset() == 0) {
      stlr(rt, dst);
    } else {
      stlur(rt, dst);
    }
  }
  void Stlrb(const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    VIXL_ASSERT(dst.IsImmediateOffset());
    if (dst.GetOffset() == 0) {
      stlrb(rt, dst);
    } else {
      stlurb(rt, dst);
    }
  }
  void Stlrh(const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    VIXL_ASSERT(dst.IsImmediateOffset());
    if (dst.GetOffset() == 0) {
      stlrh(rt, dst);
    } else {
      stlurh(rt, dst);
    }
  }
  void Stllr(const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    stllr(rt, dst);
  }
  void Stllrb(const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    stllrb(rt, dst);
  }
  void Stllrh(const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    stllrh(rt, dst);
  }
  // Store-release exclusive wrappers. The status register rs must not alias
  // the transfer registers or the base address register.
  void Stlxp(const Register& rs,
             const Register& rt,
             const Register& rt2,
             const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    VIXL_ASSERT(!rs.Aliases(rt2));
    SingleEmissionCheckScope guard(this);
    stlxp(rs, rt, rt2, dst);
  }
  void Stlxr(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stlxr(rs, rt, dst);
  }
  void Stlxrb(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stlxrb(rs, rt, dst);
  }
  void Stlxrh(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stlxrh(rs, rt, dst);
  }
  // Store pair with non-temporal hint.
  void Stnp(const CPURegister& rt,
            const CPURegister& rt2,
            const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    stnp(rt, rt2, dst);
  }
  // Store-exclusive wrappers. As with the release forms, the status register
  // rs must not alias the transfer registers or the base address register.
  void Stxp(const Register& rs,
            const Register& rt,
            const Register& rt2,
            const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    VIXL_ASSERT(!rs.Aliases(rt2));
    SingleEmissionCheckScope guard(this);
    stxp(rs, rt, rt2, dst);
  }
  void Stxr(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stxr(rs, rt, dst);
  }
  void Stxrb(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stxrb(rs, rt, dst);
  }
  void Stxrh(const Register& rs, const Register& rt, const MemOperand& dst) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rs.Aliases(dst.GetBaseRegister()));
    VIXL_ASSERT(!rs.Aliases(rt));
    SingleEmissionCheckScope guard(this);
    stxrh(rs, rt, dst);
  }
Svc(int code)2477 void Svc(int code) {
2478 VIXL_ASSERT(allow_macro_instructions_);
2479 SingleEmissionCheckScope guard(this);
2480 svc(code);
2481 }
Sxtb(const Register & rd,const Register & rn)2482 void Sxtb(const Register& rd, const Register& rn) {
2483 VIXL_ASSERT(allow_macro_instructions_);
2484 VIXL_ASSERT(!rd.IsZero());
2485 VIXL_ASSERT(!rn.IsZero());
2486 SingleEmissionCheckScope guard(this);
2487 sxtb(rd, rn);
2488 }
Sxth(const Register & rd,const Register & rn)2489 void Sxth(const Register& rd, const Register& rn) {
2490 VIXL_ASSERT(allow_macro_instructions_);
2491 VIXL_ASSERT(!rd.IsZero());
2492 VIXL_ASSERT(!rn.IsZero());
2493 SingleEmissionCheckScope guard(this);
2494 sxth(rd, rn);
2495 }
Sxtw(const Register & rd,const Register & rn)2496 void Sxtw(const Register& rd, const Register& rn) {
2497 VIXL_ASSERT(allow_macro_instructions_);
2498 VIXL_ASSERT(!rd.IsZero());
2499 VIXL_ASSERT(!rn.IsZero());
2500 SingleEmissionCheckScope guard(this);
2501 sxtw(rd, rn);
2502 }
Tbl(const VRegister & vd,const VRegister & vn,const VRegister & vm)2503 void Tbl(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
2504 VIXL_ASSERT(allow_macro_instructions_);
2505 SingleEmissionCheckScope guard(this);
2506 tbl(vd, vn, vm);
2507 }
Tbl(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vm)2508 void Tbl(const VRegister& vd,
2509 const VRegister& vn,
2510 const VRegister& vn2,
2511 const VRegister& vm) {
2512 VIXL_ASSERT(allow_macro_instructions_);
2513 SingleEmissionCheckScope guard(this);
2514 tbl(vd, vn, vn2, vm);
2515 }
Tbl(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vn3,const VRegister & vm)2516 void Tbl(const VRegister& vd,
2517 const VRegister& vn,
2518 const VRegister& vn2,
2519 const VRegister& vn3,
2520 const VRegister& vm) {
2521 VIXL_ASSERT(allow_macro_instructions_);
2522 SingleEmissionCheckScope guard(this);
2523 tbl(vd, vn, vn2, vn3, vm);
2524 }
Tbl(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vn3,const VRegister & vn4,const VRegister & vm)2525 void Tbl(const VRegister& vd,
2526 const VRegister& vn,
2527 const VRegister& vn2,
2528 const VRegister& vn3,
2529 const VRegister& vn4,
2530 const VRegister& vm) {
2531 VIXL_ASSERT(allow_macro_instructions_);
2532 SingleEmissionCheckScope guard(this);
2533 tbl(vd, vn, vn2, vn3, vn4, vm);
2534 }
Tbx(const VRegister & vd,const VRegister & vn,const VRegister & vm)2535 void Tbx(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
2536 VIXL_ASSERT(allow_macro_instructions_);
2537 SingleEmissionCheckScope guard(this);
2538 tbx(vd, vn, vm);
2539 }
Tbx(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vm)2540 void Tbx(const VRegister& vd,
2541 const VRegister& vn,
2542 const VRegister& vn2,
2543 const VRegister& vm) {
2544 VIXL_ASSERT(allow_macro_instructions_);
2545 SingleEmissionCheckScope guard(this);
2546 tbx(vd, vn, vn2, vm);
2547 }
Tbx(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vn3,const VRegister & vm)2548 void Tbx(const VRegister& vd,
2549 const VRegister& vn,
2550 const VRegister& vn2,
2551 const VRegister& vn3,
2552 const VRegister& vm) {
2553 VIXL_ASSERT(allow_macro_instructions_);
2554 SingleEmissionCheckScope guard(this);
2555 tbx(vd, vn, vn2, vn3, vm);
2556 }
Tbx(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vn3,const VRegister & vn4,const VRegister & vm)2557 void Tbx(const VRegister& vd,
2558 const VRegister& vn,
2559 const VRegister& vn2,
2560 const VRegister& vn3,
2561 const VRegister& vn4,
2562 const VRegister& vm) {
2563 VIXL_ASSERT(allow_macro_instructions_);
2564 SingleEmissionCheckScope guard(this);
2565 tbx(vd, vn, vn2, vn3, vn4, vm);
2566 }
  // Test bit and branch if non-zero (Tbnz) / zero (Tbz); defined out of line.
  void Tbnz(const Register& rt, unsigned bit_pos, Label* label);
  void Tbz(const Register& rt, unsigned bit_pos, Label* label);
Ubfiz(const Register & rd,const Register & rn,unsigned lsb,unsigned width)2569 void Ubfiz(const Register& rd,
2570 const Register& rn,
2571 unsigned lsb,
2572 unsigned width) {
2573 VIXL_ASSERT(allow_macro_instructions_);
2574 VIXL_ASSERT(!rd.IsZero());
2575 VIXL_ASSERT(!rn.IsZero());
2576 SingleEmissionCheckScope guard(this);
2577 ubfiz(rd, rn, lsb, width);
2578 }
Ubfm(const Register & rd,const Register & rn,unsigned immr,unsigned imms)2579 void Ubfm(const Register& rd,
2580 const Register& rn,
2581 unsigned immr,
2582 unsigned imms) {
2583 VIXL_ASSERT(allow_macro_instructions_);
2584 VIXL_ASSERT(!rd.IsZero());
2585 VIXL_ASSERT(!rn.IsZero());
2586 SingleEmissionCheckScope guard(this);
2587 ubfm(rd, rn, immr, imms);
2588 }
Ubfx(const Register & rd,const Register & rn,unsigned lsb,unsigned width)2589 void Ubfx(const Register& rd,
2590 const Register& rn,
2591 unsigned lsb,
2592 unsigned width) {
2593 VIXL_ASSERT(allow_macro_instructions_);
2594 VIXL_ASSERT(!rd.IsZero());
2595 VIXL_ASSERT(!rn.IsZero());
2596 SingleEmissionCheckScope guard(this);
2597 ubfx(rd, rn, lsb, width);
2598 }
2599 void Ucvtf(const VRegister& vd, const Register& rn, int fbits = 0) {
2600 VIXL_ASSERT(allow_macro_instructions_);
2601 VIXL_ASSERT(!rn.IsZero());
2602 SingleEmissionCheckScope guard(this);
2603 ucvtf(vd, rn, fbits);
2604 }
Udiv(const Register & rd,const Register & rn,const Register & rm)2605 void Udiv(const Register& rd, const Register& rn, const Register& rm) {
2606 VIXL_ASSERT(allow_macro_instructions_);
2607 VIXL_ASSERT(!rd.IsZero());
2608 VIXL_ASSERT(!rn.IsZero());
2609 VIXL_ASSERT(!rm.IsZero());
2610 SingleEmissionCheckScope guard(this);
2611 udiv(rd, rn, rm);
2612 }
Umaddl(const Register & rd,const Register & rn,const Register & rm,const Register & ra)2613 void Umaddl(const Register& rd,
2614 const Register& rn,
2615 const Register& rm,
2616 const Register& ra) {
2617 VIXL_ASSERT(allow_macro_instructions_);
2618 VIXL_ASSERT(!rd.IsZero());
2619 VIXL_ASSERT(!rn.IsZero());
2620 VIXL_ASSERT(!rm.IsZero());
2621 VIXL_ASSERT(!ra.IsZero());
2622 SingleEmissionCheckScope guard(this);
2623 umaddl(rd, rn, rm, ra);
2624 }
Umull(const Register & rd,const Register & rn,const Register & rm)2625 void Umull(const Register& rd, const Register& rn, const Register& rm) {
2626 VIXL_ASSERT(allow_macro_instructions_);
2627 VIXL_ASSERT(!rd.IsZero());
2628 VIXL_ASSERT(!rn.IsZero());
2629 VIXL_ASSERT(!rm.IsZero());
2630 SingleEmissionCheckScope guard(this);
2631 umull(rd, rn, rm);
2632 }
Umulh(const Register & xd,const Register & xn,const Register & xm)2633 void Umulh(const Register& xd, const Register& xn, const Register& xm) {
2634 VIXL_ASSERT(allow_macro_instructions_);
2635 VIXL_ASSERT(!xd.IsZero());
2636 VIXL_ASSERT(!xn.IsZero());
2637 VIXL_ASSERT(!xm.IsZero());
2638 SingleEmissionCheckScope guard(this);
2639 umulh(xd, xn, xm);
2640 }
Umsubl(const Register & rd,const Register & rn,const Register & rm,const Register & ra)2641 void Umsubl(const Register& rd,
2642 const Register& rn,
2643 const Register& rm,
2644 const Register& ra) {
2645 VIXL_ASSERT(allow_macro_instructions_);
2646 VIXL_ASSERT(!rd.IsZero());
2647 VIXL_ASSERT(!rn.IsZero());
2648 VIXL_ASSERT(!rm.IsZero());
2649 VIXL_ASSERT(!ra.IsZero());
2650 SingleEmissionCheckScope guard(this);
2651 umsubl(rd, rn, rm, ra);
2652 }
Unreachable()2653 void Unreachable() {
2654 VIXL_ASSERT(allow_macro_instructions_);
2655 SingleEmissionCheckScope guard(this);
2656 if (generate_simulator_code_) {
2657 hlt(kUnreachableOpcode);
2658 } else {
2659 // Use the architecturally-defined UDF instruction to abort on hardware,
2660 // because using HLT and BRK tends to make the process difficult to debug.
2661 udf(kUnreachableOpcode);
2662 }
2663 }
Uxtb(const Register & rd,const Register & rn)2664 void Uxtb(const Register& rd, const Register& rn) {
2665 VIXL_ASSERT(allow_macro_instructions_);
2666 VIXL_ASSERT(!rd.IsZero());
2667 VIXL_ASSERT(!rn.IsZero());
2668 SingleEmissionCheckScope guard(this);
2669 uxtb(rd, rn);
2670 }
Uxth(const Register & rd,const Register & rn)2671 void Uxth(const Register& rd, const Register& rn) {
2672 VIXL_ASSERT(allow_macro_instructions_);
2673 VIXL_ASSERT(!rd.IsZero());
2674 VIXL_ASSERT(!rn.IsZero());
2675 SingleEmissionCheckScope guard(this);
2676 uxth(rd, rn);
2677 }
Uxtw(const Register & rd,const Register & rn)2678 void Uxtw(const Register& rd, const Register& rn) {
2679 VIXL_ASSERT(allow_macro_instructions_);
2680 VIXL_ASSERT(!rd.IsZero());
2681 VIXL_ASSERT(!rn.IsZero());
2682 SingleEmissionCheckScope guard(this);
2683 uxtw(rd, rn);
2684 }
2685
  // NEON 3 vector register instructions.
  // Each (assembler, MacroAssembler) pair below is expanded by the
  // DEFINE_MACRO_ASM_FUNC macro that follows into a forwarding method.
#define NEON_3VREG_MACRO_LIST(V) \
  V(add, Add)                    \
  V(addhn, Addhn)                \
  V(addhn2, Addhn2)              \
  V(addp, Addp)                  \
  V(and_, And)                   \
  V(bic, Bic)                    \
  V(bif, Bif)                    \
  V(bit, Bit)                    \
  V(bsl, Bsl)                    \
  V(cmeq, Cmeq)                  \
  V(cmge, Cmge)                  \
  V(cmgt, Cmgt)                  \
  V(cmhi, Cmhi)                  \
  V(cmhs, Cmhs)                  \
  V(cmtst, Cmtst)                \
  V(eor, Eor)                    \
  V(fabd, Fabd)                  \
  V(facge, Facge)                \
  V(facgt, Facgt)                \
  V(faddp, Faddp)                \
  V(fcmeq, Fcmeq)                \
  V(fcmge, Fcmge)                \
  V(fcmgt, Fcmgt)                \
  V(fmaxnmp, Fmaxnmp)            \
  V(fmaxp, Fmaxp)                \
  V(fminnmp, Fminnmp)            \
  V(fminp, Fminp)                \
  V(fmla, Fmla)                  \
  V(fmlal, Fmlal)                \
  V(fmlal2, Fmlal2)              \
  V(fmls, Fmls)                  \
  V(fmlsl, Fmlsl)                \
  V(fmlsl2, Fmlsl2)              \
  V(fmulx, Fmulx)                \
  V(frecps, Frecps)              \
  V(frsqrts, Frsqrts)            \
  V(mla, Mla)                    \
  V(mls, Mls)                    \
  V(mul, Mul)                    \
  V(orn, Orn)                    \
  V(orr, Orr)                    \
  V(pmul, Pmul)                  \
  V(pmull, Pmull)                \
  V(pmull2, Pmull2)              \
  V(raddhn, Raddhn)              \
  V(raddhn2, Raddhn2)            \
  V(rsubhn, Rsubhn)              \
  V(rsubhn2, Rsubhn2)            \
  V(saba, Saba)                  \
  V(sabal, Sabal)                \
  V(sabal2, Sabal2)              \
  V(sabd, Sabd)                  \
  V(sabdl, Sabdl)                \
  V(sabdl2, Sabdl2)              \
  V(saddl, Saddl)                \
  V(saddl2, Saddl2)              \
  V(saddw, Saddw)                \
  V(saddw2, Saddw2)              \
  V(shadd, Shadd)                \
  V(shsub, Shsub)                \
  V(smax, Smax)                  \
  V(smaxp, Smaxp)                \
  V(smin, Smin)                  \
  V(sminp, Sminp)                \
  V(smlal, Smlal)                \
  V(smlal2, Smlal2)              \
  V(smlsl, Smlsl)                \
  V(smlsl2, Smlsl2)              \
  V(smull, Smull)                \
  V(smull2, Smull2)              \
  V(sqadd, Sqadd)                \
  V(sqdmlal, Sqdmlal)            \
  V(sqdmlal2, Sqdmlal2)          \
  V(sqdmlsl, Sqdmlsl)            \
  V(sqdmlsl2, Sqdmlsl2)          \
  V(sqdmulh, Sqdmulh)            \
  V(sqdmull, Sqdmull)            \
  V(sqdmull2, Sqdmull2)          \
  V(sqrdmulh, Sqrdmulh)          \
  V(sdot, Sdot)                  \
  V(sqrdmlah, Sqrdmlah)          \
  V(udot, Udot)                  \
  V(sqrdmlsh, Sqrdmlsh)          \
  V(sqrshl, Sqrshl)              \
  V(sqshl, Sqshl)                \
  V(sqsub, Sqsub)                \
  V(srhadd, Srhadd)              \
  V(srshl, Srshl)                \
  V(sshl, Sshl)                  \
  V(ssubl, Ssubl)                \
  V(ssubl2, Ssubl2)              \
  V(ssubw, Ssubw)                \
  V(ssubw2, Ssubw2)              \
  V(sub, Sub)                    \
  V(subhn, Subhn)                \
  V(subhn2, Subhn2)              \
  V(trn1, Trn1)                  \
  V(trn2, Trn2)                  \
  V(uaba, Uaba)                  \
  V(uabal, Uabal)                \
  V(uabal2, Uabal2)              \
  V(uabd, Uabd)                  \
  V(uabdl, Uabdl)                \
  V(uabdl2, Uabdl2)              \
  V(uaddl, Uaddl)                \
  V(uaddl2, Uaddl2)              \
  V(uaddw, Uaddw)                \
  V(uaddw2, Uaddw2)              \
  V(uhadd, Uhadd)                \
  V(uhsub, Uhsub)                \
  V(umax, Umax)                  \
  V(umaxp, Umaxp)                \
  V(umin, Umin)                  \
  V(uminp, Uminp)                \
  V(umlal, Umlal)                \
  V(umlal2, Umlal2)              \
  V(umlsl, Umlsl)                \
  V(umlsl2, Umlsl2)              \
  V(umull, Umull)                \
  V(umull2, Umull2)              \
  V(uqadd, Uqadd)                \
  V(uqrshl, Uqrshl)              \
  V(uqshl, Uqshl)                \
  V(uqsub, Uqsub)                \
  V(urhadd, Urhadd)              \
  V(urshl, Urshl)                \
  V(ushl, Ushl)                  \
  V(usubl, Usubl)                \
  V(usubl2, Usubl2)              \
  V(usubw, Usubw)                \
  V(usubw2, Usubw2)              \
  V(uzp1, Uzp1)                  \
  V(uzp2, Uzp2)                  \
  V(zip1, Zip1)                  \
  V(zip2, Zip2)

  // Defines `void MASM(vd, vn, vm)` which checks that macro instructions are
  // allowed, then emits exactly one `ASM` instruction.
#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)                                     \
  void MASM(const VRegister& vd, const VRegister& vn, const VRegister& vm) { \
    VIXL_ASSERT(allow_macro_instructions_);                                  \
    SingleEmissionCheckScope guard(this);                                    \
    ASM(vd, vn, vm);                                                         \
  }
  NEON_3VREG_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
2832
  // NEON 2 vector register instructions.
  // Expanded below into `void MASM(vd, vn)` forwarding methods.
#define NEON_2VREG_MACRO_LIST(V) \
  V(abs, Abs)                    \
  V(addp, Addp)                  \
  V(addv, Addv)                  \
  V(cls, Cls)                    \
  V(clz, Clz)                    \
  V(cnt, Cnt)                    \
  V(fabs, Fabs)                  \
  V(faddp, Faddp)                \
  V(fcvtas, Fcvtas)              \
  V(fcvtau, Fcvtau)              \
  V(fcvtms, Fcvtms)              \
  V(fcvtmu, Fcvtmu)              \
  V(fcvtns, Fcvtns)              \
  V(fcvtnu, Fcvtnu)              \
  V(fcvtps, Fcvtps)              \
  V(fcvtpu, Fcvtpu)              \
  V(fmaxnmp, Fmaxnmp)            \
  V(fmaxnmv, Fmaxnmv)            \
  V(fmaxp, Fmaxp)                \
  V(fmaxv, Fmaxv)                \
  V(fminnmp, Fminnmp)            \
  V(fminnmv, Fminnmv)            \
  V(fminp, Fminp)                \
  V(fminv, Fminv)                \
  V(fneg, Fneg)                  \
  V(frecpe, Frecpe)              \
  V(frecpx, Frecpx)              \
  V(frint32x, Frint32x)          \
  V(frint32z, Frint32z)          \
  V(frint64x, Frint64x)          \
  V(frint64z, Frint64z)          \
  V(frinta, Frinta)              \
  V(frinti, Frinti)              \
  V(frintm, Frintm)              \
  V(frintn, Frintn)              \
  V(frintp, Frintp)              \
  V(frintx, Frintx)              \
  V(frintz, Frintz)              \
  V(frsqrte, Frsqrte)            \
  V(fsqrt, Fsqrt)                \
  V(mov, Mov)                    \
  V(mvn, Mvn)                    \
  V(neg, Neg)                    \
  V(not_, Not)                   \
  V(rbit, Rbit)                  \
  V(rev16, Rev16)                \
  V(rev32, Rev32)                \
  V(rev64, Rev64)                \
  V(sadalp, Sadalp)              \
  V(saddlp, Saddlp)              \
  V(saddlv, Saddlv)              \
  V(smaxv, Smaxv)                \
  V(sminv, Sminv)                \
  V(sqabs, Sqabs)                \
  V(sqneg, Sqneg)                \
  V(sqxtn, Sqxtn)                \
  V(sqxtn2, Sqxtn2)              \
  V(sqxtun, Sqxtun)              \
  V(sqxtun2, Sqxtun2)            \
  V(suqadd, Suqadd)              \
  V(sxtl, Sxtl)                  \
  V(sxtl2, Sxtl2)                \
  V(uadalp, Uadalp)              \
  V(uaddlp, Uaddlp)              \
  V(uaddlv, Uaddlv)              \
  V(umaxv, Umaxv)                \
  V(uminv, Uminv)                \
  V(uqxtn, Uqxtn)                \
  V(uqxtn2, Uqxtn2)              \
  V(urecpe, Urecpe)              \
  V(ursqrte, Ursqrte)            \
  V(usqadd, Usqadd)              \
  V(uxtl, Uxtl)                  \
  V(uxtl2, Uxtl2)                \
  V(xtn, Xtn)                    \
  V(xtn2, Xtn2)

  // Defines `void MASM(vd, vn)` forwarding to the assembler `ASM`.
#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)               \
  void MASM(const VRegister& vd, const VRegister& vn) { \
    VIXL_ASSERT(allow_macro_instructions_);             \
    SingleEmissionCheckScope guard(this);               \
    ASM(vd, vn);                                        \
  }
  NEON_2VREG_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
2920
  // NEON 2 vector register with immediate instructions.
  // These are the floating-point compare-against-immediate forms; the
  // expansion produces `void MASM(vd, vn, double imm)` methods.
#define NEON_2VREG_FPIMM_MACRO_LIST(V) \
  V(fcmeq, Fcmeq)                      \
  V(fcmge, Fcmge)                      \
  V(fcmgt, Fcmgt)                      \
  V(fcmle, Fcmle)                      \
  V(fcmlt, Fcmlt)

#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)                           \
  void MASM(const VRegister& vd, const VRegister& vn, double imm) { \
    VIXL_ASSERT(allow_macro_instructions_);                         \
    SingleEmissionCheckScope guard(this);                           \
    ASM(vd, vn, imm);                                               \
  }
  NEON_2VREG_FPIMM_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
2937
  // NEON by element instructions.
  // The expansion produces `void MASM(vd, vn, vm, vm_index)` methods, where
  // `vm_index` selects the lane of `vm`.
#define NEON_BYELEMENT_MACRO_LIST(V) \
  V(fmul, Fmul)                      \
  V(fmla, Fmla)                      \
  V(fmlal, Fmlal)                    \
  V(fmlal2, Fmlal2)                  \
  V(fmls, Fmls)                      \
  V(fmlsl, Fmlsl)                    \
  V(fmlsl2, Fmlsl2)                  \
  V(fmulx, Fmulx)                    \
  V(mul, Mul)                        \
  V(mla, Mla)                        \
  V(mls, Mls)                        \
  V(sqdmulh, Sqdmulh)                \
  V(sqrdmulh, Sqrdmulh)              \
  V(sdot, Sdot)                      \
  V(sqrdmlah, Sqrdmlah)              \
  V(udot, Udot)                      \
  V(sqrdmlsh, Sqrdmlsh)              \
  V(sqdmull, Sqdmull)                \
  V(sqdmull2, Sqdmull2)              \
  V(sqdmlal, Sqdmlal)                \
  V(sqdmlal2, Sqdmlal2)              \
  V(sqdmlsl, Sqdmlsl)                \
  V(sqdmlsl2, Sqdmlsl2)              \
  V(smull, Smull)                    \
  V(smull2, Smull2)                  \
  V(smlal, Smlal)                    \
  V(smlal2, Smlal2)                  \
  V(smlsl, Smlsl)                    \
  V(smlsl2, Smlsl2)                  \
  V(umull, Umull)                    \
  V(umull2, Umull2)                  \
  V(umlal, Umlal)                    \
  V(umlal2, Umlal2)                  \
  V(umlsl, Umlsl)                    \
  V(umlsl2, Umlsl2)

#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)    \
  void MASM(const VRegister& vd,            \
            const VRegister& vn,            \
            const VRegister& vm,            \
            int vm_index) {                 \
    VIXL_ASSERT(allow_macro_instructions_); \
    SingleEmissionCheckScope guard(this);   \
    ASM(vd, vn, vm, vm_index);              \
  }
  NEON_BYELEMENT_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
2987
  // NEON 2 vector register shift-by-immediate instructions; the expansion
  // produces `void MASM(vd, vn, int shift)` methods.
#define NEON_2VREG_SHIFT_MACRO_LIST(V) \
  V(rshrn, Rshrn)                      \
  V(rshrn2, Rshrn2)                    \
  V(shl, Shl)                          \
  V(shll, Shll)                        \
  V(shll2, Shll2)                      \
  V(shrn, Shrn)                        \
  V(shrn2, Shrn2)                      \
  V(sli, Sli)                          \
  V(sqrshrn, Sqrshrn)                  \
  V(sqrshrn2, Sqrshrn2)                \
  V(sqrshrun, Sqrshrun)                \
  V(sqrshrun2, Sqrshrun2)              \
  V(sqshl, Sqshl)                      \
  V(sqshlu, Sqshlu)                    \
  V(sqshrn, Sqshrn)                    \
  V(sqshrn2, Sqshrn2)                  \
  V(sqshrun, Sqshrun)                  \
  V(sqshrun2, Sqshrun2)                \
  V(sri, Sri)                          \
  V(srshr, Srshr)                      \
  V(srsra, Srsra)                      \
  V(sshr, Sshr)                        \
  V(ssra, Ssra)                        \
  V(uqrshrn, Uqrshrn)                  \
  V(uqrshrn2, Uqrshrn2)                \
  V(uqshl, Uqshl)                      \
  V(uqshrn, Uqshrn)                    \
  V(uqshrn2, Uqshrn2)                  \
  V(urshr, Urshr)                      \
  V(ursra, Ursra)                      \
  V(ushr, Ushr)                        \
  V(usra, Usra)

#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)                        \
  void MASM(const VRegister& vd, const VRegister& vn, int shift) { \
    VIXL_ASSERT(allow_macro_instructions_);                     \
    SingleEmissionCheckScope guard(this);                       \
    ASM(vd, vn, shift);                                         \
  }
  NEON_2VREG_SHIFT_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
3030
  // NEON lengthening shift-by-immediate instructions. Each MASM method picks
  // between two encodings: when the shift equals the source lane size only
  // the SHLL form (ASM1) is encodable, otherwise the SSHLL/USHLL form (ASM2)
  // is used.
#define NEON_2VREG_SHIFT_LONG_MACRO_LIST(V) \
  V(shll, sshll, Sshll)                     \
  V(shll, ushll, Ushll)                     \
  V(shll2, sshll2, Sshll2)                  \
  V(shll2, ushll2, Ushll2)

#define DEFINE_MACRO_ASM_FUNC(ASM1, ASM2, MASM)                      \
  void MASM(const VRegister& vd, const VRegister& vn, int shift) {   \
    VIXL_ASSERT(allow_macro_instructions_);                          \
    SingleEmissionCheckScope guard(this);                            \
    if (vn.GetLaneSizeInBits() == static_cast<unsigned>(shift)) {    \
      ASM1(vd, vn, shift);                                           \
    } else {                                                         \
      ASM2(vd, vn, shift);                                           \
    }                                                                \
  }
  NEON_2VREG_SHIFT_LONG_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
3049
  // SVE 3 vector register instructions.
  // All ops listed here are commutative, which the expansion exploits: the
  // destructive SVE encoding requires zd == zn, so if zd aliases zm the
  // operands can simply be swapped; otherwise a movprfx is emitted first.
#define SVE_3VREG_COMMUTATIVE_MACRO_LIST(V) \
  V(add, Add)                               \
  V(and_, And)                              \
  V(bic, Bic)                               \
  V(eor, Eor)                               \
  V(mul, Mul)                               \
  V(orr, Orr)                               \
  V(sabd, Sabd)                             \
  V(smax, Smax)                             \
  V(smulh, Smulh)                           \
  V(smin, Smin)                             \
  V(uabd, Uabd)                             \
  V(umax, Umax)                             \
  V(umin, Umin)                             \
  V(umulh, Umulh)

#define DEFINE_MACRO_ASM_FUNC(ASM, MASM)           \
  void MASM(const ZRegister& zd,                   \
            const PRegisterM& pg,                  \
            const ZRegister& zn,                   \
            const ZRegister& zm) {                 \
    VIXL_ASSERT(allow_macro_instructions_);        \
    if (zd.Aliases(zn)) {                          \
      SingleEmissionCheckScope guard(this);        \
      ASM(zd, pg, zd, zm);                         \
    } else if (zd.Aliases(zm)) {                   \
      SingleEmissionCheckScope guard(this);        \
      ASM(zd, pg, zd, zn);                         \
    } else {                                       \
      MovprfxHelperScope guard(this, zd, pg, zn);  \
      ASM(zd, pg, zd, zm);                         \
    }                                              \
  }
  SVE_3VREG_COMMUTATIVE_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
#undef DEFINE_MACRO_ASM_FUNC
3086
3087 void Bic(const VRegister& vd, const int imm8, const int left_shift = 0) {
3088 VIXL_ASSERT(allow_macro_instructions_);
3089 SingleEmissionCheckScope guard(this);
3090 bic(vd, imm8, left_shift);
3091 }
Cmeq(const VRegister & vd,const VRegister & vn,int imm)3092 void Cmeq(const VRegister& vd, const VRegister& vn, int imm) {
3093 VIXL_ASSERT(allow_macro_instructions_);
3094 SingleEmissionCheckScope guard(this);
3095 cmeq(vd, vn, imm);
3096 }
Cmge(const VRegister & vd,const VRegister & vn,int imm)3097 void Cmge(const VRegister& vd, const VRegister& vn, int imm) {
3098 VIXL_ASSERT(allow_macro_instructions_);
3099 SingleEmissionCheckScope guard(this);
3100 cmge(vd, vn, imm);
3101 }
Cmgt(const VRegister & vd,const VRegister & vn,int imm)3102 void Cmgt(const VRegister& vd, const VRegister& vn, int imm) {
3103 VIXL_ASSERT(allow_macro_instructions_);
3104 SingleEmissionCheckScope guard(this);
3105 cmgt(vd, vn, imm);
3106 }
Cmle(const VRegister & vd,const VRegister & vn,int imm)3107 void Cmle(const VRegister& vd, const VRegister& vn, int imm) {
3108 VIXL_ASSERT(allow_macro_instructions_);
3109 SingleEmissionCheckScope guard(this);
3110 cmle(vd, vn, imm);
3111 }
Cmlt(const VRegister & vd,const VRegister & vn,int imm)3112 void Cmlt(const VRegister& vd, const VRegister& vn, int imm) {
3113 VIXL_ASSERT(allow_macro_instructions_);
3114 SingleEmissionCheckScope guard(this);
3115 cmlt(vd, vn, imm);
3116 }
Dup(const VRegister & vd,const VRegister & vn,int index)3117 void Dup(const VRegister& vd, const VRegister& vn, int index) {
3118 VIXL_ASSERT(allow_macro_instructions_);
3119 SingleEmissionCheckScope guard(this);
3120 dup(vd, vn, index);
3121 }
Dup(const VRegister & vd,const Register & rn)3122 void Dup(const VRegister& vd, const Register& rn) {
3123 VIXL_ASSERT(allow_macro_instructions_);
3124 SingleEmissionCheckScope guard(this);
3125 dup(vd, rn);
3126 }
Ext(const VRegister & vd,const VRegister & vn,const VRegister & vm,int index)3127 void Ext(const VRegister& vd,
3128 const VRegister& vn,
3129 const VRegister& vm,
3130 int index) {
3131 VIXL_ASSERT(allow_macro_instructions_);
3132 SingleEmissionCheckScope guard(this);
3133 ext(vd, vn, vm, index);
3134 }
Fcadd(const VRegister & vd,const VRegister & vn,const VRegister & vm,int rot)3135 void Fcadd(const VRegister& vd,
3136 const VRegister& vn,
3137 const VRegister& vm,
3138 int rot) {
3139 VIXL_ASSERT(allow_macro_instructions_);
3140 SingleEmissionCheckScope guard(this);
3141 fcadd(vd, vn, vm, rot);
3142 }
Fcmla(const VRegister & vd,const VRegister & vn,const VRegister & vm,int vm_index,int rot)3143 void Fcmla(const VRegister& vd,
3144 const VRegister& vn,
3145 const VRegister& vm,
3146 int vm_index,
3147 int rot) {
3148 VIXL_ASSERT(allow_macro_instructions_);
3149 SingleEmissionCheckScope guard(this);
3150 fcmla(vd, vn, vm, vm_index, rot);
3151 }
Fcmla(const VRegister & vd,const VRegister & vn,const VRegister & vm,int rot)3152 void Fcmla(const VRegister& vd,
3153 const VRegister& vn,
3154 const VRegister& vm,
3155 int rot) {
3156 VIXL_ASSERT(allow_macro_instructions_);
3157 SingleEmissionCheckScope guard(this);
3158 fcmla(vd, vn, vm, rot);
3159 }
Ins(const VRegister & vd,int vd_index,const VRegister & vn,int vn_index)3160 void Ins(const VRegister& vd,
3161 int vd_index,
3162 const VRegister& vn,
3163 int vn_index) {
3164 VIXL_ASSERT(allow_macro_instructions_);
3165 SingleEmissionCheckScope guard(this);
3166 ins(vd, vd_index, vn, vn_index);
3167 }
Ins(const VRegister & vd,int vd_index,const Register & rn)3168 void Ins(const VRegister& vd, int vd_index, const Register& rn) {
3169 VIXL_ASSERT(allow_macro_instructions_);
3170 SingleEmissionCheckScope guard(this);
3171 ins(vd, vd_index, rn);
3172 }
Ld1(const VRegister & vt,const MemOperand & src)3173 void Ld1(const VRegister& vt, const MemOperand& src) {
3174 VIXL_ASSERT(allow_macro_instructions_);
3175 SingleEmissionCheckScope guard(this);
3176 ld1(vt, src);
3177 }
Ld1(const VRegister & vt,const VRegister & vt2,const MemOperand & src)3178 void Ld1(const VRegister& vt, const VRegister& vt2, const MemOperand& src) {
3179 VIXL_ASSERT(allow_macro_instructions_);
3180 SingleEmissionCheckScope guard(this);
3181 ld1(vt, vt2, src);
3182 }
Ld1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & src)3183 void Ld1(const VRegister& vt,
3184 const VRegister& vt2,
3185 const VRegister& vt3,
3186 const MemOperand& src) {
3187 VIXL_ASSERT(allow_macro_instructions_);
3188 SingleEmissionCheckScope guard(this);
3189 ld1(vt, vt2, vt3, src);
3190 }
Ld1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & src)3191 void Ld1(const VRegister& vt,
3192 const VRegister& vt2,
3193 const VRegister& vt3,
3194 const VRegister& vt4,
3195 const MemOperand& src) {
3196 VIXL_ASSERT(allow_macro_instructions_);
3197 SingleEmissionCheckScope guard(this);
3198 ld1(vt, vt2, vt3, vt4, src);
3199 }
Ld1(const VRegister & vt,int lane,const MemOperand & src)3200 void Ld1(const VRegister& vt, int lane, const MemOperand& src) {
3201 VIXL_ASSERT(allow_macro_instructions_);
3202 SingleEmissionCheckScope guard(this);
3203 ld1(vt, lane, src);
3204 }
Ld1r(const VRegister & vt,const MemOperand & src)3205 void Ld1r(const VRegister& vt, const MemOperand& src) {
3206 VIXL_ASSERT(allow_macro_instructions_);
3207 SingleEmissionCheckScope guard(this);
3208 ld1r(vt, src);
3209 }
Ld2(const VRegister & vt,const VRegister & vt2,const MemOperand & src)3210 void Ld2(const VRegister& vt, const VRegister& vt2, const MemOperand& src) {
3211 VIXL_ASSERT(allow_macro_instructions_);
3212 SingleEmissionCheckScope guard(this);
3213 ld2(vt, vt2, src);
3214 }
Ld2(const VRegister & vt,const VRegister & vt2,int lane,const MemOperand & src)3215 void Ld2(const VRegister& vt,
3216 const VRegister& vt2,
3217 int lane,
3218 const MemOperand& src) {
3219 VIXL_ASSERT(allow_macro_instructions_);
3220 SingleEmissionCheckScope guard(this);
3221 ld2(vt, vt2, lane, src);
3222 }
Ld2r(const VRegister & vt,const VRegister & vt2,const MemOperand & src)3223 void Ld2r(const VRegister& vt, const VRegister& vt2, const MemOperand& src) {
3224 VIXL_ASSERT(allow_macro_instructions_);
3225 SingleEmissionCheckScope guard(this);
3226 ld2r(vt, vt2, src);
3227 }
Ld3(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & src)3228 void Ld3(const VRegister& vt,
3229 const VRegister& vt2,
3230 const VRegister& vt3,
3231 const MemOperand& src) {
3232 VIXL_ASSERT(allow_macro_instructions_);
3233 SingleEmissionCheckScope guard(this);
3234 ld3(vt, vt2, vt3, src);
3235 }
Ld3(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,int lane,const MemOperand & src)3236 void Ld3(const VRegister& vt,
3237 const VRegister& vt2,
3238 const VRegister& vt3,
3239 int lane,
3240 const MemOperand& src) {
3241 VIXL_ASSERT(allow_macro_instructions_);
3242 SingleEmissionCheckScope guard(this);
3243 ld3(vt, vt2, vt3, lane, src);
3244 }
Ld3r(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & src)3245 void Ld3r(const VRegister& vt,
3246 const VRegister& vt2,
3247 const VRegister& vt3,
3248 const MemOperand& src) {
3249 VIXL_ASSERT(allow_macro_instructions_);
3250 SingleEmissionCheckScope guard(this);
3251 ld3r(vt, vt2, vt3, src);
3252 }
Ld4(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & src)3253 void Ld4(const VRegister& vt,
3254 const VRegister& vt2,
3255 const VRegister& vt3,
3256 const VRegister& vt4,
3257 const MemOperand& src) {
3258 VIXL_ASSERT(allow_macro_instructions_);
3259 SingleEmissionCheckScope guard(this);
3260 ld4(vt, vt2, vt3, vt4, src);
3261 }
Ld4(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,int lane,const MemOperand & src)3262 void Ld4(const VRegister& vt,
3263 const VRegister& vt2,
3264 const VRegister& vt3,
3265 const VRegister& vt4,
3266 int lane,
3267 const MemOperand& src) {
3268 VIXL_ASSERT(allow_macro_instructions_);
3269 SingleEmissionCheckScope guard(this);
3270 ld4(vt, vt2, vt3, vt4, lane, src);
3271 }
Ld4r(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & src)3272 void Ld4r(const VRegister& vt,
3273 const VRegister& vt2,
3274 const VRegister& vt3,
3275 const VRegister& vt4,
3276 const MemOperand& src) {
3277 VIXL_ASSERT(allow_macro_instructions_);
3278 SingleEmissionCheckScope guard(this);
3279 ld4r(vt, vt2, vt3, vt4, src);
3280 }
Mov(const VRegister & vd,int vd_index,const VRegister & vn,int vn_index)3281 void Mov(const VRegister& vd,
3282 int vd_index,
3283 const VRegister& vn,
3284 int vn_index) {
3285 VIXL_ASSERT(allow_macro_instructions_);
3286 SingleEmissionCheckScope guard(this);
3287 mov(vd, vd_index, vn, vn_index);
3288 }
Mov(const VRegister & vd,const VRegister & vn,int index)3289 void Mov(const VRegister& vd, const VRegister& vn, int index) {
3290 VIXL_ASSERT(allow_macro_instructions_);
3291 SingleEmissionCheckScope guard(this);
3292 mov(vd, vn, index);
3293 }
Mov(const VRegister & vd,int vd_index,const Register & rn)3294 void Mov(const VRegister& vd, int vd_index, const Register& rn) {
3295 VIXL_ASSERT(allow_macro_instructions_);
3296 SingleEmissionCheckScope guard(this);
3297 mov(vd, vd_index, rn);
3298 }
Mov(const Register & rd,const VRegister & vn,int vn_index)3299 void Mov(const Register& rd, const VRegister& vn, int vn_index) {
3300 VIXL_ASSERT(allow_macro_instructions_);
3301 SingleEmissionCheckScope guard(this);
3302 mov(rd, vn, vn_index);
3303 }
  // Vector move immediate; defined out of line because the immediate may need
  // to be synthesized.
  void Movi(const VRegister& vd,
            uint64_t imm,
            Shift shift = LSL,
            int shift_amount = 0);
  void Movi(const VRegister& vd, uint64_t hi, uint64_t lo);
3309 void Mvni(const VRegister& vd,
3310 const int imm8,
3311 Shift shift = LSL,
3312 const int shift_amount = 0) {
3313 VIXL_ASSERT(allow_macro_instructions_);
3314 SingleEmissionCheckScope guard(this);
3315 mvni(vd, imm8, shift, shift_amount);
3316 }
3317 void Orr(const VRegister& vd, const int imm8, const int left_shift = 0) {
3318 VIXL_ASSERT(allow_macro_instructions_);
3319 SingleEmissionCheckScope guard(this);
3320 orr(vd, imm8, left_shift);
3321 }
3322 void Scvtf(const VRegister& vd, const VRegister& vn, int fbits = 0) {
3323 VIXL_ASSERT(allow_macro_instructions_);
3324 SingleEmissionCheckScope guard(this);
3325 scvtf(vd, vn, fbits);
3326 }
3327 void Ucvtf(const VRegister& vd, const VRegister& vn, int fbits = 0) {
3328 VIXL_ASSERT(allow_macro_instructions_);
3329 SingleEmissionCheckScope guard(this);
3330 ucvtf(vd, vn, fbits);
3331 }
3332 void Fcvtzs(const VRegister& vd, const VRegister& vn, int fbits = 0) {
3333 VIXL_ASSERT(allow_macro_instructions_);
3334 SingleEmissionCheckScope guard(this);
3335 fcvtzs(vd, vn, fbits);
3336 }
3337 void Fcvtzu(const VRegister& vd, const VRegister& vn, int fbits = 0) {
3338 VIXL_ASSERT(allow_macro_instructions_);
3339 SingleEmissionCheckScope guard(this);
3340 fcvtzu(vd, vn, fbits);
3341 }
St1(const VRegister & vt,const MemOperand & dst)3342 void St1(const VRegister& vt, const MemOperand& dst) {
3343 VIXL_ASSERT(allow_macro_instructions_);
3344 SingleEmissionCheckScope guard(this);
3345 st1(vt, dst);
3346 }
St1(const VRegister & vt,const VRegister & vt2,const MemOperand & dst)3347 void St1(const VRegister& vt, const VRegister& vt2, const MemOperand& dst) {
3348 VIXL_ASSERT(allow_macro_instructions_);
3349 SingleEmissionCheckScope guard(this);
3350 st1(vt, vt2, dst);
3351 }
St1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & dst)3352 void St1(const VRegister& vt,
3353 const VRegister& vt2,
3354 const VRegister& vt3,
3355 const MemOperand& dst) {
3356 VIXL_ASSERT(allow_macro_instructions_);
3357 SingleEmissionCheckScope guard(this);
3358 st1(vt, vt2, vt3, dst);
3359 }
St1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & dst)3360 void St1(const VRegister& vt,
3361 const VRegister& vt2,
3362 const VRegister& vt3,
3363 const VRegister& vt4,
3364 const MemOperand& dst) {
3365 VIXL_ASSERT(allow_macro_instructions_);
3366 SingleEmissionCheckScope guard(this);
3367 st1(vt, vt2, vt3, vt4, dst);
3368 }
St1(const VRegister & vt,int lane,const MemOperand & dst)3369 void St1(const VRegister& vt, int lane, const MemOperand& dst) {
3370 VIXL_ASSERT(allow_macro_instructions_);
3371 SingleEmissionCheckScope guard(this);
3372 st1(vt, lane, dst);
3373 }
St2(const VRegister & vt,const VRegister & vt2,const MemOperand & dst)3374 void St2(const VRegister& vt, const VRegister& vt2, const MemOperand& dst) {
3375 VIXL_ASSERT(allow_macro_instructions_);
3376 SingleEmissionCheckScope guard(this);
3377 st2(vt, vt2, dst);
3378 }
St3(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & dst)3379 void St3(const VRegister& vt,
3380 const VRegister& vt2,
3381 const VRegister& vt3,
3382 const MemOperand& dst) {
3383 VIXL_ASSERT(allow_macro_instructions_);
3384 SingleEmissionCheckScope guard(this);
3385 st3(vt, vt2, vt3, dst);
3386 }
St4(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & dst)3387 void St4(const VRegister& vt,
3388 const VRegister& vt2,
3389 const VRegister& vt3,
3390 const VRegister& vt4,
3391 const MemOperand& dst) {
3392 VIXL_ASSERT(allow_macro_instructions_);
3393 SingleEmissionCheckScope guard(this);
3394 st4(vt, vt2, vt3, vt4, dst);
3395 }
St2(const VRegister & vt,const VRegister & vt2,int lane,const MemOperand & dst)3396 void St2(const VRegister& vt,
3397 const VRegister& vt2,
3398 int lane,
3399 const MemOperand& dst) {
3400 VIXL_ASSERT(allow_macro_instructions_);
3401 SingleEmissionCheckScope guard(this);
3402 st2(vt, vt2, lane, dst);
3403 }
St3(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,int lane,const MemOperand & dst)3404 void St3(const VRegister& vt,
3405 const VRegister& vt2,
3406 const VRegister& vt3,
3407 int lane,
3408 const MemOperand& dst) {
3409 VIXL_ASSERT(allow_macro_instructions_);
3410 SingleEmissionCheckScope guard(this);
3411 st3(vt, vt2, vt3, lane, dst);
3412 }
St4(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,int lane,const MemOperand & dst)3413 void St4(const VRegister& vt,
3414 const VRegister& vt2,
3415 const VRegister& vt3,
3416 const VRegister& vt4,
3417 int lane,
3418 const MemOperand& dst) {
3419 VIXL_ASSERT(allow_macro_instructions_);
3420 SingleEmissionCheckScope guard(this);
3421 st4(vt, vt2, vt3, vt4, lane, dst);
3422 }
Smov(const Register & rd,const VRegister & vn,int vn_index)3423 void Smov(const Register& rd, const VRegister& vn, int vn_index) {
3424 VIXL_ASSERT(allow_macro_instructions_);
3425 SingleEmissionCheckScope guard(this);
3426 smov(rd, vn, vn_index);
3427 }
Umov(const Register & rd,const VRegister & vn,int vn_index)3428 void Umov(const Register& rd, const VRegister& vn, int vn_index) {
3429 VIXL_ASSERT(allow_macro_instructions_);
3430 SingleEmissionCheckScope guard(this);
3431 umov(rd, vn, vn_index);
3432 }
Crc32b(const Register & rd,const Register & rn,const Register & rm)3433 void Crc32b(const Register& rd, const Register& rn, const Register& rm) {
3434 VIXL_ASSERT(allow_macro_instructions_);
3435 SingleEmissionCheckScope guard(this);
3436 crc32b(rd, rn, rm);
3437 }
Crc32h(const Register & rd,const Register & rn,const Register & rm)3438 void Crc32h(const Register& rd, const Register& rn, const Register& rm) {
3439 VIXL_ASSERT(allow_macro_instructions_);
3440 SingleEmissionCheckScope guard(this);
3441 crc32h(rd, rn, rm);
3442 }
Crc32w(const Register & rd,const Register & rn,const Register & rm)3443 void Crc32w(const Register& rd, const Register& rn, const Register& rm) {
3444 VIXL_ASSERT(allow_macro_instructions_);
3445 SingleEmissionCheckScope guard(this);
3446 crc32w(rd, rn, rm);
3447 }
Crc32x(const Register & rd,const Register & rn,const Register & rm)3448 void Crc32x(const Register& rd, const Register& rn, const Register& rm) {
3449 VIXL_ASSERT(allow_macro_instructions_);
3450 SingleEmissionCheckScope guard(this);
3451 crc32x(rd, rn, rm);
3452 }
Crc32cb(const Register & rd,const Register & rn,const Register & rm)3453 void Crc32cb(const Register& rd, const Register& rn, const Register& rm) {
3454 VIXL_ASSERT(allow_macro_instructions_);
3455 SingleEmissionCheckScope guard(this);
3456 crc32cb(rd, rn, rm);
3457 }
Crc32ch(const Register & rd,const Register & rn,const Register & rm)3458 void Crc32ch(const Register& rd, const Register& rn, const Register& rm) {
3459 VIXL_ASSERT(allow_macro_instructions_);
3460 SingleEmissionCheckScope guard(this);
3461 crc32ch(rd, rn, rm);
3462 }
Crc32cw(const Register & rd,const Register & rn,const Register & rm)3463 void Crc32cw(const Register& rd, const Register& rn, const Register& rm) {
3464 VIXL_ASSERT(allow_macro_instructions_);
3465 SingleEmissionCheckScope guard(this);
3466 crc32cw(rd, rn, rm);
3467 }
Crc32cx(const Register & rd,const Register & rn,const Register & rm)3468 void Crc32cx(const Register& rd, const Register& rn, const Register& rm) {
3469 VIXL_ASSERT(allow_macro_instructions_);
3470 SingleEmissionCheckScope guard(this);
3471 crc32cx(rd, rn, rm);
3472 }
3473
3474 // Scalable Vector Extensions.
Abs(const ZRegister & zd,const PRegisterM & pg,const ZRegister & zn)3475 void Abs(const ZRegister& zd, const PRegisterM& pg, const ZRegister& zn) {
3476 VIXL_ASSERT(allow_macro_instructions_);
3477 SingleEmissionCheckScope guard(this);
3478 abs(zd, pg, zn);
3479 }
Add(const ZRegister & zd,const ZRegister & zn,const ZRegister & zm)3480 void Add(const ZRegister& zd, const ZRegister& zn, const ZRegister& zm) {
3481 VIXL_ASSERT(allow_macro_instructions_);
3482 SingleEmissionCheckScope guard(this);
3483 add(zd, zn, zm);
3484 }
// SVE: add an immediate. Delegates to AddSubHelper, which chooses how to
// encode/synthesise `imm` (no SingleEmissionCheckScope here because the
// helper may emit more than one instruction).
void Add(const ZRegister& zd, const ZRegister& zn, IntegerOperand imm) {
  VIXL_ASSERT(allow_macro_instructions_);
  AddSubHelper(kAddImmediate, zd, zn, imm);
}
3489 void Addpl(const Register& xd, const Register& xn, int64_t multiplier);
3490 void Addvl(const Register& xd, const Register& xn, int64_t multiplier);
3491 // Note that unlike the core ISA, SVE's `adr` is not PC-relative.
Adr(const ZRegister & zd,const SVEMemOperand & addr)3492 void Adr(const ZRegister& zd, const SVEMemOperand& addr) {
3493 VIXL_ASSERT(allow_macro_instructions_);
3494 SingleEmissionCheckScope guard(this);
3495 adr(zd, addr);
3496 }
And(const PRegisterWithLaneSize & pd,const PRegisterZ & pg,const PRegisterWithLaneSize & pn,const PRegisterWithLaneSize & pm)3497 void And(const PRegisterWithLaneSize& pd,
3498 const PRegisterZ& pg,
3499 const PRegisterWithLaneSize& pn,
3500 const PRegisterWithLaneSize& pm) {
3501 VIXL_ASSERT(allow_macro_instructions_);
3502 SingleEmissionCheckScope guard(this);
3503 and_(pd, pg, pn, pm);
3504 }
And(const ZRegister & zd,const ZRegister & zn,uint64_t imm)3505 void And(const ZRegister& zd, const ZRegister& zn, uint64_t imm) {
3506 VIXL_ASSERT(allow_macro_instructions_);
3507 SingleEmissionCheckScope guard(this);
3508 if (IsImmLogical(imm, zd.GetLaneSizeInBits())) {
3509 and_(zd, zn, imm);
3510 } else {
3511 // TODO: Synthesise the immediate once 'Mov' is implemented.
3512 VIXL_UNIMPLEMENTED();
3513 }
3514 }
And(const ZRegister & zd,const ZRegister & zn,const ZRegister & zm)3515 void And(const ZRegister& zd, const ZRegister& zn, const ZRegister& zm) {
3516 VIXL_ASSERT(allow_macro_instructions_);
3517 VIXL_ASSERT(AreSameLaneSize(zd, zn, zm));
3518 SingleEmissionCheckScope guard(this);
3519 and_(zd.VnD(), zn.VnD(), zm.VnD());
3520 }
Ands(const PRegisterWithLaneSize & pd,const PRegisterZ & pg,const PRegisterWithLaneSize & pn,const PRegisterWithLaneSize & pm)3521 void Ands(const PRegisterWithLaneSize& pd,
3522 const PRegisterZ& pg,
3523 const PRegisterWithLaneSize& pn,
3524 const PRegisterWithLaneSize& pm) {
3525 VIXL_ASSERT(allow_macro_instructions_);
3526 SingleEmissionCheckScope guard(this);
3527 ands(pd, pg, pn, pm);
3528 }
Andv(const VRegister & vd,const PRegister & pg,const ZRegister & zn)3529 void Andv(const VRegister& vd, const PRegister& pg, const ZRegister& zn) {
3530 VIXL_ASSERT(allow_macro_instructions_);
3531 SingleEmissionCheckScope guard(this);
3532 andv(vd, pg, zn);
3533 }
Asr(const ZRegister & zd,const PRegisterM & pg,const ZRegister & zn,int shift)3534 void Asr(const ZRegister& zd,
3535 const PRegisterM& pg,
3536 const ZRegister& zn,
3537 int shift) {
3538 VIXL_ASSERT(allow_macro_instructions_);
3539 MovprfxHelperScope guard(this, zd, pg, zn);
3540 asr(zd, pg, zd, shift);
3541 }
3542 void Asr(const ZRegister& zd,
3543 const PRegisterM& pg,
3544 const ZRegister& zn,
3545 const ZRegister& zm);
Asr(const ZRegister & zd,const ZRegister & zn,int shift)3546 void Asr(const ZRegister& zd, const ZRegister& zn, int shift) {
3547 VIXL_ASSERT(allow_macro_instructions_);
3548 SingleEmissionCheckScope guard(this);
3549 asr(zd, zn, shift);
3550 }
Asr(const ZRegister & zd,const ZRegister & zn,const ZRegister & zm)3551 void Asr(const ZRegister& zd, const ZRegister& zn, const ZRegister& zm) {
3552 VIXL_ASSERT(allow_macro_instructions_);
3553 SingleEmissionCheckScope guard(this);
3554 asr(zd, zn, zm);
3555 }
Asrd(const ZRegister & zd,const PRegisterM & pg,const ZRegister & zn,int shift)3556 void Asrd(const ZRegister& zd,
3557 const PRegisterM& pg,
3558 const ZRegister& zn,
3559 int shift) {
3560 VIXL_ASSERT(allow_macro_instructions_);
3561 MovprfxHelperScope guard(this, zd, pg, zn);
3562 asrd(zd, pg, zd, shift);
3563 }
Bic(const PRegisterWithLaneSize & pd,const PRegisterZ & pg,const PRegisterWithLaneSize & pn,const PRegisterWithLaneSize & pm)3564 void Bic(const PRegisterWithLaneSize& pd,
3565 const PRegisterZ& pg,
3566 const PRegisterWithLaneSize& pn,
3567 const PRegisterWithLaneSize& pm) {
3568 VIXL_ASSERT(allow_macro_instructions_);
3569 SingleEmissionCheckScope guard(this);
3570 bic(pd, pg, pn, pm);
3571 }
Bic(const ZRegister & zd,const ZRegister & zn,const ZRegister & zm)3572 void Bic(const ZRegister& zd, const ZRegister& zn, const ZRegister& zm) {
3573 VIXL_ASSERT(allow_macro_instructions_);
3574 VIXL_ASSERT(AreSameLaneSize(zd, zn, zm));
3575 SingleEmissionCheckScope guard(this);
3576 bic(zd.VnD(), zn.VnD(), zm.VnD());
3577 }
Bic(const ZRegister & zd,const ZRegister & zn,uint64_t imm)3578