• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2016, VIXL authors
2 // All rights reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are met:
6 //
7 //   * Redistributions of source code must retain the above copyright notice,
8 //     this list of conditions and the following disclaimer.
9 //   * Redistributions in binary form must reproduce the above copyright notice,
10 //     this list of conditions and the following disclaimer in the documentation
11 //     and/or other materials provided with the distribution.
12 //   * Neither the name of ARM Limited nor the names of its contributors may be
13 //     used to endorse or promote products derived from this software without
14 //     specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
20 // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23 // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 
27 #include "operands-aarch64.h"
28 
29 namespace vixl {
30 namespace aarch64 {
31 
32 // CPURegList utilities.
PopLowestIndex()33 CPURegister CPURegList::PopLowestIndex() {
34   if (IsEmpty()) {
35     return NoCPUReg;
36   }
37   int index = CountTrailingZeros(list_);
38   VIXL_ASSERT((1 << index) & list_);
39   Remove(index);
40   return CPURegister(index, size_, type_);
41 }
42 
43 
PopHighestIndex()44 CPURegister CPURegList::PopHighestIndex() {
45   VIXL_ASSERT(IsValid());
46   if (IsEmpty()) {
47     return NoCPUReg;
48   }
49   int index = CountLeadingZeros(list_);
50   index = kRegListSizeInBits - 1 - index;
51   VIXL_ASSERT((1 << index) & list_);
52   Remove(index);
53   return CPURegister(index, size_, type_);
54 }
55 
56 
IsValid() const57 bool CPURegList::IsValid() const {
58   if ((type_ == CPURegister::kRegister) || (type_ == CPURegister::kVRegister)) {
59     bool is_valid = true;
60     // Try to create a CPURegister for each element in the list.
61     for (int i = 0; i < kRegListSizeInBits; i++) {
62       if (((list_ >> i) & 1) != 0) {
63         is_valid &= CPURegister(i, size_, type_).IsValid();
64       }
65     }
66     return is_valid;
67   } else if (type_ == CPURegister::kNoRegister) {
68     // We can't use IsEmpty here because that asserts IsValid().
69     return list_ == 0;
70   } else {
71     return false;
72   }
73 }
74 
75 
RemoveCalleeSaved()76 void CPURegList::RemoveCalleeSaved() {
77   if (GetType() == CPURegister::kRegister) {
78     Remove(GetCalleeSaved(GetRegisterSizeInBits()));
79   } else if (GetType() == CPURegister::kVRegister) {
80     Remove(GetCalleeSavedV(GetRegisterSizeInBits()));
81   } else {
82     VIXL_ASSERT(GetType() == CPURegister::kNoRegister);
83     VIXL_ASSERT(IsEmpty());
84     // The list must already be empty, so do nothing.
85   }
86 }
87 
88 
// Three-list union, built from the binary overload.
CPURegList CPURegList::Union(const CPURegList& list_1,
                             const CPURegList& list_2,
                             const CPURegList& list_3) {
  CPURegList combined = Union(list_1, list_2);
  return Union(combined, list_3);
}
94 
95 
// Four-list union: combine pairwise, then merge the two halves.
CPURegList CPURegList::Union(const CPURegList& list_1,
                             const CPURegList& list_2,
                             const CPURegList& list_3,
                             const CPURegList& list_4) {
  CPURegList left = Union(list_1, list_2);
  CPURegList right = Union(list_3, list_4);
  return Union(left, right);
}
102 
103 
// Three-list intersection, built from the binary overload.
CPURegList CPURegList::Intersection(const CPURegList& list_1,
                                    const CPURegList& list_2,
                                    const CPURegList& list_3) {
  CPURegList combined = Intersection(list_1, list_2);
  return Intersection(combined, list_3);
}
109 
110 
// Four-list intersection: intersect pairwise, then merge the two halves.
CPURegList CPURegList::Intersection(const CPURegList& list_1,
                                    const CPURegList& list_2,
                                    const CPURegList& list_3,
                                    const CPURegList& list_4) {
  CPURegList left = Intersection(list_1, list_2);
  CPURegList right = Intersection(list_3, list_4);
  return Intersection(left, right);
}
118 
119 
GetCalleeSaved(unsigned size)120 CPURegList CPURegList::GetCalleeSaved(unsigned size) {
121   return CPURegList(CPURegister::kRegister, size, 19, 29);
122 }
123 
124 
GetCalleeSavedV(unsigned size)125 CPURegList CPURegList::GetCalleeSavedV(unsigned size) {
126   return CPURegList(CPURegister::kVRegister, size, 8, 15);
127 }
128 
129 
GetCallerSaved(unsigned size)130 CPURegList CPURegList::GetCallerSaved(unsigned size) {
131   // Registers x0-x18 and lr (x30) are caller-saved.
132   CPURegList list = CPURegList(CPURegister::kRegister, size, 0, 18);
133   // Do not use lr directly to avoid initialisation order fiasco bugs for users.
134   list.Combine(Register(30, kXRegSize));
135   return list;
136 }
137 
138 
GetCallerSavedV(unsigned size)139 CPURegList CPURegList::GetCallerSavedV(unsigned size) {
140   // Registers d0-d7 and d16-d31 are caller-saved.
141   CPURegList list = CPURegList(CPURegister::kVRegister, size, 0, 7);
142   list.Combine(CPURegList(CPURegister::kVRegister, size, 16, 31));
143   return list;
144 }
145 
146 
// Pre-built AAPCS64 register lists, using the default register sizes.
const CPURegList kCalleeSaved = CPURegList::GetCalleeSaved();
const CPURegList kCalleeSavedV = CPURegList::GetCalleeSavedV();
const CPURegList kCallerSaved = CPURegList::GetCallerSaved();
const CPURegList kCallerSavedV = CPURegList::GetCallerSavedV();
151 
152 
// Registers.
//
// Canonical register objects, indexed by register code. Each table is built
// by expanding AARCH64_REGISTER_CODE_LIST with a macro that names one
// register per code (w0, w1, ... / x0, x1, ... and so on).
#define WREG(n) w##n,
const Register Register::wregisters[] = {AARCH64_REGISTER_CODE_LIST(WREG)};
#undef WREG

#define XREG(n) x##n,
const Register Register::xregisters[] = {AARCH64_REGISTER_CODE_LIST(XREG)};
#undef XREG

#define BREG(n) b##n,
const VRegister VRegister::bregisters[] = {AARCH64_REGISTER_CODE_LIST(BREG)};
#undef BREG

#define HREG(n) h##n,
const VRegister VRegister::hregisters[] = {AARCH64_REGISTER_CODE_LIST(HREG)};
#undef HREG

#define SREG(n) s##n,
const VRegister VRegister::sregisters[] = {AARCH64_REGISTER_CODE_LIST(SREG)};
#undef SREG

#define DREG(n) d##n,
const VRegister VRegister::dregisters[] = {AARCH64_REGISTER_CODE_LIST(DREG)};
#undef DREG

#define QREG(n) q##n,
const VRegister VRegister::qregisters[] = {AARCH64_REGISTER_CODE_LIST(QREG)};
#undef QREG

#define VREG(n) v##n,
const VRegister VRegister::vregisters[] = {AARCH64_REGISTER_CODE_LIST(VREG)};
#undef VREG
185 
186 
GetWRegFromCode(unsigned code)187 const Register& Register::GetWRegFromCode(unsigned code) {
188   if (code == kSPRegInternalCode) {
189     return wsp;
190   } else {
191     VIXL_ASSERT(code < kNumberOfRegisters);
192     return wregisters[code];
193   }
194 }
195 
196 
GetXRegFromCode(unsigned code)197 const Register& Register::GetXRegFromCode(unsigned code) {
198   if (code == kSPRegInternalCode) {
199     return sp;
200   } else {
201     VIXL_ASSERT(code < kNumberOfRegisters);
202     return xregisters[code];
203   }
204 }
205 
206 
// Maps a register code to the canonical B-sized (8-bit) V register object.
const VRegister& VRegister::GetBRegFromCode(unsigned code) {
  VIXL_ASSERT(code < kNumberOfVRegisters);
  return bregisters[code];
}
211 
212 
// Maps a register code to the canonical H-sized (16-bit) V register object.
const VRegister& VRegister::GetHRegFromCode(unsigned code) {
  VIXL_ASSERT(code < kNumberOfVRegisters);
  return hregisters[code];
}
217 
218 
// Maps a register code to the canonical S-sized (32-bit) V register object.
const VRegister& VRegister::GetSRegFromCode(unsigned code) {
  VIXL_ASSERT(code < kNumberOfVRegisters);
  return sregisters[code];
}
223 
224 
// Maps a register code to the canonical D-sized (64-bit) V register object.
const VRegister& VRegister::GetDRegFromCode(unsigned code) {
  VIXL_ASSERT(code < kNumberOfVRegisters);
  return dregisters[code];
}
229 
230 
// Maps a register code to the canonical Q-sized (128-bit) V register object.
const VRegister& VRegister::GetQRegFromCode(unsigned code) {
  VIXL_ASSERT(code < kNumberOfVRegisters);
  return qregisters[code];
}
235 
236 
// Maps a register code to the canonical V register object.
const VRegister& VRegister::GetVRegFromCode(unsigned code) {
  VIXL_ASSERT(code < kNumberOfVRegisters);
  return vregisters[code];
}
241 
242 
// Returns the W (32-bit general-purpose) alias of this register.
const Register& CPURegister::W() const {
  VIXL_ASSERT(IsValidRegister());
  return Register::GetWRegFromCode(code_);
}
247 
248 
// Returns the X (64-bit general-purpose) alias of this register.
const Register& CPURegister::X() const {
  VIXL_ASSERT(IsValidRegister());
  return Register::GetXRegFromCode(code_);
}
253 
254 
// Returns the B (8-bit) alias of this V register.
const VRegister& CPURegister::B() const {
  VIXL_ASSERT(IsValidVRegister());
  return VRegister::GetBRegFromCode(code_);
}
259 
260 
// Returns the H (16-bit) alias of this V register.
const VRegister& CPURegister::H() const {
  VIXL_ASSERT(IsValidVRegister());
  return VRegister::GetHRegFromCode(code_);
}
265 
266 
// Returns the S (32-bit) alias of this V register.
const VRegister& CPURegister::S() const {
  VIXL_ASSERT(IsValidVRegister());
  return VRegister::GetSRegFromCode(code_);
}
271 
272 
// Returns the D (64-bit) alias of this V register.
const VRegister& CPURegister::D() const {
  VIXL_ASSERT(IsValidVRegister());
  return VRegister::GetDRegFromCode(code_);
}
277 
278 
// Returns the Q (128-bit) alias of this V register.
const VRegister& CPURegister::Q() const {
  VIXL_ASSERT(IsValidVRegister());
  return VRegister::GetQRegFromCode(code_);
}
283 
284 
// Returns the canonical V alias of this V register.
const VRegister& CPURegister::V() const {
  VIXL_ASSERT(IsValidVRegister());
  return VRegister::GetVRegFromCode(code_);
}
289 
290 
291 // Operand.
// Immediate operand: wraps a 64-bit immediate with no register, shift or
// extend attached.
Operand::Operand(int64_t immediate)
    : immediate_(immediate),
      reg_(NoReg),
      shift_(NO_SHIFT),
      extend_(NO_EXTEND),
      shift_amount_(0) {}
298 
299 
// Shifted-register operand: `reg, <shift> #shift_amount`.
// NOTE(review): immediate_ is deliberately left uninitialised; it is only
// meaningful when IsImmediate() is true — confirm GetImmediate() rejects
// non-immediate operands.
Operand::Operand(Register reg, Shift shift, unsigned shift_amount)
    : reg_(reg),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  // MSL is a vector-immediate shift, never valid on a register operand.
  VIXL_ASSERT(shift != MSL);
  // The shift amount must fit the register width (W: 0-31, X: 0-63).
  VIXL_ASSERT(reg.Is64Bits() || (shift_amount < kWRegSize));
  VIXL_ASSERT(reg.Is32Bits() || (shift_amount < kXRegSize));
  VIXL_ASSERT(!reg.IsSP());
}
310 
311 
// Extended-register operand: `reg, <extend> #shift_amount`.
Operand::Operand(Register reg, Extend extend, unsigned shift_amount)
    : reg_(reg),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  VIXL_ASSERT(reg.IsValid());
  // Extended-register forms only allow a left shift of up to four bits.
  VIXL_ASSERT(shift_amount <= 4);
  VIXL_ASSERT(!reg.IsSP());

  // Extend modes SXTX and UXTX require a 64-bit register.
  VIXL_ASSERT(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
}
324 
325 
// An operand is an immediate iff it carries no register.
bool Operand::IsImmediate() const { return reg_.Is(NoReg); }
327 
328 
// True if this operand behaves like a bare register: either no shift/extend
// was requested, or the requested shift/extend is a no-op.
bool Operand::IsPlainRegister() const {
  return reg_.IsValid() &&
         (((shift_ == NO_SHIFT) && (extend_ == NO_EXTEND)) ||
          // No-op shifts.
          ((shift_ != NO_SHIFT) && (shift_amount_ == 0)) ||
          // No-op extend operations.
          // NOTE(review): [US]XTW is treated as a no-op for W registers here;
          // whether that holds depends on the consuming instruction's operand
          // width — confirm against callers.
          ((extend_ == UXTX) || (extend_ == SXTX) ||
           (reg_.IsW() && ((extend_ == UXTW) || (extend_ == SXTW)))));
}
338 
339 
// True if this operand is a register with an explicit shift mode.
bool Operand::IsShiftedRegister() const {
  return reg_.IsValid() && (shift_ != NO_SHIFT);
}
343 
344 
// True if this operand is a register with an explicit extend mode.
bool Operand::IsExtendedRegister() const {
  return reg_.IsValid() && (extend_ != NO_EXTEND);
}
348 
349 
IsZero() const350 bool Operand::IsZero() const {
351   if (IsImmediate()) {
352     return GetImmediate() == 0;
353   } else {
354     return GetRegister().IsZero();
355   }
356 }
357 
358 
ToExtendedRegister() const359 Operand Operand::ToExtendedRegister() const {
360   VIXL_ASSERT(IsShiftedRegister());
361   VIXL_ASSERT((shift_ == LSL) && (shift_amount_ <= 4));
362   return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
363 }
364 
365 
366 // MemOperand
MemOperand()367 MemOperand::MemOperand()
368     : base_(NoReg),
369       regoffset_(NoReg),
370       offset_(0),
371       addrmode_(Offset),
372       shift_(NO_SHIFT),
373       extend_(NO_EXTEND) {}
374 
375 
// Immediate-offset addressing: `[base, #offset]`, or pre/post-index when
// addrmode says so.
MemOperand::MemOperand(Register base, int64_t offset, AddrMode addrmode)
    : base_(base),
      regoffset_(NoReg),
      offset_(offset),
      addrmode_(addrmode),
      shift_(NO_SHIFT),
      extend_(NO_EXTEND),
      shift_amount_(0) {
  // The base must be a 64-bit register other than xzr (sp is allowed).
  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());
}
386 
387 
// Register-offset addressing with an extend: `[base, regoffset, <extend>
// #shift_amount]`.
MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Extend extend,
                       unsigned shift_amount)
    : base_(base),
      regoffset_(regoffset),
      offset_(0),
      addrmode_(Offset),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  // The base must be a 64-bit register other than xzr (sp is allowed).
  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());
  VIXL_ASSERT(!regoffset.IsSP());
  // Only these extend modes are valid for a register offset.
  VIXL_ASSERT((extend == UXTW) || (extend == SXTW) || (extend == SXTX));

  // SXTX extend mode requires a 64-bit offset register.
  VIXL_ASSERT(regoffset.Is64Bits() || (extend != SXTX));
}
406 
407 
// Register-offset addressing with a shift: `[base, regoffset, LSL
// #shift_amount]`.
MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Shift shift,
                       unsigned shift_amount)
    : base_(base),
      regoffset_(regoffset),
      offset_(0),
      addrmode_(Offset),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  // The base must be a 64-bit register other than xzr (sp is allowed).
  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());
  VIXL_ASSERT(regoffset.Is64Bits() && !regoffset.IsSP());
  // LSL is the only shift supported in addressing modes.
  VIXL_ASSERT(shift == LSL);
}
423 
424 
// Builds a MemOperand from a base register and a generic Operand offset.
// The offset may be an immediate, a shifted register, or an extended
// register; the relevant fields are copied out of the Operand.
MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
    : base_(base),
      regoffset_(NoReg),
      addrmode_(addrmode),
      shift_(NO_SHIFT),
      extend_(NO_EXTEND),
      shift_amount_(0) {
  // The base must be a 64-bit register other than xzr (sp is allowed).
  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());

  if (offset.IsImmediate()) {
    offset_ = offset.GetImmediate();
  } else if (offset.IsShiftedRegister()) {
    // Shifted register offsets only support Offset and PostIndex modes.
    VIXL_ASSERT((addrmode == Offset) || (addrmode == PostIndex));

    regoffset_ = offset.GetRegister();
    shift_ = offset.GetShift();
    shift_amount_ = offset.GetShiftAmount();

    extend_ = NO_EXTEND;
    offset_ = 0;

    // These assertions match those in the shifted-register constructor.
    VIXL_ASSERT(regoffset_.Is64Bits() && !regoffset_.IsSP());
    VIXL_ASSERT(shift_ == LSL);
  } else {
    VIXL_ASSERT(offset.IsExtendedRegister());
    // Extended register offsets only support the plain Offset mode.
    VIXL_ASSERT(addrmode == Offset);

    regoffset_ = offset.GetRegister();
    extend_ = offset.GetExtend();
    shift_amount_ = offset.GetShiftAmount();

    shift_ = NO_SHIFT;
    offset_ = 0;

    // These assertions match those in the extended-register constructor.
    VIXL_ASSERT(!regoffset_.IsSP());
    VIXL_ASSERT((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));
    VIXL_ASSERT((regoffset_.Is64Bits() || (extend_ != SXTX)));
  }
}
466 
467 
// True for `[base, #imm]` addressing (no offset register, no indexing).
bool MemOperand::IsImmediateOffset() const {
  return (addrmode_ == Offset) && regoffset_.Is(NoReg);
}
471 
472 
// True for `[base, regoffset ...]` addressing (no indexing).
bool MemOperand::IsRegisterOffset() const {
  return (addrmode_ == Offset) && !regoffset_.Is(NoReg);
}
476 
477 
// True for pre-index addressing: `[base, #imm]!`.
bool MemOperand::IsPreIndex() const { return addrmode_ == PreIndex; }
479 
480 
// True for post-index addressing: `[base], #imm`.
bool MemOperand::IsPostIndex() const { return addrmode_ == PostIndex; }
482 
483 
// Adds `offset` to the immediate offset; only valid for immediate-offset
// addressing.
void MemOperand::AddOffset(int64_t offset) {
  VIXL_ASSERT(IsImmediateOffset());
  offset_ += offset;
}
488 
489 
// Wraps a register as a generic operand; mem_op_size_ of 0 marks the
// register variant.
GenericOperand::GenericOperand(const CPURegister& reg)
    : cpu_register_(reg), mem_op_size_(0) {
  if (reg.IsQ()) {
    VIXL_ASSERT(reg.GetSizeInBits() > static_cast<int>(kXRegSize));
    // Support for Q registers is not implemented yet.
    VIXL_UNIMPLEMENTED();
  }
}
498 
499 
// Wraps a memory location (of `mem_op_size` bytes) as a generic operand.
GenericOperand::GenericOperand(const MemOperand& mem_op, size_t mem_op_size)
    : cpu_register_(NoReg), mem_op_(mem_op), mem_op_size_(mem_op_size) {
  if (mem_op_size_ > kXRegSizeInBytes) {
    // We only support generic operands up to the size of X registers.
    VIXL_UNIMPLEMENTED();
  }
}
507 
Equals(const GenericOperand & other) const508 bool GenericOperand::Equals(const GenericOperand& other) const {
509   if (!IsValid() || !other.IsValid()) {
510     // Two invalid generic operands are considered equal.
511     return !IsValid() && !other.IsValid();
512   }
513   if (IsCPURegister() && other.IsCPURegister()) {
514     return GetCPURegister().Is(other.GetCPURegister());
515   } else if (IsMemOperand() && other.IsMemOperand()) {
516     return GetMemOperand().Equals(other.GetMemOperand()) &&
517            (GetMemOperandSizeInBytes() == other.GetMemOperandSizeInBytes());
518   }
519   return false;
520 }
521 }
522 }  // namespace vixl::aarch64
523