/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <iostream>
#include <type_traits>

#include "jni_macro_assembler_arm_vixl.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"

using namespace vixl::aarch32;  // NOLINT(build/namespaces)
namespace vixl32 = vixl::aarch32;

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

namespace art {
namespace arm {

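// "___" forwards to the wrapped VIXL macro assembler, e.g. "___ Push(r)"
// expands to asm_.GetVIXLAssembler()->Push(r).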
#ifdef ___
#error "ARM Assembler macro already defined."
#else
#define ___   asm_.GetVIXLAssembler()->
#endif

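// Emit the deferred exception-poll slow paths, then finalize the underlying
// VIXL code buffer.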
void ArmVIXLJNIMacroAssembler::FinalizeCode() {
  for (const std::unique_ptr<
      ArmVIXLJNIMacroAssembler::ArmException>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  asm_.FinalizeCode();
}

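// Map core and FP registers to their DWARF register numbers for CFI emission.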
static dwarf::Reg DWARFReg(vixl32::Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg.GetCode()));
}

static dwarf::Reg DWARFReg(vixl32::SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg.GetCode()));
}

static constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);

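// Emit the managed-to-native frame: push LR and the callee-save core registers,
// vpush the (contiguous) callee-save S-registers, grow the frame to frame_size,
// store the Method* (held in r0) at [sp, #0], and spill incoming arguments to
// their slots above the frame. As an illustration, frame_size = 64 with callee
// saves {r5, r6} pushes {r5, r6, lr} (12 bytes) and then extends the frame by
// the remaining 52 bytes.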
void ArmVIXLJNIMacroAssembler::BuildFrame(size_t frame_size,
                                          ManagedRegister method_reg,
                                          ArrayRef<const ManagedRegister> callee_save_regs,
                                          const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK(r0.Is(method_reg.AsArm().AsVIXLRegister()));

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  ___ Push(RegisterList(core_spill_mask));
  cfi().AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi().RelOffsetForMany(DWARFReg(r0), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);

    // Check that list is contiguous.
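    // (After shifting out the trailing zeros, a contiguous mask leaves exactly
    // POPCOUNT(fp_spill_mask) low bits set.)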
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(s0), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  // Must at least have space for Method*.
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // Handles CFI as well.

  // Write out Method*.
  asm_.StoreToOffset(kStoreWord, r0, sp, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ArmManagedRegister reg = entry_spills.at(i).AsArm();
    if (reg.IsNoRegister()) {
      // Only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      asm_.StoreToOffset(kStoreWord, reg.AsVIXLRegister(), sp, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      asm_.StoreSToOffset(reg.AsVIXLSRegister(), sp, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      asm_.StoreDToOffset(reg.AsVIXLDRegister(), sp, offset);
      offset += 8;
    }
  }
}

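// Tear down the frame built by BuildFrame: shrink the frame back to the callee
// saves, pop the FP registers, then pop the core registers with PC in place of
// LR, which returns to the caller.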
void ArmVIXLJNIMacroAssembler::RemoveFrame(size_t frame_size,
                                           ArrayRef<const ManagedRegister> callee_save_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi().RememberState();

  // Compute callee saves to pop and PC.
  RegList core_spill_mask = 1 << PC;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // Handles CFI as well.

  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);
    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi().RestoreMany(DWARFReg(s0), fp_spill_mask);
  }

  // Pop callee saves and PC.
  ___ Pop(RegisterList(core_spill_mask));

  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}


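// Adjust SP and keep the CFI's CFA offset in sync; adjust is in bytes.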
void ArmVIXLJNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, -adjust);
  cfi().AdjustCFAOffset(adjust);
}

void ArmVIXLJNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

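// Store a managed register to a stack slot, dispatching on the register kind;
// size must match the register width (4 for core/S, 8 for pair/D).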
void ArmVIXLJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister m_src, size_t size) {
  ArmManagedRegister src = m_src.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(src.AsVIXLRegister());
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairLow(),  sp, dest.Int32Value());
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairHigh(), sp, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    CHECK_EQ(4u, size);
    asm_.StoreSToOffset(src.AsVIXLSRegister(), sp, dest.Int32Value());
  } else {
    CHECK_EQ(8u, size);
    CHECK(src.IsDRegister()) << src;
    asm_.StoreDToOffset(src.AsVIXLDRegister(), sp, dest.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src.AsVIXLRegister());
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src.AsVIXLRegister());
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
}

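// Store src at dest and copy the word at in_off into dest + 4, so the two
// words starting at dest form a single 64-bit value.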
void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
                                             ManagedRegister msrc,
                                             FrameOffset in_off,
                                             ManagedRegister mscratch) {
  ArmManagedRegister src = msrc.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, in_off.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, dest.Int32Value() + 4);
}

void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest,
                                       FrameOffset src,
                                       ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, src.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest,
                                       ManagedRegister base,
                                       MemberOffset offs,
                                       bool unpoison_reference) {
  ArmManagedRegister dst = dest.AsArm();
  CHECK(dst.IsCoreRegister() && base.AsArm().IsCoreRegister()) << dst;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dst.AsVIXLRegister(), base.AsArm().AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord,
                      dst.AsVIXLRegister(),
                      base.AsArm().AsVIXLRegister(),
                      offs.Int32Value());

  if (unpoison_reference) {
    asm_.MaybeUnpoisonHeapReference(dst.AsVIXLRegister());
  }
}


void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       FrameOffset src ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtr(ManagedRegister dest ATTRIBUTE_UNUSED,
                                          ManagedRegister base ATTRIBUTE_UNUSED,
                                          Offset offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest,
                                                     uint32_t imm,
                                                     ManagedRegister scratch) {
  ArmManagedRegister mscratch = scratch.AsArm();
  CHECK(mscratch.IsCoreRegister()) << mscratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(mscratch.AsVIXLRegister());
  asm_.LoadImmediate(mscratch.AsVIXLRegister(), imm);
  asm_.StoreToOffset(kStoreWord, mscratch.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm(), sp, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
                                              ThreadOffset32 src,
                                              size_t size) {
  return Load(m_dst.AsArm(), tr, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister m_dst, ThreadOffset32 offs) {
  ArmManagedRegister dst = m_dst.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dst.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, dst.AsVIXLRegister(), tr, offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs,
                                                    ThreadOffset32 thr_offs,
                                                    ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), tr, thr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, fr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs ATTRIBUTE_UNUSED,
                                                  FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                                  ManagedRegister mscratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
                                                        FrameOffset fr_offs,
                                                        ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.AddConstant(scratch.AsVIXLRegister(), sp, fr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  asm_.StoreToOffset(kStoreWord, sp, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

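// Register-to-register move, dispatching on the destination kind. The pair
// case orders the two word moves so the first cannot clobber the second's
// source register.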
void ArmVIXLJNIMacroAssembler::Move(ManagedRegister m_dst,
                                    ManagedRegister m_src,
                                    size_t size ATTRIBUTE_UNUSED) {
  ArmManagedRegister dst = m_dst.AsArm();
  ArmManagedRegister src = m_src.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
      temps.Exclude(dst.AsVIXLRegister());
      ___ Mov(dst.AsVIXLRegister(), src.AsVIXLRegister());
    } else if (dst.IsDRegister()) {
      if (src.IsDRegister()) {
        ___ Vmov(F64, dst.AsVIXLDRegister(), src.AsVIXLDRegister());
      } else {
        // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi})
        CHECK(src.IsRegisterPair()) << src;
        ___ Vmov(dst.AsVIXLDRegister(), src.AsVIXLRegisterPairLow(), src.AsVIXLRegisterPairHigh());
      }
    } else if (dst.IsSRegister()) {
      if (src.IsSRegister()) {
        ___ Vmov(F32, dst.AsVIXLSRegister(), src.AsVIXLSRegister());
      } else {
        // VMOV Sn, Rn  (Sn = Rn)
        CHECK(src.IsCoreRegister()) << src;
        ___ Vmov(dst.AsVIXLSRegister(), src.AsVIXLRegister());
      }
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        ___ Mov(dst.AsVIXLRegisterPairLow(),  src.AsVIXLRegisterPairLow());
        ___ Mov(dst.AsVIXLRegisterPairHigh(), src.AsVIXLRegisterPairHigh());
      } else {
        ___ Mov(dst.AsVIXLRegisterPairHigh(), src.AsVIXLRegisterPairHigh());
        ___ Mov(dst.AsVIXLRegisterPairLow(),  src.AsVIXLRegisterPairLow());
      }
    }
  }
}

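// Copy 4 or 8 bytes between stack slots through a core scratch register; the
// 8-byte case is done as two independent word copies.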
void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest,
                                    FrameOffset src,
                                    ManagedRegister scratch,
                                    size_t size) {
  ArmManagedRegister temp = scratch.AsArm();
  CHECK(temp.IsCoreRegister()) << temp;
  CHECK(size == 4 || size == 8) << size;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(temp.AsVIXLRegister());
  if (size == 4) {
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value());
  } else if (size == 8) {
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value());
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value() + 4);
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value() + 4);
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
                                    ManagedRegister src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest_base ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    FrameOffset src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister scratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

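// Materialize a handle scope entry address (or null) into out_reg. With
// null_allowed, the result is 0 for a null reference and SP + handle_scope_offset
// otherwise, selected with a conditional IT sequence.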
void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister min_reg,
                                                      bool null_allowed) {
  ArmManagedRegister out_reg = mout_reg.AsArm();
  ArmManagedRegister in_reg = min_reg.AsArm();
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister()) << in_reg;
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(out_reg.AsVIXLRegister());
  if (null_allowed) {
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      asm_.LoadFromOffset(kLoadWord,
                          out_reg.AsVIXLRegister(),
                          sp,
                          handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }

    temps.Exclude(in_reg.AsVIXLRegister());
    ___ Cmp(in_reg.AsVIXLRegister(), 0);

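    // The raw it() masks encode the IT pattern: 0xc is ITE (the next two
    // instructions execute on eq/ne respectively) and 0x8 is a plain IT with a
    // single conditional instruction.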
    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value(), kCcDontCare)) {
      if (!out_reg.Equals(in_reg)) {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 3 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(eq, 0xc);
        ___ mov(eq, out_reg.AsVIXLRegister(), 0);
        asm_.AddConstantInIt(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
      } else {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 2 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(ne, 0x8);
        asm_.AddConstantInIt(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
      }
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
  }
}

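// Same as above, but the computed entry address (or null) is written back to a
// stack slot rather than returned in a register.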
void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister mscratch,
                                                      bool null_allowed) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  if (null_allowed) {
    asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(scratch.AsVIXLRegister(), 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value(), kCcDontCare)) {
      ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                               2 * vixl32::kMaxInstructionSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
      ___ it(ne, 0x8);
      asm_.AddConstantInIt(scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
  }
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, out_off.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadReferenceFromHandleScope(
    ManagedRegister mout_reg ATTRIBUTE_UNUSED,
    ManagedRegister min_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

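// Indirect call: load the code pointer from [base + offset] into scratch and
// branch-with-link to it.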
void ArmVIXLJNIMacroAssembler::Call(ManagedRegister mbase,
                                    Offset offset,
                                    ManagedRegister mscratch) {
  ArmManagedRegister base = mbase.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      base.AsVIXLRegister(),
                      offset.Int32Value());
  ___ Blx(scratch.AsVIXLRegister());
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  // Call *(*(SP + base) + offset)
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, base.Int32Value());
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      scratch.AsVIXLRegister(),
                      offset.Int32Value());
  ___ Blx(scratch.AsVIXLRegister());
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::CallFromThread(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                              ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(ManagedRegister mtr) {
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(mtr.AsArm().AsVIXLRegister());
  ___ Mov(mtr.AsArm().AsVIXLRegister(), tr);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(FrameOffset dest_offset,
                                                ManagedRegister scratch ATTRIBUTE_UNUSED) {
  asm_.StoreToOffset(kStoreWord, tr, sp, dest_offset.Int32Value());
}

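// Poll for a pending exception: load Thread::exception_ and, if it is
// non-null, branch to a slow path that is emitted later by FinalizeCode().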
void ArmVIXLJNIMacroAssembler::ExceptionPoll(ManagedRegister m_scratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  ArmManagedRegister scratch = m_scratch.AsArm();
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  exception_blocks_.emplace_back(
      new ArmVIXLJNIMacroAssembler::ArmException(scratch, stack_adjust));
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      tr,
                      Thread::ExceptionOffset<kArmPointerSize>().Int32Value());

  ___ Cmp(scratch.AsVIXLRegister(), 0);
  {
    ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                             vixl32::kMaxInstructionSizeInBytes,
                             CodeBufferCheckScope::kMaximumSize);
    vixl32::Label* label = exception_blocks_.back()->Entry();
    ___ b(ne, Narrow, label);
    ___ AddBranchLabel(label);
  }
  // TODO: think about using CBNZ here.
}

std::unique_ptr<JNIMacroLabel> ArmVIXLJNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new ArmVIXLJNIMacroLabel());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ B(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

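// Conditional jump on a register being zero or non-zero, using the VIXL
// compare-and-branch helpers.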
void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label,
                                    JNIMacroUnaryCondition condition,
                                    ManagedRegister test) {
  CHECK(label != nullptr);

  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(test.AsArm().AsVIXLRegister());
  switch (condition) {
    case JNIMacroUnaryCondition::kZero:
      ___ CompareAndBranchIfZero(test.AsArm().AsVIXLRegister(),
                                 ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    case JNIMacroUnaryCondition::kNotZero:
      ___ CompareAndBranchIfNonZero(test.AsArm().AsVIXLRegister(),
                                    ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    default:
      LOG(FATAL) << "Not implemented unary condition: " << static_cast<int>(condition);
      UNREACHABLE();
  }
}

void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

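// Slow path for ExceptionPoll: unwind any extra stack adjustment, move the
// exception object into r0 and call the pDeliverException entrypoint, which
// does not return.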
void ArmVIXLJNIMacroAssembler::EmitExceptionPoll(
    ArmVIXLJNIMacroAssembler::ArmException* exception) {
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }

  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(exception->scratch_.AsVIXLRegister());
  // Pass exception object as argument.
  // Don't care about preserving r0 as this won't return.
  ___ Mov(r0, exception->scratch_.AsVIXLRegister());
  temps.Include(exception->scratch_.AsVIXLRegister());
  // TODO: check that exception->scratch_ is dead by this point.
  vixl32::Register temp = temps.Acquire();
  ___ Ldr(temp,
          MemOperand(tr,
              QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value()));
  ___ Blx(temp);
}

void ArmVIXLJNIMacroAssembler::MemoryBarrier(ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

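// Width-dispatched load used by the public Load()/LoadFromThread() wrappers;
// base is typically SP or the thread register.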
void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
                                    vixl32::Register base,
                                    int32_t offset,
                                    size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsCoreRegister()) {
    CHECK(!dest.AsVIXLRegister().Is(sp)) << dest;

    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(dest.AsVIXLRegister());

    if (size == 1u) {
      ___ Ldrb(dest.AsVIXLRegister(), MemOperand(base, offset));
    } else {
      CHECK_EQ(4u, size) << dest;
      ___ Ldr(dest.AsVIXLRegister(), MemOperand(base, offset));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dest;
    ___ Ldr(dest.AsVIXLRegisterPairLow(),  MemOperand(base, offset));
    ___ Ldr(dest.AsVIXLRegisterPairHigh(), MemOperand(base, offset + 4));
  } else if (dest.IsSRegister()) {
    ___ Vldr(dest.AsVIXLSRegister(), MemOperand(base, offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Vldr(dest.AsVIXLDRegister(), MemOperand(base, offset));
  }
}

}  // namespace arm
}  // namespace art