/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_arm_vixl.h"

#include <iostream>
#include <type_traits>

#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"

using namespace vixl::aarch32;  // NOLINT(build/namespaces)
namespace vixl32 = vixl::aarch32;

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

namespace art {
namespace arm {

#ifdef ___
#error "ARM Assembler macro already defined."
#else
#define ___   asm_.GetVIXLAssembler()->
#endif

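// Emit the out-of-line exception slow paths recorded by ExceptionPoll(),
// then let the underlying assembler finalize the code buffer.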
void ArmVIXLJNIMacroAssembler::FinalizeCode() {
  for (const std::unique_ptr<
      ArmVIXLJNIMacroAssembler::ArmException>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  asm_.FinalizeCode();
}

static dwarf::Reg DWARFReg(vixl32::Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg.GetCode()));
}

static dwarf::Reg DWARFReg(vixl32::SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg.GetCode()));
}

static constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);

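// Set up the JNI stub frame: push the core callee-saves and LR, push the FP
// callee-saves, grow the stack to |frame_size|, store the Method* at [sp],
// and write the remaining (spilled) register arguments to their stack slots
// above the frame.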
void ArmVIXLJNIMacroAssembler::BuildFrame(size_t frame_size,
                                          ManagedRegister method_reg,
                                          ArrayRef<const ManagedRegister> callee_save_regs,
                                          const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK(r0.Is(method_reg.AsArm().AsVIXLRegister()));

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  ___ Push(RegisterList(core_spill_mask));
  cfi().AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi().RelOffsetForMany(DWARFReg(r0), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);

    // Check that the list is contiguous: shifting out the low zero bits must
    // leave exactly POPCOUNT(fp_spill_mask) consecutive one bits.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(s0), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  // Must at least have space for Method*.
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // handles CFI as well.

  // Write out Method*.
  asm_.StoreToOffset(kStoreWord, r0, sp, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ArmManagedRegister reg = entry_spills.at(i).AsArm();
    if (reg.IsNoRegister()) {
      // Only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      asm_.StoreToOffset(kStoreWord, reg.AsVIXLRegister(), sp, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      asm_.StoreSToOffset(reg.AsVIXLSRegister(), sp, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      asm_.StoreDToOffset(reg.AsVIXLDRegister(), sp, offset);
      offset += 8;
    }
  }
}

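// Tear down the JNI stub frame: shrink the stack back to the callee-save
// area, pop the FP and core callee-saves (including LR), refresh the Marking
// Register when needed, and return via LR.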
void ArmVIXLJNIMacroAssembler::RemoveFrame(size_t frame_size,
                                           ArrayRef<const ManagedRegister> callee_save_regs,
                                           bool may_suspend) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi().RememberState();

  // Compute callee saves to pop and LR.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // handles CFI as well.

  // Pop FP callee saves.
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);
    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi().RestoreMany(DWARFReg(s0), fp_spill_mask);
  }

  // Pop core callee saves and LR.
  ___ Pop(RegisterList(core_spill_mask));

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    if (may_suspend) {
      // The method may be suspended; refresh the Marking Register.
      ___ Ldr(mr, MemOperand(tr, Thread::IsGcMarkingOffset<kArmPointerSize>().Int32Value()));
    } else {
      // The method shall not be suspended; no need to refresh the Marking Register.

      // Check that the Marking Register is a callee-save register,
      // and thus has been preserved by native code following the
      // AAPCS calling convention.
      DCHECK_NE(core_spill_mask & (1 << MR), 0)
          << "core_spill_mask should contain Marking Register R" << MR;

      // The following condition is a compile-time one, so it does not have a run-time cost.
      if (kIsDebugBuild) {
        // The following condition is a run-time one; it is executed after the
        // previous compile-time test, to avoid penalizing non-debug builds.
        if (emit_run_time_checks_in_debug_mode_) {
          // Emit a run-time check verifying that the Marking Register is up-to-date.
          UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();
          // Ensure we are not clobbering a callee-save register that was restored before.
          DCHECK_EQ(core_spill_mask & (1 << temp.GetCode()), 0)
              << "core_spill_mask should not contain scratch register R" << temp.GetCode();
          asm_.GenerateMarkingRegisterCheck(temp);
        }
      }
    }
  }

  // Return to LR.
  ___ Bx(vixl32::lr);

  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}

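// Frame size adjustments must keep the CFI in sync with the actual SP
// changes; note that AddConstant(sp, -adjust) effectively subtracts from SP.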
void ArmVIXLJNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, -adjust);
  cfi().AdjustCFAOffset(adjust);
}

void ArmVIXLJNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

void ArmVIXLJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister m_src, size_t size) {
  ArmManagedRegister src = m_src.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(src.AsVIXLRegister());
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairLow(),  sp, dest.Int32Value());
    asm_.StoreToOffset(kStoreWord, src.AsVIXLRegisterPairHigh(), sp, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    CHECK_EQ(4u, size);
    asm_.StoreSToOffset(src.AsVIXLSRegister(), sp, dest.Int32Value());
  } else {
    CHECK_EQ(8u, size);
    CHECK(src.IsDRegister()) << src;
    asm_.StoreDToOffset(src.AsVIXLDRegister(), sp, dest.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src.AsVIXLRegister());
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  ArmManagedRegister src = msrc.AsArm();
  CHECK(src.IsCoreRegister()) << src;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src.AsVIXLRegister());
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
}

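// Store a value spanning a register and a stack slot: |msrc| is written to
// [sp, #dest] and the word at [sp, #in_off] is copied to [sp, #dest + 4]
// through the scratch register.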
void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
                                             ManagedRegister msrc,
                                             FrameOffset in_off,
                                             ManagedRegister mscratch) {
  ArmManagedRegister src = msrc.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  asm_.StoreToOffset(kStoreWord, src.AsVIXLRegister(), sp, dest.Int32Value());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, in_off.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, dest.Int32Value() + 4);
}

void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest,
                                       FrameOffset src,
                                       ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, src.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest,
                                       ManagedRegister base,
                                       MemberOffset offs,
                                       bool unpoison_reference) {
  ArmManagedRegister dst = dest.AsArm();
  CHECK(dst.IsCoreRegister() && base.AsArm().IsCoreRegister()) << dst;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dst.AsVIXLRegister(), base.AsArm().AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord,
                      dst.AsVIXLRegister(),
                      base.AsArm().AsVIXLRegister(),
                      offs.Int32Value());

  if (unpoison_reference) {
    asm_.MaybeUnpoisonHeapReference(dst.AsVIXLRegister());
  }
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       FrameOffset src ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtr(ManagedRegister dest ATTRIBUTE_UNUSED,
                                          ManagedRegister base ATTRIBUTE_UNUSED,
                                          Offset offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest,
                                                     uint32_t imm,
                                                     ManagedRegister scratch) {
  ArmManagedRegister mscratch = scratch.AsArm();
  CHECK(mscratch.IsCoreRegister()) << mscratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(mscratch.AsVIXLRegister());
  asm_.LoadImmediate(mscratch.AsVIXLRegister(), imm);
  asm_.StoreToOffset(kStoreWord, mscratch.AsVIXLRegister(), sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm(), sp, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
                                              ThreadOffset32 src,
                                              size_t size) {
  return Load(m_dst.AsArm(), tr, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister m_dst, ThreadOffset32 offs) {
  ArmManagedRegister dst = m_dst.AsArm();
  CHECK(dst.IsCoreRegister()) << dst;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dst.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, dst.AsVIXLRegister(), tr, offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs,
                                                    ThreadOffset32 thr_offs,
                                                    ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), tr, thr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, fr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs ATTRIBUTE_UNUSED,
                                                  FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                                  ManagedRegister mscratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
                                                        FrameOffset fr_offs,
                                                        ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.AddConstant(scratch.AsVIXLRegister(), sp, fr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  asm_.StoreToOffset(kStoreWord, sp, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

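// Register-to-register move, dispatching on the kinds of the destination and
// source registers (core, S, D, or register pair) and using the matching
// MOV/VMOV forms.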
void ArmVIXLJNIMacroAssembler::Move(ManagedRegister m_dst,
                                    ManagedRegister m_src,
                                    size_t size ATTRIBUTE_UNUSED) {
  ArmManagedRegister dst = m_dst.AsArm();
  ArmManagedRegister src = m_src.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
      temps.Exclude(dst.AsVIXLRegister());
      ___ Mov(dst.AsVIXLRegister(), src.AsVIXLRegister());
    } else if (dst.IsDRegister()) {
      if (src.IsDRegister()) {
        ___ Vmov(F64, dst.AsVIXLDRegister(), src.AsVIXLDRegister());
      } else {
        // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi})
        CHECK(src.IsRegisterPair()) << src;
        ___ Vmov(dst.AsVIXLDRegister(), src.AsVIXLRegisterPairLow(), src.AsVIXLRegisterPairHigh());
      }
    } else if (dst.IsSRegister()) {
      if (src.IsSRegister()) {
        ___ Vmov(F32, dst.AsVIXLSRegister(), src.AsVIXLSRegister());
      } else {
        // VMOV Sn, Rn  (Sn = Rn)
        CHECK(src.IsCoreRegister()) << src;
        ___ Vmov(dst.AsVIXLSRegister(), src.AsVIXLRegister());
      }
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        ___ Mov(dst.AsVIXLRegisterPairLow(),  src.AsVIXLRegisterPairLow());
        ___ Mov(dst.AsVIXLRegisterPairHigh(), src.AsVIXLRegisterPairHigh());
      } else {
        ___ Mov(dst.AsVIXLRegisterPairHigh(), src.AsVIXLRegisterPairHigh());
        ___ Mov(dst.AsVIXLRegisterPairLow(),  src.AsVIXLRegisterPairLow());
      }
    }
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest,
                                    FrameOffset src,
                                    ManagedRegister scratch,
                                    size_t size) {
  ArmManagedRegister temp = scratch.AsArm();
  CHECK(temp.IsCoreRegister()) << temp;
  CHECK(size == 4 || size == 8) << size;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(temp.AsVIXLRegister());
  if (size == 4) {
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value());
  } else if (size == 8) {
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value());
    asm_.LoadFromOffset(kLoadWord, temp.AsVIXLRegister(), sp, src.Int32Value() + 4);
    asm_.StoreToOffset(kStoreWord, temp.AsVIXLRegister(), sp, dest.Int32Value() + 4);
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
                                    ManagedRegister src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest_base ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    FrameOffset src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister scratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

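// Create a handle scope entry for the reference in |min_reg| and leave its
// address (or null) in |mout_reg|. With null_allowed and distinct in/out
// registers, the emitted sequence is roughly:
//   cmp   in, #0
//   it    eq, 0xc                ; IT block covering the two conditional insns
//   moveq out, #0                ; null stays null
//   addne out, sp, #hs_offset    ; otherwise, address of the entry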
void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister min_reg,
                                                      bool null_allowed) {
  ArmManagedRegister out_reg = mout_reg.AsArm();
  ArmManagedRegister in_reg = min_reg.AsArm();
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister()) << in_reg;
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(out_reg.AsVIXLRegister());
  if (null_allowed) {
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      asm_.LoadFromOffset(kLoadWord,
                          out_reg.AsVIXLRegister(),
                          sp,
                          handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }

    temps.Exclude(in_reg.AsVIXLRegister());
    ___ Cmp(in_reg.AsVIXLRegister(), 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      if (!out_reg.Equals(in_reg)) {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 3 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(eq, 0xc);
        ___ mov(eq, out_reg.AsVIXLRegister(), 0);
        asm_.AddConstantInIt(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
      } else {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 2 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(ne, 0x8);
        asm_.AddConstantInIt(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
      }
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(out_reg.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister mscratch,
                                                      bool null_allowed) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  if (null_allowed) {
    asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(scratch.AsVIXLRegister(), 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                               2 * vixl32::kMaxInstructionSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
      ___ it(ne, 0x8);
      asm_.AddConstantInIt(scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value(), ne);
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(scratch.AsVIXLRegister(), sp, handle_scope_offset.Int32Value());
  }
  asm_.StoreToOffset(kStoreWord, scratch.AsVIXLRegister(), sp, out_off.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadReferenceFromHandleScope(
    ManagedRegister mout_reg ATTRIBUTE_UNUSED,
    ManagedRegister min_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::Call(ManagedRegister mbase,
                                    Offset offset,
                                    ManagedRegister mscratch) {
  ArmManagedRegister base = mbase.AsArm();
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      base.AsVIXLRegister(),
                      offset.Int32Value());
  ___ Blx(scratch.AsVIXLRegister());
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  // Call *(*(SP + base) + offset)
  asm_.LoadFromOffset(kLoadWord, scratch.AsVIXLRegister(), sp, base.Int32Value());
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      scratch.AsVIXLRegister(),
                      offset.Int32Value());
  ___ Blx(scratch.AsVIXLRegister());
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::CallFromThread(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                              ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(ManagedRegister mtr) {
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(mtr.AsArm().AsVIXLRegister());
  ___ Mov(mtr.AsArm().AsVIXLRegister(), tr);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(FrameOffset dest_offset,
                                                ManagedRegister scratch ATTRIBUTE_UNUSED) {
  asm_.StoreToOffset(kStoreWord, tr, sp, dest_offset.Int32Value());
}

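// Poll for a pending exception: load the thread-local exception reference
// and, if it is non-null, branch to a slow path that FinalizeCode() emits
// out of line (see EmitExceptionPoll() below).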
void ArmVIXLJNIMacroAssembler::ExceptionPoll(ManagedRegister m_scratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  ArmManagedRegister scratch = m_scratch.AsArm();
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch.AsVIXLRegister());
  exception_blocks_.emplace_back(
      new ArmVIXLJNIMacroAssembler::ArmException(scratch, stack_adjust));
  asm_.LoadFromOffset(kLoadWord,
                      scratch.AsVIXLRegister(),
                      tr,
                      Thread::ExceptionOffset<kArmPointerSize>().Int32Value());

  ___ Cmp(scratch.AsVIXLRegister(), 0);
  vixl32::Label* label = exception_blocks_.back()->Entry();
  ___ BPreferNear(ne, label);
  // TODO: think about using CBNZ here.
}

std::unique_ptr<JNIMacroLabel> ArmVIXLJNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new ArmVIXLJNIMacroLabel());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ B(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label,
                                    JNIMacroUnaryCondition condition,
                                    ManagedRegister test) {
  CHECK(label != nullptr);

  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(test.AsArm().AsVIXLRegister());
  switch (condition) {
    case JNIMacroUnaryCondition::kZero:
      ___ CompareAndBranchIfZero(test.AsArm().AsVIXLRegister(),
                                 ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    case JNIMacroUnaryCondition::kNotZero:
      ___ CompareAndBranchIfNonZero(test.AsArm().AsVIXLRegister(),
                                    ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    default:
      LOG(FATAL) << "Not implemented unary condition: " << static_cast<int>(condition);
      UNREACHABLE();
  }
}

void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

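// Slow path for ExceptionPoll(): undo any extra stack adjustment, pass the
// exception object in r0 and call the pDeliverException entrypoint, which
// does not return.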
void ArmVIXLJNIMacroAssembler::EmitExceptionPoll(
    ArmVIXLJNIMacroAssembler::ArmException* exception) {
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }

  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(exception->scratch_.AsVIXLRegister());
  // Pass exception object as argument.
  // Don't care about preserving r0 as this won't return.
  ___ Mov(r0, exception->scratch_.AsVIXLRegister());
  temps.Include(exception->scratch_.AsVIXLRegister());
  // TODO: check that exception->scratch_ is dead by this point.
  vixl32::Register temp = temps.Acquire();
  ___ Ldr(temp,
          MemOperand(tr,
              QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value()));
  ___ Blx(temp);
}

void ArmVIXLJNIMacroAssembler::MemoryBarrier(ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

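// Common helper for the Load()/LoadFromThread() overloads above: load |size|
// bytes from [base, #offset] into |dest|, dispatching on the destination
// register kind.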
void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
                                    vixl32::Register base,
                                    int32_t offset,
                                    size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsCoreRegister()) {
    CHECK(!dest.AsVIXLRegister().Is(sp)) << dest;

    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(dest.AsVIXLRegister());

    if (size == 1u) {
      ___ Ldrb(dest.AsVIXLRegister(), MemOperand(base, offset));
    } else {
      CHECK_EQ(4u, size) << dest;
      ___ Ldr(dest.AsVIXLRegister(), MemOperand(base, offset));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dest;
    ___ Ldr(dest.AsVIXLRegisterPairLow(),  MemOperand(base, offset));
    ___ Ldr(dest.AsVIXLRegisterPairHigh(), MemOperand(base, offset + 4));
  } else if (dest.IsSRegister()) {
    ___ Vldr(dest.AsVIXLSRegister(), MemOperand(base, offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Vldr(dest.AsVIXLDRegister(), MemOperand(base, offset));
  }
}


}  // namespace arm
}  // namespace art