/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_arm_vixl.h"

#include <iostream>
#include <type_traits>

#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"

using namespace vixl::aarch32;  // NOLINT(build/namespaces)
namespace vixl32 = vixl::aarch32;

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

namespace art {
namespace arm {

#ifdef ___
#error "ARM Assembler macro already defined."
#else
#define ___ asm_.GetVIXLAssembler()->
#endif

vixl::aarch32::Register AsVIXLRegister(ArmManagedRegister reg) {
  CHECK(reg.IsCoreRegister());
  return vixl::aarch32::Register(reg.RegId());
}

static inline vixl::aarch32::SRegister AsVIXLSRegister(ArmManagedRegister reg) {
  CHECK(reg.IsSRegister());
  return vixl::aarch32::SRegister(reg.RegId() - kNumberOfCoreRegIds);
}

static inline vixl::aarch32::DRegister AsVIXLDRegister(ArmManagedRegister reg) {
  CHECK(reg.IsDRegister());
  return vixl::aarch32::DRegister(reg.RegId() - kNumberOfCoreRegIds - kNumberOfSRegIds);
}

static inline vixl::aarch32::Register AsVIXLRegisterPairLow(ArmManagedRegister reg) {
  return vixl::aarch32::Register(reg.AsRegisterPairLow());
}

static inline vixl::aarch32::Register AsVIXLRegisterPairHigh(ArmManagedRegister reg) {
  return vixl::aarch32::Register(reg.AsRegisterPairHigh());
}

void ArmVIXLJNIMacroAssembler::FinalizeCode() {
  for (const std::unique_ptr<
      ArmVIXLJNIMacroAssembler::ArmException>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  asm_.FinalizeCode();
}

static dwarf::Reg DWARFReg(vixl32::Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg.GetCode()));
}

static dwarf::Reg DWARFReg(vixl32::SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg.GetCode()));
}

static constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);

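// Emit the JNI stub prologue: push the core callee saves and LR, then any FP callee saves,
// grow the stack to |frame_size| (recording CFI for each adjustment), store the ArtMethod*
// held in r0 at SP, and spill the register arguments to their slots just above the frame.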
void ArmVIXLJNIMacroAssembler::BuildFrame(size_t frame_size,
                                          ManagedRegister method_reg,
                                          ArrayRef<const ManagedRegister> callee_save_regs,
                                          const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK(r0.Is(AsVIXLRegister(method_reg.AsArm())));

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  ___ Push(RegisterList(core_spill_mask));
  cfi().AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi().RelOffsetForMany(DWARFReg(r0), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);

    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(s0), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  // Must at least have space for Method*.
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // handles CFI as well.

  // Write out Method*.
  asm_.StoreToOffset(kStoreWord, r0, sp, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (const ManagedRegisterSpill& spill : entry_spills) {
    ArmManagedRegister reg = spill.AsArm();
    if (reg.IsNoRegister()) {
      // only increment stack offset.
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      asm_.StoreToOffset(kStoreWord, AsVIXLRegister(reg), sp, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      asm_.StoreSToOffset(AsVIXLSRegister(reg), sp, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      asm_.StoreDToOffset(AsVIXLDRegister(reg), sp, offset);
      offset += 8;
    }
  }
}

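// Emit the JNI stub epilogue: shrink the stack back to the callee-save area, pop the FP and
// core callee saves (with matching CFI updates), refresh or verify the Marking Register when
// Baker read barriers are in use, and return with Bx lr.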
void ArmVIXLJNIMacroAssembler::RemoveFrame(size_t frame_size,
                                           ArrayRef<const ManagedRegister> callee_save_regs,
                                           bool may_suspend) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi().RememberState();

  // Compute callee saves to pop and LR.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // handles CFI as well.

  // Pop FP callee saves.
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);
    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi().RestoreMany(DWARFReg(s0), fp_spill_mask);
  }

  // Pop core callee saves and LR.
  ___ Pop(RegisterList(core_spill_mask));

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    if (may_suspend) {
      // The method may be suspended; refresh the Marking Register.
      ___ Ldr(mr, MemOperand(tr, Thread::IsGcMarkingOffset<kArmPointerSize>().Int32Value()));
    } else {
      // The method shall not be suspended; no need to refresh the Marking Register.

      // Check that the Marking Register is a callee-save register,
      // and thus has been preserved by native code following the
      // AAPCS calling convention.
      DCHECK_NE(core_spill_mask & (1 << MR), 0)
          << "core_spill_mask should contain Marking Register R" << MR;

      // The following condition is a compile-time one, so it does not have a run-time cost.
      if (kIsDebugBuild) {
        // The following condition is a run-time one; it is executed after the
        // previous compile-time test, to avoid penalizing non-debug builds.
        if (emit_run_time_checks_in_debug_mode_) {
          // Emit a run-time check verifying that the Marking Register is up-to-date.
          UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();
          // Ensure we are not clobbering a callee-save register that was restored before.
          DCHECK_EQ(core_spill_mask & (1 << temp.GetCode()), 0)
              << "core_spill_mask should not contain scratch register R" << temp.GetCode();
          asm_.GenerateMarkingRegisterCheck(temp);
        }
      }
    }
  }

  // Return to LR.
  ___ Bx(vixl32::lr);

  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}


void ArmVIXLJNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, -adjust);
  cfi().AdjustCFAOffset(adjust);
}

void ArmVIXLJNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  asm_.AddConstant(sp, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

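// Store a managed register to a frame slot, dispatching on the register kind (core register,
// register pair, S register or D register) and checking that |size| matches.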
void ArmVIXLJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister m_src, size_t size) {
  ArmManagedRegister src = m_src.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(AsVIXLRegister(src));
    asm_.StoreToOffset(kStoreWord, AsVIXLRegister(src), sp, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    asm_.StoreToOffset(kStoreWord, AsVIXLRegisterPairLow(src), sp, dest.Int32Value());
    asm_.StoreToOffset(kStoreWord, AsVIXLRegisterPairHigh(src), sp, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    CHECK_EQ(4u, size);
    asm_.StoreSToOffset(AsVIXLSRegister(src), sp, dest.Int32Value());
  } else {
    CHECK_EQ(8u, size);
    CHECK(src.IsDRegister()) << src;
    asm_.StoreDToOffset(AsVIXLDRegister(src), sp, dest.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src);
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src);
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
}

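// Store |msrc| at |dest| and copy the word at |in_off| into the adjacent slot at |dest| + 4.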
void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
                                             ManagedRegister msrc,
                                             FrameOffset in_off,
                                             ManagedRegister mscratch) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, sp, in_off.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);
}

void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest,
                                       FrameOffset src,
                                       ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister mdest,
                                       ManagedRegister mbase,
                                       MemberOffset offs,
                                       bool unpoison_reference) {
  vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
  vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dest, base);
  asm_.LoadFromOffset(kLoadWord, dest, base, offs.Int32Value());

  if (unpoison_reference) {
    asm_.MaybeUnpoisonHeapReference(dest);
  }
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       FrameOffset src ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtr(ManagedRegister dest ATTRIBUTE_UNUSED,
                                          ManagedRegister base ATTRIBUTE_UNUSED,
                                          Offset offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest,
                                                     uint32_t imm,
                                                     ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadImmediate(scratch, imm);
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm(), sp, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
                                              ThreadOffset32 src,
                                              size_t size) {
  return Load(m_dst.AsArm(), tr, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
  vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dest);
  asm_.LoadFromOffset(kLoadWord, dest, tr, offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs,
                                                    ThreadOffset32 thr_offs,
                                                    ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, tr, thr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, fr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs ATTRIBUTE_UNUSED,
                                                  FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                                  ManagedRegister mscratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
                                                        FrameOffset fr_offs,
                                                        ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.AddConstant(scratch, sp, fr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  asm_.StoreToOffset(kStoreWord, sp, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

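// Register-to-register move, dispatching on the destination kind: core registers use Mov,
// D and S registers use Vmov (including core-pair-to-D and core-to-S transfers), and register
// pairs order their two Movs so the first never clobbers the source of the second.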
void ArmVIXLJNIMacroAssembler::Move(ManagedRegister mdst,
                                    ManagedRegister msrc,
                                    size_t size ATTRIBUTE_UNUSED) {
  ArmManagedRegister dst = mdst.AsArm();
  ArmManagedRegister src = msrc.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
      temps.Exclude(AsVIXLRegister(dst));
      ___ Mov(AsVIXLRegister(dst), AsVIXLRegister(src));
    } else if (dst.IsDRegister()) {
      if (src.IsDRegister()) {
        ___ Vmov(F64, AsVIXLDRegister(dst), AsVIXLDRegister(src));
      } else {
        // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi})
        CHECK(src.IsRegisterPair()) << src;
        ___ Vmov(AsVIXLDRegister(dst), AsVIXLRegisterPairLow(src), AsVIXLRegisterPairHigh(src));
      }
    } else if (dst.IsSRegister()) {
      if (src.IsSRegister()) {
        ___ Vmov(F32, AsVIXLSRegister(dst), AsVIXLSRegister(src));
      } else {
        // VMOV Sn, Rn (Sn = Rn)
        CHECK(src.IsCoreRegister()) << src;
        ___ Vmov(AsVIXLSRegister(dst), AsVIXLRegister(src));
      }
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        ___ Mov(AsVIXLRegisterPairLow(dst), AsVIXLRegisterPairLow(src));
        ___ Mov(AsVIXLRegisterPairHigh(dst), AsVIXLRegisterPairHigh(src));
      } else {
        ___ Mov(AsVIXLRegisterPairHigh(dst), AsVIXLRegisterPairHigh(src));
        ___ Mov(AsVIXLRegisterPairLow(dst), AsVIXLRegisterPairLow(src));
      }
    }
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest,
                                    FrameOffset src,
                                    ManagedRegister mscratch,
                                    size_t size) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  CHECK(size == 4 || size == 8) << size;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  if (size == 4) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
  } else if (size == 8) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value() + 4);
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
                                    ManagedRegister src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest_base ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    FrameOffset src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister scratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

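// Compute a handle scope entry address into |mout_reg|. With |null_allowed| the reference is
// compared against null and an IT block selects either 0 or SP + handle_scope_offset;
// otherwise the address is formed unconditionally.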
void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister min_reg,
                                                      bool null_allowed) {
  vixl::aarch32::Register out_reg = AsVIXLRegister(mout_reg.AsArm());
  vixl::aarch32::Register in_reg =
      min_reg.AsArm().IsNoRegister() ? vixl::aarch32::Register() : AsVIXLRegister(min_reg.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(out_reg);
  if (null_allowed) {
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (!in_reg.IsValid()) {
      asm_.LoadFromOffset(kLoadWord, out_reg, sp, handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }

    temps.Exclude(in_reg);
    ___ Cmp(in_reg, 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      if (!out_reg.Is(in_reg)) {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 3 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(eq, 0xc);
        ___ mov(eq, out_reg, 0);
        asm_.AddConstantInIt(out_reg, sp, handle_scope_offset.Int32Value(), ne);
      } else {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 2 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(ne, 0x8);
        asm_.AddConstantInIt(out_reg, sp, handle_scope_offset.Int32Value(), ne);
      }
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(out_reg, sp, handle_scope_offset.Int32Value());
  }
}

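// Same as above, but the handle scope entry value is written to the frame slot |out_off|
// instead of a register, using |mscratch| as the working register.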
void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister mscratch,
                                                      bool null_allowed) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  if (null_allowed) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(scratch, 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                               2 * vixl32::kMaxInstructionSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
      ___ it(ne, 0x8);
      asm_.AddConstantInIt(scratch, sp, handle_scope_offset.Int32Value(), ne);
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(scratch, sp, handle_scope_offset.Int32Value());
  }
  asm_.StoreToOffset(kStoreWord, scratch, sp, out_off.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadReferenceFromHandleScope(
    ManagedRegister mout_reg ATTRIBUTE_UNUSED,
    ManagedRegister min_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::Call(ManagedRegister mbase,
                                    Offset offset,
                                    ManagedRegister mscratch) {
  vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, base, offset.Int32Value());
  ___ Blx(scratch);
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  // Call *(*(SP + base) + offset)
  asm_.LoadFromOffset(kLoadWord, scratch, sp, base.Int32Value());
  asm_.LoadFromOffset(kLoadWord, scratch, scratch, offset.Int32Value());
  ___ Blx(scratch);
  // TODO: place reference map on call
}

void ArmVIXLJNIMacroAssembler::CallFromThread(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                              ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(ManagedRegister mtr) {
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(AsVIXLRegister(mtr.AsArm()));
  ___ Mov(AsVIXLRegister(mtr.AsArm()), tr);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(FrameOffset dest_offset,
                                                ManagedRegister scratch ATTRIBUTE_UNUSED) {
  asm_.StoreToOffset(kStoreWord, tr, sp, dest_offset.Int32Value());
}

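// Check the thread-local exception field; if it is non-null, branch to a slow path that
// EmitExceptionPoll() emits at the end of the method.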
void ArmVIXLJNIMacroAssembler::ExceptionPoll(ManagedRegister mscratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  exception_blocks_.emplace_back(
      new ArmVIXLJNIMacroAssembler::ArmException(mscratch.AsArm(), stack_adjust));
  asm_.LoadFromOffset(kLoadWord,
                      scratch,
                      tr,
                      Thread::ExceptionOffset<kArmPointerSize>().Int32Value());

  ___ Cmp(scratch, 0);
  vixl32::Label* label = exception_blocks_.back()->Entry();
  ___ BPreferNear(ne, label);
  // TODO: think about using CBNZ here.
}

std::unique_ptr<JNIMacroLabel> ArmVIXLJNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new ArmVIXLJNIMacroLabel());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ B(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

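// Conditional jump taken when |mtest| is zero (kZero) or non-zero (kNotZero), emitted with
// the assembler's compare-and-branch helpers.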
void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label,
                                    JNIMacroUnaryCondition condition,
                                    ManagedRegister mtest) {
  CHECK(label != nullptr);

  vixl::aarch32::Register test = AsVIXLRegister(mtest.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(test);
  switch (condition) {
    case JNIMacroUnaryCondition::kZero:
      ___ CompareAndBranchIfZero(test, ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    case JNIMacroUnaryCondition::kNotZero:
      ___ CompareAndBranchIfNonZero(test, ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    default:
      LOG(FATAL) << "Not implemented unary condition: " << static_cast<int>(condition);
      UNREACHABLE();
  }
}

void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

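// Slow path for ExceptionPoll(): undo any extra stack adjustment, move the exception object
// into r0 and call the pDeliverException entrypoint, which does not return.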
void ArmVIXLJNIMacroAssembler::EmitExceptionPoll(
    ArmVIXLJNIMacroAssembler::ArmException* exception) {
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }

  vixl::aarch32::Register scratch = AsVIXLRegister(exception->scratch_);
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  // Pass exception object as argument.
  // Don't care about preserving r0 as this won't return.
  ___ Mov(r0, scratch);
  temps.Include(scratch);
  // TODO: check that exception->scratch_ is dead by this point.
  vixl32::Register temp = temps.Acquire();
  ___ Ldr(temp,
          MemOperand(tr,
                     QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value()));
  ___ Blx(temp);
}

void ArmVIXLJNIMacroAssembler::MemoryBarrier(ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
                                    vixl32::Register base,
                                    int32_t offset,
                                    size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsCoreRegister()) {
    vixl::aarch32::Register dst = AsVIXLRegister(dest);
    CHECK(!dst.Is(sp)) << dest;

    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(dst);

    if (size == 1u) {
      ___ Ldrb(dst, MemOperand(base, offset));
    } else {
      CHECK_EQ(4u, size) << dest;
      ___ Ldr(dst, MemOperand(base, offset));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dest;
    ___ Ldr(AsVIXLRegisterPairLow(dest), MemOperand(base, offset));
    ___ Ldr(AsVIXLRegisterPairHigh(dest), MemOperand(base, offset + 4));
  } else if (dest.IsSRegister()) {
    ___ Vldr(AsVIXLSRegister(dest), MemOperand(base, offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Vldr(AsVIXLDRegister(dest), MemOperand(base, offset));
  }
}

}  // namespace arm
}  // namespace art