/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_arm_vixl.h"

#include <iostream>
#include <type_traits>

#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"

using namespace vixl::aarch32;  // NOLINT(build/namespaces)
namespace vixl32 = vixl::aarch32;

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

namespace art {
namespace arm {

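// `___` is a shorthand for emitting instructions through the wrapped VIXL assembler:
// for example, `___ Mov(r0, r1)` expands to `asm_.GetVIXLAssembler()->Mov(r0, r1)`.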
#ifdef ___
#error "ARM Assembler macro already defined."
#else
#define ___ asm_.GetVIXLAssembler()->
#endif

// The AAPCS requires 8-byte alignment. This is not as strict as the Managed ABI stack alignment.
static constexpr size_t kAapcsStackAlignment = 8u;
static_assert(kAapcsStackAlignment < kStackAlignment);

vixl::aarch32::Register AsVIXLRegister(ArmManagedRegister reg) {
  CHECK(reg.IsCoreRegister());
  return vixl::aarch32::Register(reg.RegId());
}

static inline vixl::aarch32::SRegister AsVIXLSRegister(ArmManagedRegister reg) {
  CHECK(reg.IsSRegister());
  return vixl::aarch32::SRegister(reg.RegId() - kNumberOfCoreRegIds);
}

static inline vixl::aarch32::DRegister AsVIXLDRegister(ArmManagedRegister reg) {
  CHECK(reg.IsDRegister());
  return vixl::aarch32::DRegister(reg.RegId() - kNumberOfCoreRegIds - kNumberOfSRegIds);
}

static inline vixl::aarch32::Register AsVIXLRegisterPairLow(ArmManagedRegister reg) {
  return vixl::aarch32::Register(reg.AsRegisterPairLow());
}

static inline vixl::aarch32::Register AsVIXLRegisterPairHigh(ArmManagedRegister reg) {
  return vixl::aarch32::Register(reg.AsRegisterPairHigh());
}

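// Emit the deferred exception slow paths recorded by ExceptionPoll(), then let the
// underlying VIXL assembler finalize the code buffer.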
void ArmVIXLJNIMacroAssembler::FinalizeCode() {
  for (const std::unique_ptr<
      ArmVIXLJNIMacroAssembler::ArmException>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  asm_.FinalizeCode();
}

static constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);

void ArmVIXLJNIMacroAssembler::BuildFrame(size_t frame_size,
                                          ManagedRegister method_reg,
                                          ArrayRef<const ManagedRegister> callee_save_regs,
                                          const ManagedRegisterEntrySpills& entry_spills) {
  // If we're creating an actual frame with the method, enforce managed stack alignment,
  // otherwise only the native stack alignment.
  if (method_reg.IsNoRegister()) {
    CHECK_ALIGNED_PARAM(frame_size, kAapcsStackAlignment);
  } else {
    CHECK_ALIGNED_PARAM(frame_size, kStackAlignment);
  }

  // Push callee saves and link register.
  RegList core_spill_mask = 0;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  if (core_spill_mask != 0u) {
    ___ Push(RegisterList(core_spill_mask));
    cfi().AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(r0), 0, core_spill_mask, kFramePointerSize);
  }
  if (fp_spill_mask != 0) {
    uint32_t first = CTZ(fp_spill_mask);

    // Check that list is contiguous.
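    // (After shifting out the low zero bits, a contiguous mask is a solid run of ones.)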
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(s0), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  // Must at least have space for Method* if we're going to spill it.
  CHECK_GE(frame_size, (pushed_values + (method_reg.IsRegister() ? 1u : 0u)) * kFramePointerSize);
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // handles CFI as well.

  if (method_reg.IsRegister()) {
    // Write out Method*.
    CHECK(r0.Is(AsVIXLRegister(method_reg.AsArm())));
    asm_.StoreToOffset(kStoreWord, r0, sp, 0);
  }

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (const ManagedRegisterSpill& spill : entry_spills) {
    ArmManagedRegister reg = spill.AsArm();
    if (reg.IsNoRegister()) {
      // Only increment stack offset.
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      asm_.StoreToOffset(kStoreWord, AsVIXLRegister(reg), sp, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      asm_.StoreSToOffset(AsVIXLSRegister(reg), sp, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      asm_.StoreDToOffset(AsVIXLDRegister(reg), sp, offset);
      offset += 8;
    }
  }
}

void ArmVIXLJNIMacroAssembler::RemoveFrame(size_t frame_size,
                                           ArrayRef<const ManagedRegister> callee_save_regs,
                                           bool may_suspend) {
  CHECK_ALIGNED(frame_size, kAapcsStackAlignment);
  cfi().RememberState();

  // Compute callee saves to pop.
  RegList core_spill_mask = 0u;
  uint32_t fp_spill_mask = 0u;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1u << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1u << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  size_t pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GE(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // handles CFI as well.

  // Pop FP callee saves.
  if (fp_spill_mask != 0u) {
    uint32_t first = CTZ(fp_spill_mask);
    // Check that list is contiguous.
    DCHECK_EQ(fp_spill_mask >> CTZ(fp_spill_mask), ~0u >> (32 - POPCOUNT(fp_spill_mask)));

    ___ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fp_spill_mask)));
    cfi().AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi().RestoreMany(DWARFReg(s0), fp_spill_mask);
  }

  // Pop core callee saves and LR.
  if (core_spill_mask != 0u) {
    ___ Pop(RegisterList(core_spill_mask));
  }

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    if (may_suspend) {
      // The method may be suspended; refresh the Marking Register.
      ___ Ldr(mr, MemOperand(tr, Thread::IsGcMarkingOffset<kArmPointerSize>().Int32Value()));
    } else {
      // The method shall not be suspended; no need to refresh the Marking Register.

      // The Marking Register is a callee-save register, and thus has been
      // preserved by native code following the AAPCS calling convention.

      // The following condition is a compile-time one, so it does not have a run-time cost.
      if (kIsDebugBuild) {
        // The following condition is a run-time one; it is executed after the
        // previous compile-time test, to avoid penalizing non-debug builds.
        if (emit_run_time_checks_in_debug_mode_) {
          // Emit a run-time check verifying that the Marking Register is up-to-date.
          UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
          vixl32::Register temp = temps.Acquire();
          // Ensure we are not clobbering a callee-save register that was restored before.
          DCHECK_EQ(core_spill_mask & (1 << temp.GetCode()), 0)
              << "core_spill_mask should not contain scratch register R" << temp.GetCode();
          asm_.GenerateMarkingRegisterCheck(temp);
        }
      }
    }
  }

  // Return to LR.
  ___ Bx(vixl32::lr);

  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}


void ArmVIXLJNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  if (adjust != 0u) {
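    // The stack grows downwards, so growing the frame subtracts from SP.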
    asm_.AddConstant(sp, -adjust);
    cfi().AdjustCFAOffset(adjust);
  }
}

void ArmVIXLJNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  if (adjust != 0u) {
    asm_.AddConstant(sp, adjust);
    cfi().AdjustCFAOffset(-adjust);
  }
}

void ArmVIXLJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister m_src, size_t size) {
  ArmManagedRegister src = m_src.AsArm();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(AsVIXLRegister(src));
    asm_.StoreToOffset(kStoreWord, AsVIXLRegister(src), sp, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    asm_.StoreToOffset(kStoreWord, AsVIXLRegisterPairLow(src), sp, dest.Int32Value());
    asm_.StoreToOffset(kStoreWord, AsVIXLRegisterPairHigh(src), sp, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    CHECK_EQ(4u, size);
    asm_.StoreSToOffset(AsVIXLSRegister(src), sp, dest.Int32Value());
  } else {
    CHECK_EQ(8u, size);
    CHECK(src.IsDRegister()) << src;
    asm_.StoreDToOffset(AsVIXLDRegister(src), sp, dest.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src);
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(src);
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
                                             ManagedRegister msrc,
                                             FrameOffset in_off,
                                             ManagedRegister mscratch) {
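  // Store `msrc` at `dest` and copy the word at `in_off` into the adjacent slot (dest + 4).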
  vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, sp, in_off.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);
}

void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest,
                                       FrameOffset src,
                                       ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister mdest,
                                       ManagedRegister mbase,
                                       MemberOffset offs,
                                       bool unpoison_reference) {
  vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
  vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dest, base);
  asm_.LoadFromOffset(kLoadWord, dest, base, offs.Int32Value());

  if (unpoison_reference) {
    asm_.MaybeUnpoisonHeapReference(dest);
  }
}

void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest ATTRIBUTE_UNUSED,
                                       FrameOffset src ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtr(ManagedRegister dest ATTRIBUTE_UNUSED,
                                          ManagedRegister base ATTRIBUTE_UNUSED,
                                          Offset offs ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest,
                                                     uint32_t imm,
                                                     ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadImmediate(scratch, imm);
  asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
}

void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm(), sp, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
                                              ThreadOffset32 src,
                                              size_t size) {
  return Load(m_dst.AsArm(), tr, src.Int32Value(), size);
}

void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
  vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(dest);
  asm_.LoadFromOffset(kLoadWord, dest, tr, offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs,
                                                    ThreadOffset32 thr_offs,
                                                    ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, tr, thr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, sp, fr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs ATTRIBUTE_UNUSED,
                                                  FrameOffset fr_offs ATTRIBUTE_UNUSED,
                                                  ManagedRegister mscratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
                                                        FrameOffset fr_offs,
                                                        ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.AddConstant(scratch, sp, fr_offs.Int32Value());
  asm_.StoreToOffset(kStoreWord, scratch, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  asm_.StoreToOffset(kStoreWord, sp, tr, thr_offs.Int32Value());
}

void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
                                          size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}

void ArmVIXLJNIMacroAssembler::Move(ManagedRegister mdst,
                                    ManagedRegister msrc,
                                    size_t size ATTRIBUTE_UNUSED) {
  ArmManagedRegister dst = mdst.AsArm();
  ArmManagedRegister src = msrc.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
      temps.Exclude(AsVIXLRegister(dst));
      ___ Mov(AsVIXLRegister(dst), AsVIXLRegister(src));
    } else if (dst.IsDRegister()) {
      if (src.IsDRegister()) {
        ___ Vmov(F64, AsVIXLDRegister(dst), AsVIXLDRegister(src));
      } else {
        // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi})
        CHECK(src.IsRegisterPair()) << src;
        ___ Vmov(AsVIXLDRegister(dst), AsVIXLRegisterPairLow(src), AsVIXLRegisterPairHigh(src));
      }
    } else if (dst.IsSRegister()) {
      if (src.IsSRegister()) {
        ___ Vmov(F32, AsVIXLSRegister(dst), AsVIXLSRegister(src));
      } else {
        // VMOV Sn, Rn (Sn = Rn)
        CHECK(src.IsCoreRegister()) << src;
        ___ Vmov(AsVIXLSRegister(dst), AsVIXLRegister(src));
      }
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        ___ Mov(AsVIXLRegisterPairLow(dst), AsVIXLRegisterPairLow(src));
        ___ Mov(AsVIXLRegisterPairHigh(dst), AsVIXLRegisterPairHigh(src));
      } else {
        ___ Mov(AsVIXLRegisterPairHigh(dst), AsVIXLRegisterPairHigh(src));
        ___ Mov(AsVIXLRegisterPairLow(dst), AsVIXLRegisterPairLow(src));
      }
    }
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest,
                                    FrameOffset src,
                                    ManagedRegister mscratch,
                                    size_t size) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  CHECK(size == 4 || size == 8) << size;
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  if (size == 4) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
  } else if (size == 8) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
    asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value() + 4);
    asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);
  }
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
                                    ManagedRegister src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest_base ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    FrameOffset src_base ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister mscratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
                                    Offset dest_offset ATTRIBUTE_UNUSED,
                                    FrameOffset src ATTRIBUTE_UNUSED,
                                    Offset src_offset ATTRIBUTE_UNUSED,
                                    ManagedRegister scratch ATTRIBUTE_UNUSED,
                                    size_t size ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister min_reg,
                                                      bool null_allowed) {
  vixl::aarch32::Register out_reg = AsVIXLRegister(mout_reg.AsArm());
  vixl::aarch32::Register in_reg =
      min_reg.AsArm().IsNoRegister() ? vixl::aarch32::Register() : AsVIXLRegister(min_reg.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(out_reg);
  if (null_allowed) {
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (!in_reg.IsValid()) {
      asm_.LoadFromOffset(kLoadWord, out_reg, sp, handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }

    temps.Exclude(in_reg);
    ___ Cmp(in_reg, 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      if (!out_reg.Is(in_reg)) {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 3 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
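        // IT block: the `mov` below executes when the handle is null (eq),
        // the `add` computes the handle address otherwise (ne).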
        ___ it(eq, 0xc);
        ___ mov(eq, out_reg, 0);
        asm_.AddConstantInIt(out_reg, sp, handle_scope_offset.Int32Value(), ne);
      } else {
        ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                                 2 * vixl32::kMaxInstructionSizeInBytes,
                                 CodeBufferCheckScope::kMaximumSize);
        ___ it(ne, 0x8);
        asm_.AddConstantInIt(out_reg, sp, handle_scope_offset.Int32Value(), ne);
      }
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(out_reg, sp, handle_scope_offset.Int32Value());
  }
}

void ArmVIXLJNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                      FrameOffset handle_scope_offset,
                                                      ManagedRegister mscratch,
                                                      bool null_allowed) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  if (null_allowed) {
    asm_.LoadFromOffset(kLoadWord, scratch, sp, handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(scratch, 0);

    if (asm_.ShifterOperandCanHold(ADD, handle_scope_offset.Int32Value())) {
      ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
                               2 * vixl32::kMaxInstructionSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
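      // IT block: the `add` below executes only when the handle is non-null (ne).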
      ___ it(ne, 0x8);
      asm_.AddConstantInIt(scratch, sp, handle_scope_offset.Int32Value(), ne);
    } else {
      // TODO: Implement this (old arm assembler would have crashed here).
      UNIMPLEMENTED(FATAL);
    }
  } else {
    asm_.AddConstant(scratch, sp, handle_scope_offset.Int32Value());
  }
  asm_.StoreToOffset(kStoreWord, scratch, sp, out_off.Int32Value());
}

void ArmVIXLJNIMacroAssembler::LoadReferenceFromHandleScope(
    ManagedRegister mout_reg ATTRIBUTE_UNUSED,
    ManagedRegister min_reg ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
                                            bool could_be_null ATTRIBUTE_UNUSED) {
  // TODO: not validating references.
}

void ArmVIXLJNIMacroAssembler::Jump(ManagedRegister mbase,
                                    Offset offset,
                                    ManagedRegister mscratch) {
  vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, base, offset.Int32Value());
  ___ Bx(scratch);
}

void ArmVIXLJNIMacroAssembler::Call(ManagedRegister mbase,
                                    Offset offset,
                                    ManagedRegister mscratch) {
  vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  asm_.LoadFromOffset(kLoadWord, scratch, base, offset.Int32Value());
  ___ Blx(scratch);
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  // Call *(*(SP + base) + offset)
  asm_.LoadFromOffset(kLoadWord, scratch, sp, base.Int32Value());
  asm_.LoadFromOffset(kLoadWord, scratch, scratch, offset.Int32Value());
  ___ Blx(scratch);
  // TODO: place reference map on call.
}

void ArmVIXLJNIMacroAssembler::CallFromThread(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                              ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(ManagedRegister mtr) {
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(AsVIXLRegister(mtr.AsArm()));
  ___ Mov(AsVIXLRegister(mtr.AsArm()), tr);
}

void ArmVIXLJNIMacroAssembler::GetCurrentThread(FrameOffset dest_offset,
                                                ManagedRegister scratch ATTRIBUTE_UNUSED) {
  asm_.StoreToOffset(kStoreWord, tr, sp, dest_offset.Int32Value());
}

void ArmVIXLJNIMacroAssembler::ExceptionPoll(ManagedRegister mscratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kAapcsStackAlignment);
  vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  exception_blocks_.emplace_back(
      new ArmVIXLJNIMacroAssembler::ArmException(mscratch.AsArm(), stack_adjust));
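  // The slow-path code for this block is emitted by EmitExceptionPoll() during FinalizeCode().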
  asm_.LoadFromOffset(kLoadWord,
                      scratch,
                      tr,
                      Thread::ExceptionOffset<kArmPointerSize>().Int32Value());

  ___ Cmp(scratch, 0);
  vixl32::Label* label = exception_blocks_.back()->Entry();
  ___ BPreferNear(ne, label);
  // TODO: think about using CBNZ here.
}

std::unique_ptr<JNIMacroLabel> ArmVIXLJNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new ArmVIXLJNIMacroLabel());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ B(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::Jump(JNIMacroLabel* label,
                                    JNIMacroUnaryCondition condition,
                                    ManagedRegister mtest) {
  CHECK(label != nullptr);

  vixl::aarch32::Register test = AsVIXLRegister(mtest.AsArm());
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(test);
  switch (condition) {
    case JNIMacroUnaryCondition::kZero:
      ___ CompareAndBranchIfZero(test, ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    case JNIMacroUnaryCondition::kNotZero:
      ___ CompareAndBranchIfNonZero(test, ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
      break;
    default:
      LOG(FATAL) << "Not implemented unary condition: " << static_cast<int>(condition);
      UNREACHABLE();
  }
}

void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  ___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}

void ArmVIXLJNIMacroAssembler::EmitExceptionPoll(
    ArmVIXLJNIMacroAssembler::ArmException* exception) {
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }

  vixl::aarch32::Register scratch = AsVIXLRegister(exception->scratch_);
  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
  temps.Exclude(scratch);
  // Pass exception object as argument.
  // Don't care about preserving r0 as this won't return.
  ___ Mov(r0, scratch);
  temps.Include(scratch);
  // TODO: check that exception->scratch_ is dead by this point.
  vixl32::Register temp = temps.Acquire();
  ___ Ldr(temp,
          MemOperand(tr,
                     QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value()));
  ___ Blx(temp);
}

void ArmVIXLJNIMacroAssembler::MemoryBarrier(ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}

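// Shared helper for Load() and LoadFromThread(): loads `size` bytes from [base + offset]
// into whichever kind of managed register `dest` is.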
void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
                                    vixl32::Register base,
                                    int32_t offset,
                                    size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsCoreRegister()) {
    vixl::aarch32::Register dst = AsVIXLRegister(dest);
    CHECK(!dst.Is(sp)) << dest;

    UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
    temps.Exclude(dst);

    if (size == 1u) {
      ___ Ldrb(dst, MemOperand(base, offset));
    } else {
      CHECK_EQ(4u, size) << dest;
      ___ Ldr(dst, MemOperand(base, offset));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size) << dest;
    ___ Ldr(AsVIXLRegisterPairLow(dest), MemOperand(base, offset));
    ___ Ldr(AsVIXLRegisterPairHigh(dest), MemOperand(base, offset + 4));
  } else if (dest.IsSRegister()) {
    ___ Vldr(AsVIXLSRegister(dest), MemOperand(base, offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Vldr(AsVIXLDRegister(dest), MemOperand(base, offset));
  }
}

}  // namespace arm
}  // namespace art