/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "calling_convention_arm64.h"

#include <android-base/logging.h>

#include "arch/arm64/jni_frame_arm64.h"
#include "arch/instruction_set.h"
#include "utils/arm64/managed_register_arm64.h"

namespace art HIDDEN {
namespace arm64 {

static constexpr ManagedRegister kXArgumentRegisters[] = {
    Arm64ManagedRegister::FromXRegister(X0),
    Arm64ManagedRegister::FromXRegister(X1),
    Arm64ManagedRegister::FromXRegister(X2),
    Arm64ManagedRegister::FromXRegister(X3),
    Arm64ManagedRegister::FromXRegister(X4),
    Arm64ManagedRegister::FromXRegister(X5),
    Arm64ManagedRegister::FromXRegister(X6),
    Arm64ManagedRegister::FromXRegister(X7),
};
static_assert(kMaxIntLikeRegisterArguments == arraysize(kXArgumentRegisters));

static const DRegister kDArgumentRegisters[] = {
  D0, D1, D2, D3, D4, D5, D6, D7
};
static_assert(kMaxFloatOrDoubleRegisterArguments == arraysize(kDArgumentRegisters));

static const SRegister kSArgumentRegisters[] = {
  S0, S1, S2, S3, S4, S5, S6, S7
};
static_assert(kMaxFloatOrDoubleRegisterArguments == arraysize(kSArgumentRegisters));
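// Note: X0-X7 and D0-D7 are the AAPCS64 argument registers; each Sn is the lower half
// of the corresponding Dn, which is why the S and D arrays above are indexed by the
// same float-and-double iterator.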

static constexpr ManagedRegister kCalleeSaveRegisters[] = {
    // Core registers.
    // Note: The native JNI function may call into VM runtime functions that can suspend
    // the thread or trigger GC, and the JNI method frame becomes the top quick frame in
    // those cases. To satisfy the GC, we therefore need to save LR and the callee-save
    // registers, similar to the CalleeSaveMethod(RefOnly) frame.
    // The JNI function is the native function that the Java code wants to call.
    // The JNI method is the method compiled by the JNI compiler.
    // Call chain: managed code (Java) --> JNI method --> JNI function.
    // This does not apply to @CriticalNative.

    // The thread register (X19) is saved on the stack.
    Arm64ManagedRegister::FromXRegister(X19),
    Arm64ManagedRegister::FromXRegister(X20),  // Note: Marking register.
    Arm64ManagedRegister::FromXRegister(X21),  // Note: Suspend check register.
    Arm64ManagedRegister::FromXRegister(X22),
    Arm64ManagedRegister::FromXRegister(X23),
    Arm64ManagedRegister::FromXRegister(X24),
    Arm64ManagedRegister::FromXRegister(X25),
    Arm64ManagedRegister::FromXRegister(X26),
    Arm64ManagedRegister::FromXRegister(X27),
    Arm64ManagedRegister::FromXRegister(X28),
    Arm64ManagedRegister::FromXRegister(X29),
    Arm64ManagedRegister::FromXRegister(LR),
    // Hard float registers.
    // Consider the call chain java_method_1 --> jni method --> jni function --> java_method_2:
    // if we break in java_method_2, we still need to find the values of the DEX registers
    // in java_method_1, so all managed-code callee-saves need to be saved.
    Arm64ManagedRegister::FromDRegister(D8),
    Arm64ManagedRegister::FromDRegister(D9),
    Arm64ManagedRegister::FromDRegister(D10),
    Arm64ManagedRegister::FromDRegister(D11),
    Arm64ManagedRegister::FromDRegister(D12),
    Arm64ManagedRegister::FromDRegister(D13),
    Arm64ManagedRegister::FromDRegister(D14),
    Arm64ManagedRegister::FromDRegister(D15),
};

template <size_t size>
static constexpr uint32_t CalculateCoreCalleeSpillMask(
    const ManagedRegister (&callee_saves)[size]) {
  uint32_t result = 0u;
  for (auto&& r : callee_saves) {
    if (r.AsArm64().IsXRegister()) {
      result |= (1u << r.AsArm64().AsXRegister());
    }
  }
  return result;
}

template <size_t size>
static constexpr uint32_t CalculateFpCalleeSpillMask(const ManagedRegister (&callee_saves)[size]) {
  uint32_t result = 0u;
  for (auto&& r : callee_saves) {
    if (r.AsArm64().IsDRegister()) {
      result |= (1u << r.AsArm64().AsDRegister());
    }
  }
  return result;
}

static constexpr uint32_t kCoreCalleeSpillMask = CalculateCoreCalleeSpillMask(kCalleeSaveRegisters);
static constexpr uint32_t kFpCalleeSpillMask = CalculateFpCalleeSpillMask(kCalleeSaveRegisters);
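// For reference (assuming the standard ART register numbering where LR is X30): the
// masks above evaluate to 0x7ff80000 (bits 19-30 for X19-X29 and LR) and 0x0000ff00
// (bits 8-15 for D8-D15), respectively.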

static constexpr ManagedRegister kAapcs64CalleeSaveRegisters[] = {
    // Core registers.
    Arm64ManagedRegister::FromXRegister(X19),
    Arm64ManagedRegister::FromXRegister(X20),
    Arm64ManagedRegister::FromXRegister(X21),
    Arm64ManagedRegister::FromXRegister(X22),
    Arm64ManagedRegister::FromXRegister(X23),
    Arm64ManagedRegister::FromXRegister(X24),
    Arm64ManagedRegister::FromXRegister(X25),
    Arm64ManagedRegister::FromXRegister(X26),
    Arm64ManagedRegister::FromXRegister(X27),
    Arm64ManagedRegister::FromXRegister(X28),
    Arm64ManagedRegister::FromXRegister(X29),
    Arm64ManagedRegister::FromXRegister(LR),
    // Hard float registers.
    Arm64ManagedRegister::FromDRegister(D8),
    Arm64ManagedRegister::FromDRegister(D9),
    Arm64ManagedRegister::FromDRegister(D10),
    Arm64ManagedRegister::FromDRegister(D11),
    Arm64ManagedRegister::FromDRegister(D12),
    Arm64ManagedRegister::FromDRegister(D13),
    Arm64ManagedRegister::FromDRegister(D14),
    Arm64ManagedRegister::FromDRegister(D15),
};

static constexpr uint32_t kAapcs64CoreCalleeSpillMask =
    CalculateCoreCalleeSpillMask(kAapcs64CalleeSaveRegisters);
static constexpr uint32_t kAapcs64FpCalleeSpillMask =
    CalculateFpCalleeSpillMask(kAapcs64CalleeSaveRegisters);

// Calling convention
static ManagedRegister ReturnRegisterForShorty(std::string_view shorty) {
  if (shorty[0] == 'F') {
    return Arm64ManagedRegister::FromSRegister(S0);
  } else if (shorty[0] == 'D') {
    return Arm64ManagedRegister::FromDRegister(D0);
  } else if (shorty[0] == 'J') {
    return Arm64ManagedRegister::FromXRegister(X0);
  } else if (shorty[0] == 'V') {
    return Arm64ManagedRegister::NoRegister();
  } else {
    return Arm64ManagedRegister::FromWRegister(W0);
  }
}
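// Note: a shorty lists the return type first, e.g. `long f(int, double)` has the shorty
// "JID", so the result above is X0; int-like and reference results fall through to W0.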

ManagedRegister Arm64ManagedRuntimeCallingConvention::ReturnRegister() const {
  return ReturnRegisterForShorty(GetShorty());
}

ManagedRegister Arm64JniCallingConvention::ReturnRegister() const {
  return ReturnRegisterForShorty(GetShorty());
}

ManagedRegister Arm64JniCallingConvention::IntReturnRegister() const {
  return Arm64ManagedRegister::FromWRegister(W0);
}

// Managed runtime calling convention

ManagedRegister Arm64ManagedRuntimeCallingConvention::MethodRegister() {
  return Arm64ManagedRegister::FromXRegister(X0);
}

ManagedRegister Arm64ManagedRuntimeCallingConvention::ArgumentRegisterForMethodExitHook() {
  return Arm64ManagedRegister::FromXRegister(X4);
}

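// In the managed ABI the ArtMethod* occupies X0 (see MethodRegister() above), so non-FP
// arguments start at X1; hence the `/* method */ 1u` adjustments below.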
bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
  if (IsCurrentParamAFloatOrDouble()) {
    return itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments;
  } else {
    size_t non_fp_arg_number = itr_args_ - itr_float_and_doubles_;
    return /* method */ 1u + non_fp_arg_number < kMaxIntLikeRegisterArguments;
  }
}

bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
  return !IsCurrentParamInRegister();
}

ManagedRegister Arm64ManagedRuntimeCallingConvention::CurrentParamRegister() {
  DCHECK(IsCurrentParamInRegister());
  if (IsCurrentParamAFloatOrDouble()) {
    if (IsCurrentParamADouble()) {
      return Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[itr_float_and_doubles_]);
    } else {
      return Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[itr_float_and_doubles_]);
    }
  } else {
    size_t non_fp_arg_number = itr_args_ - itr_float_and_doubles_;
    ManagedRegister x_reg = kXArgumentRegisters[/* method */ 1u + non_fp_arg_number];
    if (IsCurrentParamALong()) {
      return x_reg;
    } else {
      return Arm64ManagedRegister::FromWRegister(x_reg.AsArm64().AsOverlappingWRegister());
    }
  }
}

FrameOffset Arm64ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
  return FrameOffset(displacement_.Int32Value() +  // displacement
                     kFramePointerSize +  // Method ref
                     (itr_slots_ * sizeof(uint32_t)));  // offset into in args
}

// JNI calling convention

Arm64JniCallingConvention::Arm64JniCallingConvention(bool is_static,
                                                     bool is_synchronized,
                                                     bool is_fast_native,
                                                     bool is_critical_native,
                                                     std::string_view shorty)
    : JniCallingConvention(is_static,
                           is_synchronized,
                           is_fast_native,
                           is_critical_native,
                           shorty,
                           kArm64PointerSize) {
}

uint32_t Arm64JniCallingConvention::CoreSpillMask() const {
  return is_critical_native_ ? 0u : kCoreCalleeSpillMask;
}

uint32_t Arm64JniCallingConvention::FpSpillMask() const {
  return is_critical_native_ ? 0u : kFpCalleeSpillMask;
}

ArrayRef<const ManagedRegister> Arm64JniCallingConvention::CalleeSaveScratchRegisters() const {
  DCHECK(!IsCriticalNative());
  // Use X22-X29 from the native callee saves.
  constexpr size_t kStart = 3u;
  constexpr size_t kLength = 8u;
  static_assert(kAapcs64CalleeSaveRegisters[kStart].Equals(
                    Arm64ManagedRegister::FromXRegister(X22)));
  static_assert(kAapcs64CalleeSaveRegisters[kStart + kLength - 1u].Equals(
                    Arm64ManagedRegister::FromXRegister(X29)));
  static_assert((kAapcs64CoreCalleeSpillMask & ~kCoreCalleeSpillMask) == 0u);
  return ArrayRef<const ManagedRegister>(kAapcs64CalleeSaveRegisters).SubArray(kStart, kLength);
}

ArrayRef<const ManagedRegister> Arm64JniCallingConvention::ArgumentScratchRegisters() const {
  DCHECK(!IsCriticalNative());
  ArrayRef<const ManagedRegister> scratch_regs(kXArgumentRegisters);
  // Exclude the return register (X0) even if unused. Using the same scratch registers
  // helps make more JNI stubs identical for better reuse, such as deduplicating them
  // in oat files.
  static_assert(kXArgumentRegisters[0].Equals(Arm64ManagedRegister::FromXRegister(X0)));
  scratch_regs = scratch_regs.SubArray(/*pos=*/ 1u);
  DCHECK(std::none_of(scratch_regs.begin(),
                      scratch_regs.end(),
                      [return_reg = ReturnRegister().AsArm64()](ManagedRegister reg) {
                        return return_reg.Overlaps(reg.AsArm64());
                      }));
  return scratch_regs;
}

size_t Arm64JniCallingConvention::FrameSize() const {
  if (is_critical_native_) {
    CHECK(!SpillsMethod());
    CHECK(!HasLocalReferenceSegmentState());
    return 0u;  // There is no managed frame for @CriticalNative.
  }

  // Method*, callee save area size, local reference segment state.
  DCHECK(SpillsMethod());
  size_t method_ptr_size = static_cast<size_t>(kFramePointerSize);
  size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize;
  size_t total_size = method_ptr_size + callee_save_area_size;

  DCHECK(HasLocalReferenceSegmentState());
  // Cookie is saved in one of the spilled registers.

  return RoundUp(total_size, kStackAlignment);
}
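// Illustrative (assuming 8-byte kFramePointerSize and 16-byte kStackAlignment): with the
// 20 callee-saves above (12 core + 8 FP), total_size is 8 + 20 * 8 = 168 bytes, rounded
// up to 176.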

size_t Arm64JniCallingConvention::OutFrameSize() const {
  // Count param args, including JNIEnv* and jclass*.
  size_t all_args = NumberOfExtraArgumentsForJni() + NumArgs();
  size_t num_fp_args = NumFloatOrDoubleArgs();
  DCHECK_GE(all_args, num_fp_args);
  size_t num_non_fp_args = all_args - num_fp_args;
  // The size of outgoing arguments.
  size_t size = GetNativeOutArgsSize(num_fp_args, num_non_fp_args);

  // @CriticalNative can use tail call as all managed callee saves are preserved by AAPCS64.
  static_assert((kCoreCalleeSpillMask & ~kAapcs64CoreCalleeSpillMask) == 0u);
  static_assert((kFpCalleeSpillMask & ~kAapcs64FpCalleeSpillMask) == 0u);

  // For @CriticalNative, we can make a tail call if there are no stack args and
  // we do not need to extend the result. Otherwise, add space for return PC.
  if (is_critical_native_ && (size != 0u || RequiresSmallResultTypeExtension())) {
    size += kFramePointerSize;  // We need to spill LR with the args.
  }
  size_t out_args_size = RoundUp(size, kAapcs64StackAlignment);
  if (UNLIKELY(IsCriticalNative())) {
    DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty()));
  }
  return out_args_size;
}
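// Illustrative (assuming @CriticalNative receives no JNIEnv*/jclass extra arguments):
// for `int f(int, int)` both arguments fit in registers, so `size` stays 0, no result
// extension is needed, and the stub can tail-call with OutFrameSize() == 0.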

ArrayRef<const ManagedRegister> Arm64JniCallingConvention::CalleeSaveRegisters() const {
  if (UNLIKELY(IsCriticalNative())) {
    if (UseTailCall()) {
      return ArrayRef<const ManagedRegister>();  // Do not spill anything.
    } else {
      // Spill LR with out args.
      static_assert((kCoreCalleeSpillMask >> LR) == 1u);  // Contains LR as the highest bit.
      constexpr size_t lr_index = POPCOUNT(kCoreCalleeSpillMask) - 1u;
      static_assert(kCalleeSaveRegisters[lr_index].Equals(
                        Arm64ManagedRegister::FromXRegister(LR)));
      return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters).SubArray(
          /*pos=*/ lr_index, /*length=*/ 1u);
    }
  } else {
    return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters);
  }
}

bool Arm64JniCallingConvention::IsCurrentParamInRegister() {
  if (IsCurrentParamAFloatOrDouble()) {
    return (itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments);
  } else {
    return ((itr_args_ - itr_float_and_doubles_) < kMaxIntLikeRegisterArguments);
  }
  // TODO: Can we just call CurrentParamRegister to figure this out?
}

bool Arm64JniCallingConvention::IsCurrentParamOnStack() {
  // Is this ever not the same for all the architectures?
  return !IsCurrentParamInRegister();
}

ManagedRegister Arm64JniCallingConvention::CurrentParamRegister() {
  CHECK(IsCurrentParamInRegister());
  if (IsCurrentParamAFloatOrDouble()) {
    CHECK_LT(itr_float_and_doubles_, kMaxFloatOrDoubleRegisterArguments);
    if (IsCurrentParamADouble()) {
      return Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[itr_float_and_doubles_]);
    } else {
      return Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[itr_float_and_doubles_]);
    }
  } else {
    int gp_reg = itr_args_ - itr_float_and_doubles_;
    CHECK_LT(static_cast<unsigned int>(gp_reg), kMaxIntLikeRegisterArguments);
    ManagedRegister x_reg = kXArgumentRegisters[gp_reg];
    if (IsCurrentParamALong() || IsCurrentParamAReference() || IsCurrentParamJniEnv()) {
      return x_reg;
    } else {
      return Arm64ManagedRegister::FromWRegister(x_reg.AsArm64().AsOverlappingWRegister());
    }
  }
}
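// For illustration, a non-static native method `double f(int i, double d, long j)`
// yields: JNIEnv* in X0, jobject in X1, i in W2, d in D0, j in X3, matching AAPCS64.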

FrameOffset Arm64JniCallingConvention::CurrentParamStackOffset() {
  CHECK(IsCurrentParamOnStack());
  size_t args_on_stack = itr_args_
                  - std::min(kMaxFloatOrDoubleRegisterArguments,
                             static_cast<size_t>(itr_float_and_doubles_))
                  - std::min(kMaxIntLikeRegisterArguments,
                             static_cast<size_t>(itr_args_ - itr_float_and_doubles_));
  size_t offset = displacement_.Int32Value() - OutFrameSize() + (args_on_stack * kFramePointerSize);
  CHECK_LT(offset, OutFrameSize());
  return FrameOffset(offset);
}

// X15 is neither a managed callee-save nor an argument register. It is therefore suitable
// as the locking argument for synchronized methods and as the hidden argument for
// @CriticalNative methods.
static void AssertX15IsNeitherCalleeSaveNorArgumentRegister() {
  // TODO: Change to static_assert; std::none_of should be constexpr since C++20.
  DCHECK(std::none_of(kCalleeSaveRegisters,
                      kCalleeSaveRegisters + std::size(kCalleeSaveRegisters),
                      [](ManagedRegister callee_save) constexpr {
                        return callee_save.Equals(Arm64ManagedRegister::FromXRegister(X15));
                      }));
  DCHECK(std::none_of(kXArgumentRegisters,
                      kXArgumentRegisters + std::size(kXArgumentRegisters),
                      [](ManagedRegister arg) { return arg.AsArm64().AsXRegister() == X15; }));
}

ManagedRegister Arm64JniCallingConvention::LockingArgumentRegister() const {
  DCHECK(!IsFastNative());
  DCHECK(!IsCriticalNative());
  DCHECK(IsSynchronized());
  AssertX15IsNeitherCalleeSaveNorArgumentRegister();
  return Arm64ManagedRegister::FromWRegister(W15);
}

ManagedRegister Arm64JniCallingConvention::HiddenArgumentRegister() const {
  DCHECK(IsCriticalNative());
  AssertX15IsNeitherCalleeSaveNorArgumentRegister();
  return Arm64ManagedRegister::FromXRegister(X15);
}

// Whether to use tail call (used only for @CriticalNative).
bool Arm64JniCallingConvention::UseTailCall() const {
  CHECK(IsCriticalNative());
  return OutFrameSize() == 0u;
}

}  // namespace arm64
}  // namespace art