/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "calling_convention_x86_64.h"

#include "base/logging.h"
#include "utils/x86_64/managed_register_x86_64.h"
#include "utils.h"

namespace art {
namespace x86_64 {

// Calling convention

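// RAX is not an argument register in either convention, so it is free to use as a
// scratch register across calls.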
ManagedRegister X86_64ManagedRuntimeCallingConvention::InterproceduralScratchRegister() {
  return X86_64ManagedRegister::FromCpuRegister(RAX);
}

ManagedRegister X86_64JniCallingConvention::InterproceduralScratchRegister() {
  return X86_64ManagedRegister::FromCpuRegister(RAX);
}

ManagedRegister X86_64JniCallingConvention::ReturnScratchRegister() const {
  return ManagedRegister::NoRegister();  // No free regs, so assembler uses push/pop
}

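// The first character of the shorty encodes the return type: 'F'/'D' (float/double)
// returns in XMM0, 'V' (void) has no return register, and everything else
// (integral types, references and 'J'/long) returns in RAX.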
static ManagedRegister ReturnRegisterForShorty(const char* shorty, bool jni) {
  if (shorty[0] == 'F' || shorty[0] == 'D') {
    return X86_64ManagedRegister::FromXmmRegister(XMM0);
  } else if (shorty[0] == 'J') {
    return X86_64ManagedRegister::FromCpuRegister(RAX);
  } else if (shorty[0] == 'V') {
    return ManagedRegister::NoRegister();
  } else {
    return X86_64ManagedRegister::FromCpuRegister(RAX);
  }
}

ManagedRegister X86_64ManagedRuntimeCallingConvention::ReturnRegister() {
  return ReturnRegisterForShorty(GetShorty(), false);
}

ManagedRegister X86_64JniCallingConvention::ReturnRegister() {
  return ReturnRegisterForShorty(GetShorty(), true);
}

ManagedRegister X86_64JniCallingConvention::IntReturnRegister() {
  return X86_64ManagedRegister::FromCpuRegister(RAX);
}

// Managed runtime calling convention

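// Managed code passes the callee's ArtMethod* in RDI, the first integer argument
// register of the System V AMD64 ABI.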
ManagedRegister X86_64ManagedRuntimeCallingConvention::MethodRegister() {
  return X86_64ManagedRegister::FromCpuRegister(RDI);
}

bool X86_64ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
  return !IsCurrentParamOnStack();
}

bool X86_64ManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
  // We assume all parameters are on the stack; arguments arriving in registers are
  // spilled as entry_spills.
  return true;
}

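// Integer and reference arguments use RSI, RDX, RCX, R8 and R9; RDI is skipped
// because it already holds the ArtMethod* (see MethodRegister()).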
ManagedRegister X86_64ManagedRuntimeCallingConvention::CurrentParamRegister() {
  ManagedRegister res = ManagedRegister::NoRegister();
  if (!IsCurrentParamAFloatOrDouble()) {
    switch (itr_args_ - itr_float_and_doubles_) {
    case 0: res = X86_64ManagedRegister::FromCpuRegister(RSI); break;
    case 1: res = X86_64ManagedRegister::FromCpuRegister(RDX); break;
    case 2: res = X86_64ManagedRegister::FromCpuRegister(RCX); break;
    case 3: res = X86_64ManagedRegister::FromCpuRegister(R8); break;
    case 4: res = X86_64ManagedRegister::FromCpuRegister(R9); break;
    }
  } else if (itr_float_and_doubles_ < 8) {
    // First eight float parameters are passed via XMM0..XMM7
    res = X86_64ManagedRegister::FromXmmRegister(
                                 static_cast<FloatRegister>(XMM0 + itr_float_and_doubles_));
  }
  return res;
}

FrameOffset X86_64ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
  return FrameOffset(displacement_.Int32Value() +   // displacement
                     sizeof(StackReference<mirror::ArtMethod>) +  // Method ref
                     (itr_slots_ * sizeof(uint32_t)));  // offset into in args
}

const ManagedRegisterEntrySpills& X86_64ManagedRuntimeCallingConvention::EntrySpills() {
  // We spill the argument registers on x86-64 to free them up for scratch use; we then
  // assume all arguments are on the stack.
  if (entry_spills_.size() == 0) {
    ResetIterator(FrameOffset(0));
    while (HasNext()) {
      ManagedRegister in_reg = CurrentParamRegister();
      if (!in_reg.IsNoRegister()) {
        int32_t size = IsParamALongOrDouble(itr_args_) ? 8 : 4;
        int32_t spill_offset = CurrentParamStackOffset().Uint32Value();
        ManagedRegisterSpill spill(in_reg, size, spill_offset);
        entry_spills_.push_back(spill);
      }
      Next();
    }
  }
  return entry_spills_;
}

// JNI calling convention

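// Registers saved and restored by the JNI stub around the native call: the
// System V AMD64 callee-saves RBX, RBP and R12-R15, plus XMM12-XMM15, which
// managed code expects to be preserved as well.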
X86_64JniCallingConvention::X86_64JniCallingConvention(bool is_static, bool is_synchronized,
                                                       const char* shorty)
    : JniCallingConvention(is_static, is_synchronized, shorty, kFramePointerSize) {
  callee_save_regs_.push_back(X86_64ManagedRegister::FromCpuRegister(RBX));
  callee_save_regs_.push_back(X86_64ManagedRegister::FromCpuRegister(RBP));
  callee_save_regs_.push_back(X86_64ManagedRegister::FromCpuRegister(R12));
  callee_save_regs_.push_back(X86_64ManagedRegister::FromCpuRegister(R13));
  callee_save_regs_.push_back(X86_64ManagedRegister::FromCpuRegister(R14));
  callee_save_regs_.push_back(X86_64ManagedRegister::FromCpuRegister(R15));
  callee_save_regs_.push_back(X86_64ManagedRegister::FromXmmRegister(XMM12));
  callee_save_regs_.push_back(X86_64ManagedRegister::FromXmmRegister(XMM13));
  callee_save_regs_.push_back(X86_64ManagedRegister::FromXmmRegister(XMM14));
  callee_save_regs_.push_back(X86_64ManagedRegister::FromXmmRegister(XMM15));
}

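// The extra bit at position kNumberOfCpuRegisters stands in for the return address,
// which is counted as part of the core spill area.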
uint32_t X86_64JniCallingConvention::CoreSpillMask() const {
  return 1 << RBX | 1 << RBP | 1 << R12 | 1 << R13 | 1 << R14 | 1 << R15 |
      1 << kNumberOfCpuRegisters;
}

uint32_t X86_64JniCallingConvention::FpSpillMask() const {
  return 1 << XMM12 | 1 << XMM13 | 1 << XMM14 | 1 << XMM15;
}

size_t X86_64JniCallingConvention::FrameSize() {
  // Method*, return address and callee save area size, local reference segment state
  size_t frame_data_size = sizeof(StackReference<mirror::ArtMethod>) +
      (2 + CalleeSaveRegisters().size()) * kFramePointerSize;
  // References plus link_ (pointer) and number_of_references_ (uint32_t) for HandleScope header
  size_t handle_scope_size = HandleScope::SizeOf(kFramePointerSize, ReferenceCount());
  // Plus return value spill area size
  return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment);
}

size_t X86_64JniCallingConvention::OutArgSize() {
  return RoundUp(NumberOfOutgoingStackArgs() * kFramePointerSize, kStackAlignment);
}

bool X86_64JniCallingConvention::IsCurrentParamInRegister() {
  return !IsCurrentParamOnStack();
}

bool X86_64JniCallingConvention::IsCurrentParamOnStack() {
  return CurrentParamRegister().IsNoRegister();
}

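// Native arguments follow the System V AMD64 ABI: the first six integer/pointer
// arguments go in RDI, RSI, RDX, RCX, R8 and R9, and the first eight float/double
// arguments go in XMM0..XMM7; everything else goes on the stack.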
ManagedRegister X86_64JniCallingConvention::CurrentParamRegister() {
  ManagedRegister res = ManagedRegister::NoRegister();
  if (!IsCurrentParamAFloatOrDouble()) {
    switch (itr_args_ - itr_float_and_doubles_) {
    case 0: res = X86_64ManagedRegister::FromCpuRegister(RDI); break;
    case 1: res = X86_64ManagedRegister::FromCpuRegister(RSI); break;
    case 2: res = X86_64ManagedRegister::FromCpuRegister(RDX); break;
    case 3: res = X86_64ManagedRegister::FromCpuRegister(RCX); break;
    case 4: res = X86_64ManagedRegister::FromCpuRegister(R8); break;
    case 5: res = X86_64ManagedRegister::FromCpuRegister(R9); break;
    }
  } else if (itr_float_and_doubles_ < 8) {
    // First eight float parameters are passed via XMM0..XMM7
    res = X86_64ManagedRegister::FromXmmRegister(
                                 static_cast<FloatRegister>(XMM0 + itr_float_and_doubles_));
  }
  return res;
}

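// Each stack-passed argument occupies one 8-byte slot in the outgoing argument area;
// 'offset' is the number of earlier arguments that were also passed on the stack.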
FrameOffset X86_64JniCallingConvention::CurrentParamStackOffset() {
  size_t offset = itr_args_
      - std::min(8U, itr_float_and_doubles_)               // Float arguments passed through XMM0..XMM7
      - std::min(6U, itr_args_ - itr_float_and_doubles_);  // Integer arguments passed through GPR
  return FrameOffset(displacement_.Int32Value() - OutArgSize() + (offset * kFramePointerSize));
}

size_t X86_64JniCallingConvention::NumberOfOutgoingStackArgs() {
  size_t static_args = IsStatic() ? 1 : 0;  // count jclass
  // regular argument parameters and this
  size_t param_args = NumArgs() + NumLongOrDoubleArgs();
  // count JNIEnv* and return pc (pushed after Method*)
  size_t total_args = static_args + param_args + 2;

  // Float arguments passed through XMM0..XMM7
  // Other (integer) arguments passed through GPR (RDI, RSI, RDX, RCX, R8, R9)
  size_t total_stack_args = total_args
                            - std::min(8U, static_cast<unsigned int>(NumFloatOrDoubleArgs()))
                            - std::min(6U, static_cast<unsigned int>(NumArgs() - NumFloatOrDoubleArgs()));

  return total_stack_args;
}

}  // namespace x86_64
}  // namespace art