/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_
#define ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_

#include "base/casts.h"
#include "base/macros.h"
#include "class_root-inl.h"
#include "code_generator.h"
#include "data_type-inl.h"
#include "dex/dex_file-inl.h"
#include "locations.h"
#include "mirror/var_handle.h"
#include "nodes.h"
#include "utils/assembler.h"
#include "utils/label.h"

namespace art HIDDEN {

// Default slow-path for fallback (calling the managed code to handle the intrinsic) in an
// intrinsified call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
//
// Note: If an invoke wasn't sharpened, we will put down an invoke-virtual here. That's potentially
//       sub-optimal (compared to a direct pointer call), but this is a slow-path.
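//
// A backend instantiates this template with its own calling convention visitor and,
// where needed, its own slow-path base class and assembler. A minimal, illustrative
// sketch (the alias name is hypothetical; the defaults above cover the last two
// template arguments):
//
//   using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;
//
//   SlowPathCode* slow_path =
//       new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
//   codegen->AddSlowPath(slow_path);
//   // ... the fast path branches to slow_path->GetEntryLabel() on failure and the
//   // slow path jumps back to slow_path->GetExitLabel() when done.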

template <typename TDexCallingConvention,
          typename TSlowPathCode = SlowPathCode,
          typename TAssembler = Assembler>
class IntrinsicSlowPath : public TSlowPathCode {
 public:
  explicit IntrinsicSlowPath(HInvoke* invoke) : TSlowPathCode(invoke), invoke_(invoke) { }

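  // Move the arguments of the intrinsified invoke into the positions required by the
  // dex calling convention and return the location of the ArtMethod* for the call.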
  Location MoveArguments(CodeGenerator* codegen) {
    TDexCallingConvention calling_convention_visitor;
    IntrinsicVisitor::MoveArguments(invoke_, codegen, &calling_convention_visitor);
    return calling_convention_visitor.GetMethodLocation();
  }

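  // Emit the fallback: save live registers, move the arguments into place, perform
  // the call, copy the return value to the expected output and restore registers.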
  void EmitNativeCode(CodeGenerator* codegen) override {
    TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());
    assembler->Bind(this->GetEntryLabel());

    this->SaveLiveRegisters(codegen, invoke_->GetLocations());

    Location method_loc = MoveArguments(codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      HInvokeStaticOrDirect* invoke_static_or_direct = invoke_->AsInvokeStaticOrDirect();
      DCHECK_NE(invoke_static_or_direct->GetMethodLoadKind(), MethodLoadKind::kRecursive);
      DCHECK_NE(invoke_static_or_direct->GetCodePtrLocation(),
                CodePtrLocation::kCallCriticalNative);
      codegen->GenerateStaticOrDirectCall(invoke_static_or_direct, method_loc, this);
    } else if (invoke_->IsInvokeVirtual()) {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc, this);
    } else {
      DCHECK(invoke_->IsInvokePolymorphic());
      codegen->GenerateInvokePolymorphicCall(invoke_->AsInvokePolymorphic(), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegisterKind());  // TODO: Replace this when we support output in memory.
      // We want to double-check that we don't overwrite a live register with the return
      // value.
      // Note: For the possible kNoOutputOverlap case we can't simply remove the OUT register
      // from the GetLiveRegisters() - theoretically it might be needed after the return from
      // the slow path.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->OverlapsRegisters(out));
      codegen->MoveFromReturnRegister(out, invoke_->GetType());
    }

    this->RestoreLiveRegisters(codegen, invoke_->GetLocations());
    assembler->Jump(this->GetExitLabel());
  }

  const char* GetDescription() const override { return "IntrinsicSlowPath"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPath);
};

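// Return the number of coordinate arguments of a VarHandle accessor intrinsic (e.g. the
// holder object and/or an array index): all accessor arguments except the VarHandle
// object itself and the trailing var-type (expected/new value) parameters.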
static inline size_t GetExpectedVarHandleCoordinatesCount(HInvoke* invoke) {
  mirror::VarHandle::AccessModeTemplate access_mode_template =
      mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
  size_t var_type_count = mirror::VarHandle::GetNumberOfVarTypeParameters(access_mode_template);
  size_t accessor_argument_count = invoke->GetNumberOfArguments() - 1;

  return accessor_argument_count - var_type_count;
}

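// Return the type of the argument at `index` in the invoke-polymorphic's proto shorty
// (index 0 holds the return type).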
static inline DataType::Type GetDataTypeFromShorty(HInvoke* invoke, uint32_t index) {
  DCHECK(invoke->IsInvokePolymorphic());
  const DexFile* dex_file = invoke->GetMethodReference().dex_file;
  const char* shorty = dex_file->GetShorty(invoke->AsInvokePolymorphic()->GetProtoIndex());
  DCHECK_LT(index, strlen(shorty));

  return DataType::FromShorty(shorty[index]);
}

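// Check whether the intrinsic is a VarHandle getAndBitwise{Or,Xor,And} operation,
// in any memory order (plain, acquire or release).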
static inline bool IsVarHandleGetAndBitwiseOp(HInvoke* invoke) {
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kVarHandleGetAndBitwiseOr:
    case Intrinsics::kVarHandleGetAndBitwiseOrAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseOrRelease:
    case Intrinsics::kVarHandleGetAndBitwiseXor:
    case Intrinsics::kVarHandleGetAndBitwiseXorAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseXorRelease:
    case Intrinsics::kVarHandleGetAndBitwiseAnd:
    case Intrinsics::kVarHandleGetAndBitwiseAndAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseAndRelease:
      return true;
    default:
      return false;
  }
}

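// Check whether the intrinsic is a VarHandle getAndAdd operation, in any memory order.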
static inline bool IsVarHandleGetAndAdd(HInvoke* invoke) {
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kVarHandleGetAndAdd:
    case Intrinsics::kVarHandleGetAndAddAcquire:
    case Intrinsics::kVarHandleGetAndAddRelease:
      return true;
    default:
      return false;
  }
}

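// Check whether the intrinsic uses the kGet access mode template, i.e. it is a
// plain, opaque, acquire or volatile VarHandle get.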
static inline bool IsVarHandleGet(HInvoke* invoke) {
  mirror::VarHandle::AccessModeTemplate access_mode =
      mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
  return access_mode == mirror::VarHandle::AccessModeTemplate::kGet;
}

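// Check whether the intrinsic is an Unsafe/JdkUnsafe reference get, in any memory order.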
static inline bool IsUnsafeGetReference(HInvoke* invoke) {
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kUnsafeGetObject:
    case Intrinsics::kUnsafeGetObjectVolatile:
    case Intrinsics::kJdkUnsafeGetReference:
    case Intrinsics::kJdkUnsafeGetReferenceVolatile:
    case Intrinsics::kJdkUnsafeGetReferenceAcquire:
      return true;
    default:
      return false;
  }
}

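// Check whether the intrinsic is an Unsafe/JdkUnsafe reference compare-and-set/swap.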
static inline bool IsUnsafeCASReference(HInvoke* invoke) {
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kUnsafeCASObject:
    case Intrinsics::kJdkUnsafeCASObject:
    case Intrinsics::kJdkUnsafeCompareAndSetReference:
      return true;
    default:
      return false;
  }
}

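// Check whether the intrinsic is an Unsafe/JdkUnsafe reference getAndSet.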
static inline bool IsUnsafeGetAndSetReference(HInvoke* invoke) {
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kUnsafeGetAndSetObject:
    case Intrinsics::kJdkUnsafeGetAndSetReference:
      return true;
    default:
      return false;
  }
}

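// Check whether the intrinsic belongs to the VarHandle compare-and-set,
// compare-and-exchange or get-and-update family.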
static inline bool IsVarHandleCASFamily(HInvoke* invoke) {
  mirror::VarHandle::AccessModeTemplate access_mode =
      mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
  return access_mode == mirror::VarHandle::AccessModeTemplate::kCompareAndSet ||
      access_mode == mirror::VarHandle::AccessModeTemplate::kGetAndUpdate ||
      access_mode == mirror::VarHandle::AccessModeTemplate::kCompareAndExchange;
}

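// Return the value type of a VarHandle intrinsic: the type of the last argument if the
// access mode takes a value (e.g. set or compareAndSet), otherwise the return type of
// the invoke (e.g. get).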
static inline DataType::Type GetVarHandleExpectedValueType(HInvoke* invoke,
                                                           size_t expected_coordinates_count) {
  DCHECK_EQ(expected_coordinates_count, GetExpectedVarHandleCoordinatesCount(invoke));
  uint32_t number_of_arguments = invoke->GetNumberOfArguments();
  DCHECK_GE(number_of_arguments, /* VarHandle object */ 1u + expected_coordinates_count);
  if (number_of_arguments == /* VarHandle object */ 1u + expected_coordinates_count) {
    return invoke->GetType();
  } else {
    return GetDataTypeFromShorty(invoke, number_of_arguments - 1u);
  }
}

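// Return the ArtField referenced by a VarHandle that is known to be in the boot image
// (see VarHandleOptimizations::GetUseKnownImageVarHandle()); the VarHandle object is
// retrieved from the static final field that the invoke loads it from.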
static inline ArtField* GetBootImageVarHandleField(HInvoke* invoke)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK_LE(GetExpectedVarHandleCoordinatesCount(invoke), 1u);
  DCHECK(VarHandleOptimizations(invoke).GetUseKnownImageVarHandle());
  HInstruction* var_handle_instruction = invoke->InputAt(0);
  if (var_handle_instruction->IsNullCheck()) {
    var_handle_instruction = var_handle_instruction->InputAt(0);
  }
  DCHECK(var_handle_instruction->IsStaticFieldGet());
  ArtField* field = var_handle_instruction->AsStaticFieldGet()->GetFieldInfo().GetField();
  DCHECK(field->IsStatic());
  DCHECK(field->IsFinal());
  DCHECK(var_handle_instruction->InputAt(0)->AsLoadClass()->IsInImage());
  ObjPtr<mirror::Object> var_handle = field->GetObject(field->GetDeclaringClass());
  DCHECK(var_handle->GetClass() ==
         (GetExpectedVarHandleCoordinatesCount(invoke) == 0u
             ? GetClassRoot<mirror::StaticFieldVarHandle>()
             : GetClassRoot<mirror::FieldVarHandle>()));
  static_assert(std::is_base_of_v<mirror::FieldVarHandle, mirror::StaticFieldVarHandle>);
  return ObjPtr<mirror::FieldVarHandle>::DownCast(var_handle)->GetArtField();
}

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_