/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17 #include "intrinsics.h"
18
19 #include "art_field-inl.h"
20 #include "art_method-inl.h"
21 #include "base/utils.h"
22 #include "class_linker.h"
23 #include "class_root-inl.h"
24 #include "code_generator.h"
25 #include "dex/invoke_type.h"
26 #include "driver/compiler_options.h"
27 #include "gc/space/image_space.h"
28 #include "intrinsic_objects.h"
29 #include "intrinsics_list.h"
30 #include "nodes.h"
31 #include "oat/image-inl.h"
32 #include "obj_ptr-inl.h"
33 #include "scoped_thread_state_change-inl.h"
34 #include "thread-current-inl.h"
35 #include "well_known_classes-inl.h"
36
37 namespace art HIDDEN {
38
operator <<(std::ostream & os,const Intrinsics & intrinsic)39 std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic) {
40 switch (intrinsic) {
41 case Intrinsics::kNone:
42 os << "None";
43 break;
44 #define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
45 case Intrinsics::k ## Name: \
46 os << # Name; \
47 break;
48 ART_INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
49 #undef OPTIMIZING_INTRINSICS
50 }
51 return os;
52 }
53
GetBootImageLiveObjects()54 static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects()
55 REQUIRES_SHARED(Locks::mutator_lock_) {
56 gc::Heap* heap = Runtime::Current()->GetHeap();
57 const std::vector<gc::space::ImageSpace*>& boot_image_spaces = heap->GetBootImageSpaces();
58 DCHECK(!boot_image_spaces.empty());
59 const ImageHeader& main_header = boot_image_spaces[0]->GetImageHeader();
60 ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
61 ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
62 main_header.GetImageRoot<kWithoutReadBarrier>(ImageHeader::kBootImageLiveObjects));
63 DCHECK(boot_image_live_objects != nullptr);
64 DCHECK(heap->ObjectIsInBootImageSpace(boot_image_live_objects));
65 return boot_image_live_objects;
66 }
67
CanReferenceBootImageObjects(HInvoke * invoke,const CompilerOptions & compiler_options)68 static bool CanReferenceBootImageObjects(HInvoke* invoke, const CompilerOptions& compiler_options) {
69 // Piggyback on the method load kind to determine whether we can use PC-relative addressing
70 // for AOT. This should cover both the testing config (non-PIC boot image) and codegens that
71 // reject PC-relative load kinds and fall back to the runtime call.
72 if (compiler_options.IsAotCompiler() &&
73 !invoke->AsInvokeStaticOrDirect()->HasPcRelativeMethodLoadKind()) {
74 return false;
75 }
76 if (!compiler_options.IsBootImage() &&
77 Runtime::Current()->GetHeap()->GetBootImageSpaces().empty()) {
78 return false; // Running without boot image, cannot use required boot image objects.
79 }
80 return true;
81 }
82
ComputeValueOfLocations(HInvoke * invoke,CodeGenerator * codegen,int32_t low,int32_t length,Location return_location,Location first_argument_location)83 void IntrinsicVisitor::ComputeValueOfLocations(HInvoke* invoke,
84 CodeGenerator* codegen,
85 int32_t low,
86 int32_t length,
87 Location return_location,
88 Location first_argument_location) {
89 // The intrinsic will call if it needs to allocate a boxed object.
90 LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
91 const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
92 if (!CanReferenceBootImageObjects(invoke, compiler_options)) {
93 return;
94 }
95 HInstruction* const input = invoke->InputAt(0);
96 if (input->IsIntConstant()) {
97 int32_t value = input->AsIntConstant()->GetValue();
98 if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) < static_cast<uint32_t>(length)) {
99 // No call, we shall use direct pointer to the boxed object.
100 call_kind = LocationSummary::kNoCall;
101 }
102 }
103
104 ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
105 LocationSummary* locations = new (allocator) LocationSummary(invoke, call_kind, kIntrinsified);
106 if (call_kind == LocationSummary::kCallOnMainOnly) {
107 locations->SetInAt(0, Location::RegisterOrConstant(input));
108 locations->AddTemp(first_argument_location);
109 locations->SetOut(return_location);
110 } else {
111 locations->SetInAt(0, Location::ConstantLocation(input));
112 locations->SetOut(Location::RequiresRegister());
113 }
114 }
115
ValueOfInfo()116 inline IntrinsicVisitor::ValueOfInfo::ValueOfInfo()
117 : value_offset(0),
118 low(0),
119 length(0u),
120 value_boot_image_reference(kInvalidReference) {}
121
ComputeValueOfInfo(HInvoke * invoke,const CompilerOptions & compiler_options,ArtField * value_field,int32_t low,int32_t length,size_t base)122 IntrinsicVisitor::ValueOfInfo IntrinsicVisitor::ComputeValueOfInfo(
123 HInvoke* invoke,
124 const CompilerOptions& compiler_options,
125 ArtField* value_field,
126 int32_t low,
127 int32_t length,
128 size_t base) {
129 ValueOfInfo info;
130 info.low = low;
131 info.length = length;
132 info.value_offset = value_field->GetOffset().Uint32Value();
133 if (compiler_options.IsBootImage()) {
134 if (invoke->InputAt(0)->IsIntConstant()) {
135 int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
136 uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
137 if (index < static_cast<uint32_t>(info.length)) {
138 info.value_boot_image_reference = IntrinsicObjects::EncodePatch(
139 IntrinsicObjects::PatchType::kValueOfObject, index + base);
140 } else {
141 // Not in the cache.
142 info.value_boot_image_reference = ValueOfInfo::kInvalidReference;
143 }
144 } else {
145 info.array_data_boot_image_reference =
146 IntrinsicObjects::EncodePatch(IntrinsicObjects::PatchType::kValueOfArray, base);
147 }
148 } else {
149 ScopedObjectAccess soa(Thread::Current());
150 ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
151
152 if (invoke->InputAt(0)->IsIntConstant()) {
153 int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
154 uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
155 if (index < static_cast<uint32_t>(info.length)) {
156 ObjPtr<mirror::Object> object =
157 IntrinsicObjects::GetValueOfObject(boot_image_live_objects, base, index);
158 info.value_boot_image_reference = CodeGenerator::GetBootImageOffset(object);
159 } else {
160 // Not in the cache.
161 info.value_boot_image_reference = ValueOfInfo::kInvalidReference;
162 }
163 } else {
164 info.array_data_boot_image_reference =
165 CodeGenerator::GetBootImageOffset(boot_image_live_objects) +
166 IntrinsicObjects::GetValueOfArrayDataOffset(
167 boot_image_live_objects, base).Uint32Value();
168 }
169 }
170
171 return info;
172 }
173
GetReferenceDisableIntrinsicOffset()174 MemberOffset IntrinsicVisitor::GetReferenceDisableIntrinsicOffset() {
175 return WellKnownClasses::java_lang_ref_Reference_disableIntrinsic->GetOffset();
176 }
177
GetReferenceSlowPathEnabledOffset()178 MemberOffset IntrinsicVisitor::GetReferenceSlowPathEnabledOffset() {
179 return WellKnownClasses::java_lang_ref_Reference_slowPathEnabled->GetOffset();
180 }
181
CreateReferenceGetReferentLocations(HInvoke * invoke,CodeGenerator * codegen)182 void IntrinsicVisitor::CreateReferenceGetReferentLocations(HInvoke* invoke,
183 CodeGenerator* codegen) {
184 if (!CanReferenceBootImageObjects(invoke, codegen->GetCompilerOptions())) {
185 return;
186 }
187
188 ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
189 LocationSummary* locations =
190 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
191 locations->SetInAt(0, Location::RequiresRegister());
192 locations->SetOut(Location::RequiresRegister());
193 }
194
CreateReferenceRefersToLocations(HInvoke * invoke,CodeGenerator * codegen)195 void IntrinsicVisitor::CreateReferenceRefersToLocations(HInvoke* invoke, CodeGenerator* codegen) {
196 if (codegen->EmitNonBakerReadBarrier()) {
197 // Unimplemented for non-Baker read barrier.
198 return;
199 }
200
201 ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
202 LocationSummary* locations =
203 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
204 locations->SetInAt(0, Location::RequiresRegister());
205 locations->SetInAt(1, Location::RequiresRegister());
206 locations->SetOut(Location::RequiresRegister());
207 }
208
AssertNonMovableStringClass()209 void IntrinsicVisitor::AssertNonMovableStringClass() {
210 if (kIsDebugBuild) {
211 ScopedObjectAccess soa(Thread::Current());
212 ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>();
213 CHECK(!art::Runtime::Current()->GetHeap()->IsMovableObject(string_class));
214 }
215 }
216
InsertFpToIntegralIntrinsic(HInvokeStaticOrDirect * invoke,size_t input_index)217 void InsertFpToIntegralIntrinsic(HInvokeStaticOrDirect* invoke, size_t input_index) {
218 DCHECK_EQ(invoke->GetCodePtrLocation(), CodePtrLocation::kCallCriticalNative);
219 DCHECK(!invoke->GetBlock()->GetGraph()->IsDebuggable())
220 << "Unexpected direct @CriticalNative call in a debuggable graph!";
221 DCHECK_LT(input_index, invoke->GetNumberOfArguments());
222 HInstruction* input = invoke->InputAt(input_index);
223 DataType::Type input_type = input->GetType();
224 DCHECK(DataType::IsFloatingPointType(input_type));
225 bool is_double = (input_type == DataType::Type::kFloat64);
226 DataType::Type converted_type = is_double ? DataType::Type::kInt64 : DataType::Type::kInt32;
227 ArtMethod* resolved_method = is_double
228 ? WellKnownClasses::java_lang_Double_doubleToRawLongBits
229 : WellKnownClasses::java_lang_Float_floatToRawIntBits;
230 DCHECK(resolved_method != nullptr);
231 DCHECK(resolved_method->IsIntrinsic());
232 MethodReference target_method(nullptr, 0);
233 {
234 ScopedObjectAccess soa(Thread::Current());
235 target_method =
236 MethodReference(resolved_method->GetDexFile(), resolved_method->GetDexMethodIndex());
237 }
238 // Use arbitrary dispatch info that does not require the method argument.
239 HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
240 MethodLoadKind::kBssEntry,
241 CodePtrLocation::kCallArtMethod,
242 /*method_load_data=*/ 0u
243 };
244 HBasicBlock* block = invoke->GetBlock();
245 ArenaAllocator* allocator = block->GetGraph()->GetAllocator();
246 HInvokeStaticOrDirect* new_input = new (allocator) HInvokeStaticOrDirect(
247 allocator,
248 /*number_of_arguments=*/ 1u,
249 /*number_of_out_vregs=*/ is_double ? 2u : 1u,
250 converted_type,
251 invoke->GetDexPc(),
252 /*method_reference=*/ MethodReference(nullptr, dex::kDexNoIndex),
253 resolved_method,
254 dispatch_info,
255 kStatic,
256 target_method,
257 HInvokeStaticOrDirect::ClinitCheckRequirement::kNone,
258 /*enable_intrinsic_opt=*/ true);
259 // The intrinsic has no side effects and does not need the environment.
260 new_input->SetSideEffects(SideEffects::None());
261 IntrinsicOptimizations opt(new_input);
262 opt.SetDoesNotNeedEnvironment();
263 new_input->SetRawInputAt(0u, input);
264 block->InsertInstructionBefore(new_input, invoke);
265 invoke->ReplaceInput(new_input, input_index);
266 }
267
268 } // namespace art
269