/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "compiler_driver.h"

#include <unordered_set>
#include <vector>
#include <unistd.h>

#ifndef __APPLE__
#include <malloc.h>  // For mallinfo
#endif

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/bit_vector.h"
#include "base/stl_util.h"
#include "base/systrace.h"
#include "base/time_utils.h"
#include "base/timing_logger.h"
#include "class_linker-inl.h"
#include "compiled_class.h"
#include "compiled_method.h"
#include "compiler.h"
#include "compiler_driver-inl.h"
#include "dex_compilation_unit.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "dex/dex_to_dex_compiler.h"
#include "dex/verification_results.h"
#include "dex/verified_method.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "dex/quick/dex_file_to_method_inliner_map.h"
#include "driver/compiler_options.h"
#include "jni_internal.h"
#include "object_lock.h"
#include "profiler.h"
#include "runtime.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/space/image_space.h"
#include "gc/space/space.h"
#include "mirror/class_loader.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/throwable.h"
#include "scoped_thread_state_change.h"
#include "ScopedLocalRef.h"
#include "handle_scope-inl.h"
#include "thread.h"
#include "thread_list.h"
#include "thread_pool.h"
#include "trampolines/trampoline_compiler.h"
#include "transaction.h"
#include "utils/array_ref.h"
#include "utils/dex_cache_arrays_layout-inl.h"
#include "utils/swap_space.h"
#include "verifier/method_verifier.h"
#include "verifier/method_verifier-inl.h"

namespace art {

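// Whether to time compilation of individual methods and warn when a method exceeds the
// compiler's maximum expected compilation time (see the end of CompileMethod() below).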
static constexpr bool kTimeCompileMethod = !kIsDebugBuild;

// Whether the classes-to-compile and methods-to-compile filters are only applied to the boot
// image compilation or, when given, to all compilations.
static constexpr bool kRestrictCompilationFiltersToImage = true;

// Print additional info during profile guided compilation.
static constexpr bool kDebugProfileGuidedCompilation = false;

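// Returns x as a percentage of the total number of samples (x + y).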
static double Percentage(size_t x, size_t y) {
  return 100.0 * (static_cast<double>(x)) / (static_cast<double>(x + y));
}

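// Logs the percentage for the given statistic, skipping statistics without any samples.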
static void DumpStat(size_t x, size_t y, const char* str) {
  if (x == 0 && y == 0) {
    return;
  }
  LOG(INFO) << Percentage(x, y) << "% of " << str << " for " << (x + y) << " cases";
}

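// Collects statistics about AOT resolution, devirtualization and safe-cast elision. Updates are
// guarded by stats_lock_ in debug builds (see STATS_LOCK below) and the totals are dumped after
// compilation when stats dumping is enabled.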
class CompilerDriver::AOTCompilationStats {
 public:
  AOTCompilationStats()
      : stats_lock_("AOT compilation statistics lock"),
        types_in_dex_cache_(0), types_not_in_dex_cache_(0),
        strings_in_dex_cache_(0), strings_not_in_dex_cache_(0),
        resolved_types_(0), unresolved_types_(0),
        resolved_instance_fields_(0), unresolved_instance_fields_(0),
        resolved_local_static_fields_(0), resolved_static_fields_(0), unresolved_static_fields_(0),
        type_based_devirtualization_(0),
        safe_casts_(0), not_safe_casts_(0) {
    for (size_t i = 0; i <= kMaxInvokeType; i++) {
      resolved_methods_[i] = 0;
      unresolved_methods_[i] = 0;
      virtual_made_direct_[i] = 0;
      direct_calls_to_boot_[i] = 0;
      direct_methods_to_boot_[i] = 0;
    }
  }

  void Dump() {
    DumpStat(types_in_dex_cache_, types_not_in_dex_cache_, "types known to be in dex cache");
    DumpStat(strings_in_dex_cache_, strings_not_in_dex_cache_, "strings known to be in dex cache");
    DumpStat(resolved_types_, unresolved_types_, "types resolved");
    DumpStat(resolved_instance_fields_, unresolved_instance_fields_, "instance fields resolved");
    DumpStat(resolved_local_static_fields_ + resolved_static_fields_, unresolved_static_fields_,
             "static fields resolved");
    DumpStat(resolved_local_static_fields_, resolved_static_fields_ + unresolved_static_fields_,
             "static fields local to a class");
    DumpStat(safe_casts_, not_safe_casts_, "check-casts removed based on type information");
    // Note, the code below subtracts the stat value so that when added to the stat value we have
    // 100% of samples. TODO: clean this up.
    DumpStat(type_based_devirtualization_,
             resolved_methods_[kVirtual] + unresolved_methods_[kVirtual] +
             resolved_methods_[kInterface] + unresolved_methods_[kInterface] -
             type_based_devirtualization_,
             "virtual/interface calls made direct based on type information");

    for (size_t i = 0; i <= kMaxInvokeType; i++) {
      std::ostringstream oss;
      oss << static_cast<InvokeType>(i) << " methods were AOT resolved";
      DumpStat(resolved_methods_[i], unresolved_methods_[i], oss.str().c_str());
      if (virtual_made_direct_[i] > 0) {
        std::ostringstream oss2;
        oss2 << static_cast<InvokeType>(i) << " methods made direct";
        DumpStat(virtual_made_direct_[i],
                 resolved_methods_[i] + unresolved_methods_[i] - virtual_made_direct_[i],
                 oss2.str().c_str());
      }
      if (direct_calls_to_boot_[i] > 0) {
        std::ostringstream oss2;
        oss2 << static_cast<InvokeType>(i) << " method calls are direct into boot";
        DumpStat(direct_calls_to_boot_[i],
                 resolved_methods_[i] + unresolved_methods_[i] - direct_calls_to_boot_[i],
                 oss2.str().c_str());
      }
      if (direct_methods_to_boot_[i] > 0) {
        std::ostringstream oss2;
        oss2 << static_cast<InvokeType>(i) << " method calls have methods in boot";
        DumpStat(direct_methods_to_boot_[i],
                 resolved_methods_[i] + unresolved_methods_[i] - direct_methods_to_boot_[i],
                 oss2.str().c_str());
      }
    }
  }

  // Allow lossy statistics in non-debug builds.
#ifndef NDEBUG
#define STATS_LOCK() MutexLock mu(Thread::Current(), stats_lock_)
#else
#define STATS_LOCK()
#endif

  void TypeInDexCache() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    types_in_dex_cache_++;
  }

  void TypeNotInDexCache() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    types_not_in_dex_cache_++;
  }

  void StringInDexCache() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    strings_in_dex_cache_++;
  }

  void StringNotInDexCache() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    strings_not_in_dex_cache_++;
  }

  void TypeDoesntNeedAccessCheck() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    resolved_types_++;
  }

  void TypeNeedsAccessCheck() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    unresolved_types_++;
  }

  void ResolvedInstanceField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    resolved_instance_fields_++;
  }

  void UnresolvedInstanceField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    unresolved_instance_fields_++;
  }

  void ResolvedLocalStaticField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    resolved_local_static_fields_++;
  }

  void ResolvedStaticField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    resolved_static_fields_++;
  }

  void UnresolvedStaticField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    unresolved_static_fields_++;
  }

  // Indicate that type information from the verifier led to devirtualization.
  void PreciseTypeDevirtualization() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    type_based_devirtualization_++;
  }

  // Indicate that a method of the given type was resolved at compile time.
  void ResolvedMethod(InvokeType type) REQUIRES(!stats_lock_) {
    DCHECK_LE(type, kMaxInvokeType);
    STATS_LOCK();
    resolved_methods_[type]++;
  }

  // Indicate that a method of the given type was unresolved at compile time as it was in an
  // unknown dex file.
  void UnresolvedMethod(InvokeType type) REQUIRES(!stats_lock_) {
    DCHECK_LE(type, kMaxInvokeType);
    STATS_LOCK();
    unresolved_methods_[type]++;
  }

  // Indicate that a type of virtual method dispatch has been converted into a direct method
  // dispatch.
  void VirtualMadeDirect(InvokeType type) REQUIRES(!stats_lock_) {
    DCHECK(type == kVirtual || type == kInterface || type == kSuper);
    STATS_LOCK();
    virtual_made_direct_[type]++;
  }

  // Indicate that a method of the given type was able to call directly into boot.
  void DirectCallsToBoot(InvokeType type) REQUIRES(!stats_lock_) {
    DCHECK_LE(type, kMaxInvokeType);
    STATS_LOCK();
    direct_calls_to_boot_[type]++;
  }

  // Indicate that a method of the given type was able to be resolved directly from boot.
  void DirectMethodsToBoot(InvokeType type) REQUIRES(!stats_lock_) {
    DCHECK_LE(type, kMaxInvokeType);
    STATS_LOCK();
    direct_methods_to_boot_[type]++;
  }

  void ProcessedInvoke(InvokeType type, int flags) REQUIRES(!stats_lock_) {
    STATS_LOCK();
    if (flags == 0) {
      unresolved_methods_[type]++;
    } else {
      DCHECK_NE((flags & kFlagMethodResolved), 0);
      resolved_methods_[type]++;
      if ((flags & kFlagVirtualMadeDirect) != 0) {
        virtual_made_direct_[type]++;
        if ((flags & kFlagPreciseTypeDevirtualization) != 0) {
          type_based_devirtualization_++;
        }
      } else {
        DCHECK_EQ((flags & kFlagPreciseTypeDevirtualization), 0);
      }
      if ((flags & kFlagDirectCallToBoot) != 0) {
        direct_calls_to_boot_[type]++;
      }
      if ((flags & kFlagDirectMethodToBoot) != 0) {
        direct_methods_to_boot_[type]++;
      }
    }
  }

  // A check-cast could be eliminated due to verifier type analysis.
  void SafeCast() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    safe_casts_++;
  }

  // A check-cast couldn't be eliminated due to verifier type analysis.
  void NotASafeCast() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    not_safe_casts_++;
  }

 private:
  Mutex stats_lock_;

  size_t types_in_dex_cache_;
  size_t types_not_in_dex_cache_;

  size_t strings_in_dex_cache_;
  size_t strings_not_in_dex_cache_;

  size_t resolved_types_;
  size_t unresolved_types_;

  size_t resolved_instance_fields_;
  size_t unresolved_instance_fields_;

  size_t resolved_local_static_fields_;
  size_t resolved_static_fields_;
  size_t unresolved_static_fields_;
  // Type based devirtualization for invoke interface and virtual.
  size_t type_based_devirtualization_;

  size_t resolved_methods_[kMaxInvokeType + 1];
  size_t unresolved_methods_[kMaxInvokeType + 1];
  size_t virtual_made_direct_[kMaxInvokeType + 1];
  size_t direct_calls_to_boot_[kMaxInvokeType + 1];
  size_t direct_methods_to_boot_[kMaxInvokeType + 1];

  size_t safe_casts_;
  size_t not_safe_casts_;

  DISALLOW_COPY_AND_ASSIGN(AOTCompilationStats);
};

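// Tracks which methods of a single dex file have been marked for dex-to-dex compilation, using a
// bit vector indexed by method index.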
class CompilerDriver::DexFileMethodSet {
 public:
  explicit DexFileMethodSet(const DexFile& dex_file)
      : dex_file_(dex_file),
        method_indexes_(dex_file.NumMethodIds(), false, Allocator::GetMallocAllocator()) {
  }
  DexFileMethodSet(DexFileMethodSet&& other) = default;

  const DexFile& GetDexFile() const { return dex_file_; }

  BitVector& GetMethodIndexes() { return method_indexes_; }
  const BitVector& GetMethodIndexes() const { return method_indexes_; }

 private:
  const DexFile& dex_file_;
  BitVector method_indexes_;
};

CompilerDriver::CompilerDriver(
    const CompilerOptions* compiler_options,
    VerificationResults* verification_results,
    DexFileToMethodInlinerMap* method_inliner_map,
    Compiler::Kind compiler_kind,
    InstructionSet instruction_set,
    const InstructionSetFeatures* instruction_set_features,
    bool boot_image,
    bool app_image,
    std::unordered_set<std::string>* image_classes,
    std::unordered_set<std::string>* compiled_classes,
    std::unordered_set<std::string>* compiled_methods,
    size_t thread_count,
    bool dump_stats,
    bool dump_passes,
    CumulativeLogger* timer,
    int swap_fd,
    const ProfileCompilationInfo* profile_compilation_info)
    : compiler_options_(compiler_options),
      verification_results_(verification_results),
      method_inliner_map_(method_inliner_map),
      compiler_(Compiler::Create(this, compiler_kind)),
      compiler_kind_(compiler_kind),
      instruction_set_(instruction_set),
      instruction_set_features_(instruction_set_features),
      requires_constructor_barrier_lock_("constructor barrier lock"),
      compiled_classes_lock_("compiled classes lock"),
      compiled_methods_lock_("compiled method lock"),
      compiled_methods_(MethodTable::key_compare()),
      non_relative_linker_patch_count_(0u),
      boot_image_(boot_image),
      app_image_(app_image),
      image_classes_(image_classes),
      classes_to_compile_(compiled_classes),
      methods_to_compile_(compiled_methods),
      had_hard_verifier_failure_(false),
      parallel_thread_count_(thread_count),
      stats_(new AOTCompilationStats),
      dump_stats_(dump_stats),
      dump_passes_(dump_passes),
      timings_logger_(timer),
      compiler_context_(nullptr),
      support_boot_image_fixup_(instruction_set != kMips && instruction_set != kMips64),
      dex_files_for_oat_file_(nullptr),
      compiled_method_storage_(swap_fd),
      profile_compilation_info_(profile_compilation_info),
      max_arena_alloc_(0),
      dex_to_dex_references_lock_("dex-to-dex references lock"),
      dex_to_dex_references_(),
      current_dex_to_dex_methods_(nullptr) {
  DCHECK(compiler_options_ != nullptr);
  DCHECK(method_inliner_map_ != nullptr);

  compiler_->Init();

  if (compiler_options->VerifyOnlyProfile()) {
    CHECK(profile_compilation_info_ != nullptr) << "Requires profile";
  }
  if (boot_image_) {
    CHECK(image_classes_.get() != nullptr) << "Expected image classes for boot image";
  }
}

CompilerDriver::~CompilerDriver() {
  Thread* self = Thread::Current();
  {
    MutexLock mu(self, compiled_classes_lock_);
    STLDeleteValues(&compiled_classes_);
  }
  {
    MutexLock mu(self, compiled_methods_lock_);
    for (auto& pair : compiled_methods_) {
      CompiledMethod::ReleaseSwapAllocatedCompiledMethod(this, pair.second);
    }
  }
  compiler_->UnInit();
}


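// Generates a trampoline for the given entrypoint, picking the 64-bit or 32-bit variant based on
// the target instruction set.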
#define CREATE_TRAMPOLINE(type, abi, offset) \
    if (Is64BitInstructionSet(instruction_set_)) { \
      return CreateTrampoline64(instruction_set_, abi, \
                                type ## _ENTRYPOINT_OFFSET(8, offset)); \
    } else { \
      return CreateTrampoline32(instruction_set_, abi, \
                                type ## _ENTRYPOINT_OFFSET(4, offset)); \
    }

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateJniDlsymLookup() const {
  CREATE_TRAMPOLINE(JNI, kJniAbi, pDlsymLookup)
}

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickGenericJniTrampoline()
    const {
  CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickGenericJniTrampoline)
}

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickImtConflictTrampoline()
    const {
  CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickImtConflictTrampoline)
}

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickResolutionTrampoline()
    const {
  CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickResolutionTrampoline)
}

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickToInterpreterBridge()
    const {
  CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickToInterpreterBridge)
}
#undef CREATE_TRAMPOLINE

void CompilerDriver::CompileAll(jobject class_loader,
                                const std::vector<const DexFile*>& dex_files,
                                TimingLogger* timings) {
  DCHECK(!Runtime::Current()->IsStarted());

  InitializeThreadPools();

  VLOG(compiler) << "Before precompile " << GetMemoryUsageString(false);
  // Precompile:
  // 1) Load image classes
  // 2) Resolve all classes
  // 3) Attempt to verify all classes
  // 4) Attempt to initialize image classes, and trivially initialized classes
  PreCompile(class_loader, dex_files, timings);
  // Compile:
  // 1) Compile all classes and methods enabled for compilation. May fall back to dex-to-dex
  //    compilation.
  if (!GetCompilerOptions().VerifyAtRuntime()) {
    Compile(class_loader, dex_files, timings);
  }
  if (dump_stats_) {
    stats_->Dump();
  }

  FreeThreadPools();
}

static optimizer::DexToDexCompilationLevel GetDexToDexCompilationLevel(
    Thread* self, const CompilerDriver& driver, Handle<mirror::ClassLoader> class_loader,
    const DexFile& dex_file, const DexFile::ClassDef& class_def)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  auto* const runtime = Runtime::Current();
  if (runtime->UseJitCompilation() || driver.GetCompilerOptions().VerifyAtRuntime()) {
    // Verify-at-runtime shouldn't dex-to-dex compile since we didn't resolve or verify.
    return optimizer::DexToDexCompilationLevel::kDontDexToDexCompile;
  }
  const char* descriptor = dex_file.GetClassDescriptor(class_def);
  ClassLinker* class_linker = runtime->GetClassLinker();
  mirror::Class* klass = class_linker->FindClass(self, descriptor, class_loader);
  if (klass == nullptr) {
    CHECK(self->IsExceptionPending());
    self->ClearException();
    return optimizer::DexToDexCompilationLevel::kDontDexToDexCompile;
  }
  // DexToDex at the kOptimize level may introduce quickened opcodes, which replace symbolic
  // references with actual offsets. We cannot re-verify such instructions.
  //
  // We store the verification information in the class status in the oat file, which the linker
  // can validate (checksums) and use to skip load-time verification. It is thus safe to
  // optimize when a class has been fully verified before.
  if (klass->IsVerified()) {
    // Class is verified so we can enable DEX-to-DEX compilation for performance.
    return optimizer::DexToDexCompilationLevel::kOptimize;
  } else if (klass->IsCompileTimeVerified()) {
    // Class verification has soft-failed. Anyway, ensure at least correctness.
    DCHECK_EQ(klass->GetStatus(), mirror::Class::kStatusRetryVerificationAtRuntime);
    return optimizer::DexToDexCompilationLevel::kRequired;
  } else {
    // Class verification has failed: do not run DEX-to-DEX compilation.
    return optimizer::DexToDexCompilationLevel::kDontDexToDexCompile;
  }
}

static optimizer::DexToDexCompilationLevel GetDexToDexCompilationLevel(
    Thread* self,
    const CompilerDriver& driver,
    jobject jclass_loader,
    const DexFile& dex_file,
    const DexFile::ClassDef& class_def) {
  ScopedObjectAccess soa(self);
  StackHandleScope<1> hs(soa.Self());
  Handle<mirror::ClassLoader> class_loader(
      hs.NewHandle(soa.Decode<mirror::ClassLoader*>(jclass_loader)));
  return GetDexToDexCompilationLevel(self, driver, class_loader, dex_file, class_def);
}

// Does the runtime for the InstructionSet provide an implementation returned by
// GetQuickGenericJniStub allowing down calls that aren't compiled using a JNI compiler?
static bool InstructionSetHasGenericJniStub(InstructionSet isa) {
  switch (isa) {
    case kArm:
    case kArm64:
    case kThumb2:
    case kMips:
    case kMips64:
    case kX86:
    case kX86_64: return true;
    default: return false;
  }
}

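// Compiles a single method. Depending on the current pass and the method's access flags this
// either dex-to-dex compiles a previously marked method, compiles (or deliberately skips) a JNI
// stub for a native method, invokes the configured compiler backend, or marks the method for the
// later dex-to-dex pass.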
static void CompileMethod(Thread* self,
                          CompilerDriver* driver,
                          const DexFile::CodeItem* code_item,
                          uint32_t access_flags,
                          InvokeType invoke_type,
                          uint16_t class_def_idx,
                          uint32_t method_idx,
                          jobject class_loader,
                          const DexFile& dex_file,
                          optimizer::DexToDexCompilationLevel dex_to_dex_compilation_level,
                          bool compilation_enabled,
                          Handle<mirror::DexCache> dex_cache)
    REQUIRES(!driver->compiled_methods_lock_) {
  DCHECK(driver != nullptr);
  CompiledMethod* compiled_method = nullptr;
  uint64_t start_ns = kTimeCompileMethod ? NanoTime() : 0;
  MethodReference method_ref(&dex_file, method_idx);

  if (driver->GetCurrentDexToDexMethods() != nullptr) {
    // This is the second pass when we dex-to-dex compile previously marked methods.
    // TODO: Refactor the compilation to avoid having to distinguish the two passes
    // here. That should be done on a higher level. http://b/29089975
    if (driver->GetCurrentDexToDexMethods()->IsBitSet(method_idx)) {
      const VerifiedMethod* verified_method =
          driver->GetVerificationResults()->GetVerifiedMethod(method_ref);
      // Do not optimize if a VerifiedMethod is missing. SafeCast elision,
      // for example, relies on it.
      compiled_method = optimizer::ArtCompileDEX(
          driver,
          code_item,
          access_flags,
          invoke_type,
          class_def_idx,
          method_idx,
          class_loader,
          dex_file,
          (verified_method != nullptr)
              ? dex_to_dex_compilation_level
              : optimizer::DexToDexCompilationLevel::kRequired);
    }
  } else if ((access_flags & kAccNative) != 0) {
    // Are we extracting only and have support for generic JNI down calls?
    if (!driver->GetCompilerOptions().IsJniCompilationEnabled() &&
        InstructionSetHasGenericJniStub(driver->GetInstructionSet())) {
      // Leaving this empty will trigger the generic JNI version
    } else {
      compiled_method = driver->GetCompiler()->JniCompile(access_flags, method_idx, dex_file);
      CHECK(compiled_method != nullptr);
    }
  } else if ((access_flags & kAccAbstract) != 0) {
    // Abstract methods don't have code.
  } else {
    const VerifiedMethod* verified_method =
        driver->GetVerificationResults()->GetVerifiedMethod(method_ref);
    bool compile = compilation_enabled &&
        // Basic checks, e.g., not <clinit>.
        driver->GetVerificationResults()
            ->IsCandidateForCompilation(method_ref, access_flags) &&
        // Did not fail to create VerifiedMethod metadata.
        verified_method != nullptr &&
        // Do not have failures that should punt to the interpreter.
        !verified_method->HasRuntimeThrow() &&
        (verified_method->GetEncounteredVerificationFailures() &
            (verifier::VERIFY_ERROR_FORCE_INTERPRETER | verifier::VERIFY_ERROR_LOCKING)) == 0 &&
        // Is eligible for compilation by methods-to-compile filter.
        driver->IsMethodToCompile(method_ref) &&
        driver->ShouldCompileBasedOnProfile(method_ref);

    if (compile) {
      // NOTE: if compiler declines to compile this method, it will return null.
      compiled_method = driver->GetCompiler()->Compile(code_item, access_flags, invoke_type,
                                                       class_def_idx, method_idx, class_loader,
                                                       dex_file, dex_cache);
    }
    if (compiled_method == nullptr &&
        dex_to_dex_compilation_level != optimizer::DexToDexCompilationLevel::kDontDexToDexCompile) {
      DCHECK(!Runtime::Current()->UseJitCompilation());
      // TODO: add a command-line option to disable DEX-to-DEX compilation?
      driver->MarkForDexToDexCompilation(self, method_ref);
    }
  }
  if (kTimeCompileMethod) {
    uint64_t duration_ns = NanoTime() - start_ns;
    if (duration_ns > MsToNs(driver->GetCompiler()->GetMaximumCompilationTimeBeforeWarning())) {
      LOG(WARNING) << "Compilation of " << PrettyMethod(method_idx, dex_file)
                   << " took " << PrettyDuration(duration_ns);
    }
  }

  if (compiled_method != nullptr) {
    // Count non-relative linker patches.
    size_t non_relative_linker_patch_count = 0u;
    for (const LinkerPatch& patch : compiled_method->GetPatches()) {
      if (!patch.IsPcRelative()) {
        ++non_relative_linker_patch_count;
      }
    }
    bool compile_pic = driver->GetCompilerOptions().GetCompilePic();  // Off by default
    // When compiling with PIC, there should be zero non-relative linker patches
    CHECK(!compile_pic || non_relative_linker_patch_count == 0u);

    driver->AddCompiledMethod(method_ref, compiled_method, non_relative_linker_patch_count);
  }

  if (self->IsExceptionPending()) {
    ScopedObjectAccess soa(self);
    LOG(FATAL) << "Unexpected exception compiling: " << PrettyMethod(method_idx, dex_file) << "\n"
               << self->GetException()->Dump();
  }
}

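// Compiles a single method in isolation: runs the PreCompile pipeline for the method's dex file,
// compiles the method, and then runs the dex-to-dex pass if the method was marked for it.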
void CompilerDriver::CompileOne(Thread* self, ArtMethod* method, TimingLogger* timings) {
  DCHECK(!Runtime::Current()->IsStarted());
  jobject jclass_loader;
  const DexFile* dex_file;
  uint16_t class_def_idx;
  uint32_t method_idx = method->GetDexMethodIndex();
  uint32_t access_flags = method->GetAccessFlags();
  InvokeType invoke_type = method->GetInvokeType();
  StackHandleScope<1> hs(self);
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));
  {
    ScopedObjectAccessUnchecked soa(self);
    ScopedLocalRef<jobject> local_class_loader(
        soa.Env(), soa.AddLocalReference<jobject>(method->GetDeclaringClass()->GetClassLoader()));
    jclass_loader = soa.Env()->NewGlobalRef(local_class_loader.get());
    // Find the dex_file
    dex_file = method->GetDexFile();
    class_def_idx = method->GetClassDefIndex();
  }
  const DexFile::CodeItem* code_item = dex_file->GetCodeItem(method->GetCodeItemOffset());

  // Go to native so that we don't block GC during compilation.
  ScopedThreadSuspension sts(self, kNative);

  std::vector<const DexFile*> dex_files;
  dex_files.push_back(dex_file);

  InitializeThreadPools();

  PreCompile(jclass_loader, dex_files, timings);

  // Can we run DEX-to-DEX compiler on this class ?
  optimizer::DexToDexCompilationLevel dex_to_dex_compilation_level =
      GetDexToDexCompilationLevel(self,
                                  *this,
                                  jclass_loader,
                                  *dex_file,
                                  dex_file->GetClassDef(class_def_idx));

  DCHECK(current_dex_to_dex_methods_ == nullptr);
  CompileMethod(self,
                this,
                code_item,
                access_flags,
                invoke_type,
                class_def_idx,
                method_idx,
                jclass_loader,
                *dex_file,
                dex_to_dex_compilation_level,
                true,
                dex_cache);

  ArrayRef<DexFileMethodSet> dex_to_dex_references;
  {
    // From this point on, we shall not modify dex_to_dex_references_, so
    // just grab a reference to it that we use without holding the mutex.
    MutexLock lock(Thread::Current(), dex_to_dex_references_lock_);
    dex_to_dex_references = ArrayRef<DexFileMethodSet>(dex_to_dex_references_);
  }
  if (!dex_to_dex_references.empty()) {
    DCHECK_EQ(dex_to_dex_references.size(), 1u);
    DCHECK(&dex_to_dex_references[0].GetDexFile() == dex_file);
    current_dex_to_dex_methods_ = &dex_to_dex_references.front().GetMethodIndexes();
    DCHECK(current_dex_to_dex_methods_->IsBitSet(method_idx));
    DCHECK_EQ(current_dex_to_dex_methods_->NumSetBits(), 1u);
    CompileMethod(self,
                  this,
                  code_item,
                  access_flags,
                  invoke_type,
                  class_def_idx,
                  method_idx,
                  jclass_loader,
                  *dex_file,
                  dex_to_dex_compilation_level,
                  true,
                  dex_cache);
    current_dex_to_dex_methods_ = nullptr;
  }

  FreeThreadPools();

  self->GetJniEnv()->DeleteGlobalRef(jclass_loader);
}

void CompilerDriver::Resolve(jobject class_loader,
                             const std::vector<const DexFile*>& dex_files,
                             TimingLogger* timings) {
  // Resolution allocates classes and needs to run single-threaded to be deterministic.
  bool force_determinism = GetCompilerOptions().IsForceDeterminism();
  ThreadPool* resolve_thread_pool = force_determinism
      ? single_thread_pool_.get()
      : parallel_thread_pool_.get();
  size_t resolve_thread_count = force_determinism ? 1U : parallel_thread_count_;

  for (size_t i = 0; i != dex_files.size(); ++i) {
    const DexFile* dex_file = dex_files[i];
    CHECK(dex_file != nullptr);
    ResolveDexFile(class_loader,
                   *dex_file,
                   dex_files,
                   resolve_thread_pool,
                   resolve_thread_count,
                   timings);
  }
}

// Resolve const-strings in the code. Done to have deterministic allocation behavior. Right now
// this is single-threaded for simplicity.
// TODO: Collect the relevant string indices in parallel, then allocate them sequentially in a
//       stable order.

static void ResolveConstStrings(CompilerDriver* driver,
                                const DexFile& dex_file,
                                const DexFile::CodeItem* code_item) {
  if (code_item == nullptr) {
    // Abstract or native method.
    return;
  }

  const uint16_t* code_ptr = code_item->insns_;
  const uint16_t* code_end = code_item->insns_ + code_item->insns_size_in_code_units_;

  while (code_ptr < code_end) {
    const Instruction* inst = Instruction::At(code_ptr);
    switch (inst->Opcode()) {
      case Instruction::CONST_STRING: {
        uint32_t string_index = inst->VRegB_21c();
        driver->CanAssumeStringIsPresentInDexCache(dex_file, string_index);
        break;
      }
      case Instruction::CONST_STRING_JUMBO: {
        uint32_t string_index = inst->VRegB_31c();
        driver->CanAssumeStringIsPresentInDexCache(dex_file, string_index);
        break;
      }

      default:
        break;
    }

    code_ptr += inst->SizeInCodeUnits();
  }
}

static void ResolveConstStrings(CompilerDriver* driver,
                                const std::vector<const DexFile*>& dex_files,
                                TimingLogger* timings) {
  for (const DexFile* dex_file : dex_files) {
    TimingLogger::ScopedTiming t("Resolve const-string Strings", timings);

    size_t class_def_count = dex_file->NumClassDefs();
    for (size_t class_def_index = 0; class_def_index < class_def_count; ++class_def_index) {
      const DexFile::ClassDef& class_def = dex_file->GetClassDef(class_def_index);

      const uint8_t* class_data = dex_file->GetClassData(class_def);
      if (class_data == nullptr) {
        // empty class, probably a marker interface
        continue;
      }

      ClassDataItemIterator it(*dex_file, class_data);
      // Skip fields
      while (it.HasNextStaticField()) {
        it.Next();
      }
      while (it.HasNextInstanceField()) {
        it.Next();
      }

      bool compilation_enabled = driver->IsClassToCompile(
          dex_file->StringByTypeIdx(class_def.class_idx_));
      if (!compilation_enabled) {
        // Compilation is skipped, do not resolve const-string in code of this class.
        // TODO: Make sure that inlining honors this.
        continue;
      }

      // Direct methods.
      int64_t previous_direct_method_idx = -1;
      while (it.HasNextDirectMethod()) {
        uint32_t method_idx = it.GetMemberIndex();
        if (method_idx == previous_direct_method_idx) {
          // smali can create dex files with two encoded_methods sharing the same method_idx
          // http://code.google.com/p/smali/issues/detail?id=119
          it.Next();
          continue;
        }
        previous_direct_method_idx = method_idx;
        ResolveConstStrings(driver, *dex_file, it.GetMethodCodeItem());
        it.Next();
      }
      // Virtual methods.
      int64_t previous_virtual_method_idx = -1;
      while (it.HasNextVirtualMethod()) {
        uint32_t method_idx = it.GetMemberIndex();
        if (method_idx == previous_virtual_method_idx) {
          // smali can create dex files with two encoded_methods sharing the same method_idx
          // http://code.google.com/p/smali/issues/detail?id=119
          it.Next();
          continue;
        }
        previous_virtual_method_idx = method_idx;
        ResolveConstStrings(driver, *dex_file, it.GetMethodCodeItem());
        it.Next();
      }
      DCHECK(!it.HasNext());
    }
  }
}

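// Both thread pools must have been created by InitializeThreadPools() before pre-compilation or
// compilation runs.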
inline void CompilerDriver::CheckThreadPools() {
  DCHECK(parallel_thread_pool_ != nullptr);
  DCHECK(single_thread_pool_ != nullptr);
}

void CompilerDriver::PreCompile(jobject class_loader,
                                const std::vector<const DexFile*>& dex_files,
                                TimingLogger* timings) {
  CheckThreadPools();

  LoadImageClasses(timings);
  VLOG(compiler) << "LoadImageClasses: " << GetMemoryUsageString(false);

  const bool verification_enabled = compiler_options_->IsVerificationEnabled();
  const bool never_verify = compiler_options_->NeverVerify();
  const bool verify_only_profile = compiler_options_->VerifyOnlyProfile();

  // We need to resolve for never_verify since it needs to run dex-to-dex to add the
  // RETURN_VOID_NO_BARRIER.
  // Let the verifier resolve as needed for the verify_only_profile case.
  if ((never_verify || verification_enabled) && !verify_only_profile) {
    Resolve(class_loader, dex_files, timings);
    VLOG(compiler) << "Resolve: " << GetMemoryUsageString(false);
  }

  if (never_verify) {
    VLOG(compiler) << "Verify none mode specified, skipping verification.";
    SetVerified(class_loader, dex_files, timings);
  }

  if (!verification_enabled) {
    return;
  }

  if (GetCompilerOptions().IsForceDeterminism() && IsBootImage()) {
    // Resolve strings from const-string. Do this now to have a deterministic image.
    ResolveConstStrings(this, dex_files, timings);
    VLOG(compiler) << "Resolve const-strings: " << GetMemoryUsageString(false);
  }

  Verify(class_loader, dex_files, timings);
  VLOG(compiler) << "Verify: " << GetMemoryUsageString(false);

  if (had_hard_verifier_failure_ && GetCompilerOptions().AbortOnHardVerifierFailure()) {
    LOG(FATAL) << "Had a hard failure verifying all classes, and was asked to abort in such "
               << "situations. Please check the log.";
  }

  InitializeClasses(class_loader, dex_files, timings);
  VLOG(compiler) << "InitializeClasses: " << GetMemoryUsageString(false);

  UpdateImageClasses(timings);
  VLOG(compiler) << "UpdateImageClasses: " << GetMemoryUsageString(false);
}

bool CompilerDriver::IsImageClass(const char* descriptor) const {
  if (image_classes_ != nullptr) {
    // If we have a set of image classes, use those.
    return image_classes_->find(descriptor) != image_classes_->end();
  }
  // No set of image classes, assume we include all the classes.
  // NOTE: Currently only reachable from InitImageMethodVisitor for the app image case.
  return !IsBootImage();
}

bool CompilerDriver::IsClassToCompile(const char* descriptor) const {
  if (kRestrictCompilationFiltersToImage && !IsBootImage()) {
    return true;
  }

  if (classes_to_compile_ == nullptr) {
    return true;
  }
  return classes_to_compile_->find(descriptor) != classes_to_compile_->end();
}

bool CompilerDriver::IsMethodToCompile(const MethodReference& method_ref) const {
  if (kRestrictCompilationFiltersToImage && !IsBootImage()) {
    return true;
  }

  if (methods_to_compile_ == nullptr) {
    return true;
  }

  std::string tmp = PrettyMethod(method_ref.dex_method_index, *method_ref.dex_file, true);
  return methods_to_compile_->find(tmp.c_str()) != methods_to_compile_->end();
}

bool CompilerDriver::ShouldCompileBasedOnProfile(const MethodReference& method_ref) const {
  if (profile_compilation_info_ == nullptr) {
    // If we have no profile information we are not doing a profile-guided compilation.
    // Return true, and let the other filters decide if the method should be compiled.
    return true;
  }
  bool result = profile_compilation_info_->ContainsMethod(method_ref);

  if (kDebugProfileGuidedCompilation) {
    LOG(INFO) << "[ProfileGuidedCompilation] "
              << (result ? "Compiled" : "Skipped") << " method:"
              << PrettyMethod(method_ref.dex_method_index, *method_ref.dex_file, true);
  }
  return result;
}

bool CompilerDriver::ShouldVerifyClassBasedOnProfile(const DexFile& dex_file,
                                                     uint16_t class_idx) const {
  if (!compiler_options_->VerifyOnlyProfile()) {
    // Not in verify-only-profile mode, verify everything.
    return true;
  }
  DCHECK(profile_compilation_info_ != nullptr);
  bool result = profile_compilation_info_->ContainsClass(dex_file, class_idx);
  if (kDebugProfileGuidedCompilation) {
    LOG(INFO) << "[ProfileGuidedCompilation] "
              << (result ? "Verified" : "Skipped") << " class:"
              << dex_file.GetClassDescriptor(dex_file.GetClassDef(class_idx));
  }
  return result;
}

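// Visits all loaded classes and collects, per dex file, the type indices of catch-handler
// exception types that are not yet resolved in the dex cache.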
class ResolveCatchBlockExceptionsClassVisitor : public ClassVisitor {
 public:
  ResolveCatchBlockExceptionsClassVisitor(
      std::set<std::pair<uint16_t, const DexFile*>>& exceptions_to_resolve)
      : exceptions_to_resolve_(exceptions_to_resolve) {}

  virtual bool operator()(mirror::Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    const auto pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
    for (auto& m : c->GetMethods(pointer_size)) {
      ResolveExceptionsForMethod(&m, pointer_size);
    }
    return true;
  }

 private:
  void ResolveExceptionsForMethod(ArtMethod* method_handle, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    const DexFile::CodeItem* code_item = method_handle->GetCodeItem();
    if (code_item == nullptr) {
      return;  // native or abstract method
    }
    if (code_item->tries_size_ == 0) {
      return;  // nothing to process
    }
    const uint8_t* encoded_catch_handler_list = DexFile::GetCatchHandlerData(*code_item, 0);
    size_t num_encoded_catch_handlers = DecodeUnsignedLeb128(&encoded_catch_handler_list);
    for (size_t i = 0; i < num_encoded_catch_handlers; i++) {
      int32_t encoded_catch_handler_size = DecodeSignedLeb128(&encoded_catch_handler_list);
      bool has_catch_all = false;
      if (encoded_catch_handler_size <= 0) {
        encoded_catch_handler_size = -encoded_catch_handler_size;
        has_catch_all = true;
      }
      for (int32_t j = 0; j < encoded_catch_handler_size; j++) {
        uint16_t encoded_catch_handler_handlers_type_idx =
            DecodeUnsignedLeb128(&encoded_catch_handler_list);
        // Add to set of types to resolve if not already in the dex cache resolved types
        if (!method_handle->IsResolvedTypeIdx(encoded_catch_handler_handlers_type_idx,
                                              pointer_size)) {
          exceptions_to_resolve_.emplace(encoded_catch_handler_handlers_type_idx,
                                         method_handle->GetDexFile());
        }
        // ignore address associated with catch handler
        DecodeUnsignedLeb128(&encoded_catch_handler_list);
      }
      if (has_catch_all) {
        // ignore catch all address
        DecodeUnsignedLeb128(&encoded_catch_handler_list);
      }
    }
  }

  std::set<std::pair<uint16_t, const DexFile*>>& exceptions_to_resolve_;
};

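// Records the descriptor of every visited class into the given image classes set.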
class RecordImageClassesVisitor : public ClassVisitor {
 public:
  explicit RecordImageClassesVisitor(std::unordered_set<std::string>* image_classes)
      : image_classes_(image_classes) {}

  bool operator()(mirror::Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    std::string temp;
    image_classes_->insert(klass->GetDescriptor(&temp));
    return true;
  }

 private:
  std::unordered_set<std::string>* const image_classes_;
};

// Make a list of descriptors for classes to include in the image
void CompilerDriver::LoadImageClasses(TimingLogger* timings) {
  CHECK(timings != nullptr);
  if (!IsBootImage()) {
    return;
  }

  TimingLogger::ScopedTiming t("LoadImageClasses", timings);
  // Make a first pass to load all classes explicitly listed in the file.
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  CHECK(image_classes_.get() != nullptr);
  for (auto it = image_classes_->begin(), end = image_classes_->end(); it != end;) {
    const std::string& descriptor(*it);
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> klass(
        hs.NewHandle(class_linker->FindSystemClass(self, descriptor.c_str())));
    if (klass.Get() == nullptr) {
      VLOG(compiler) << "Failed to find class " << descriptor;
      image_classes_->erase(it++);
      self->ClearException();
    } else {
      ++it;
    }
  }

  // Resolve exception classes referenced by the loaded classes. The catch logic assumes
  // exceptions are resolved by the verifier when there is a catch block in an interesting method.
  // Do this here so that exception classes appear to have been specified as image classes.
  std::set<std::pair<uint16_t, const DexFile*>> unresolved_exception_types;
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> java_lang_Throwable(
      hs.NewHandle(class_linker->FindSystemClass(self, "Ljava/lang/Throwable;")));
  do {
    unresolved_exception_types.clear();
    ResolveCatchBlockExceptionsClassVisitor visitor(unresolved_exception_types);
    class_linker->VisitClasses(&visitor);
    for (const std::pair<uint16_t, const DexFile*>& exception_type : unresolved_exception_types) {
      uint16_t exception_type_idx = exception_type.first;
      const DexFile* dex_file = exception_type.second;
      StackHandleScope<2> hs2(self);
      Handle<mirror::DexCache> dex_cache(hs2.NewHandle(class_linker->RegisterDexFile(*dex_file,
                                                                                     nullptr)));
      Handle<mirror::Class> klass(hs2.NewHandle(
          class_linker->ResolveType(*dex_file,
                                    exception_type_idx,
                                    dex_cache,
                                    ScopedNullHandle<mirror::ClassLoader>())));
      if (klass.Get() == nullptr) {
        const DexFile::TypeId& type_id = dex_file->GetTypeId(exception_type_idx);
        const char* descriptor = dex_file->GetTypeDescriptor(type_id);
        LOG(FATAL) << "Failed to resolve class " << descriptor;
      }
      DCHECK(java_lang_Throwable->IsAssignableFrom(klass.Get()));
    }
    // Resolving exceptions may load classes that reference more exceptions; iterate until no
    // more are found.
  } while (!unresolved_exception_types.empty());

  // We walk the roots looking for classes so that we'll pick up the
  // above classes plus any classes they depend on, such as super
  // classes, interfaces, and the required ClassLinker roots.
  RecordImageClassesVisitor visitor(image_classes_.get());
  class_linker->VisitClasses(&visitor);

  CHECK_NE(image_classes_->size(), 0U);
}

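// Adds the given class to the image classes set, together with its direct interfaces, the
// declaring classes of its virtual methods, its component type (for arrays) and its superclass
// chain, stopping early when a descriptor is already present in the set.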
static void MaybeAddToImageClasses(Handle<mirror::Class> c,
                                   std::unordered_set<std::string>* image_classes)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  // Make a copy of the handle so that we don't clobber it doing Assign.
  MutableHandle<mirror::Class> klass(hs.NewHandle(c.Get()));
  std::string temp;
  const size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  while (!klass->IsObjectClass()) {
    const char* descriptor = klass->GetDescriptor(&temp);
    std::pair<std::unordered_set<std::string>::iterator, bool> result =
        image_classes->insert(descriptor);
    if (!result.second) {  // Previously inserted.
      break;
    }
    VLOG(compiler) << "Adding " << descriptor << " to image classes";
    for (size_t i = 0; i < klass->NumDirectInterfaces(); ++i) {
      StackHandleScope<1> hs2(self);
      MaybeAddToImageClasses(hs2.NewHandle(mirror::Class::GetDirectInterface(self, klass, i)),
                             image_classes);
    }
    for (auto& m : c->GetVirtualMethods(pointer_size)) {
      StackHandleScope<1> hs2(self);
      MaybeAddToImageClasses(hs2.NewHandle(m.GetDeclaringClass()), image_classes);
    }
    if (klass->IsArrayClass()) {
      StackHandleScope<1> hs2(self);
      MaybeAddToImageClasses(hs2.NewHandle(klass->GetComponentType()), image_classes);
    }
    klass.Assign(klass->GetSuperClass());
  }
}

// Keeps all the data for the update together. Also doubles as the reference visitor.
// Note: we can use object pointers because we suspend all threads.
class ClinitImageUpdate {
 public:
  static ClinitImageUpdate* Create(std::unordered_set<std::string>* image_class_descriptors,
                                   Thread* self, ClassLinker* linker, std::string* error_msg) {
    std::unique_ptr<ClinitImageUpdate> res(new ClinitImageUpdate(image_class_descriptors, self,
                                                                 linker));
    if (res->dex_cache_class_ == nullptr) {
      *error_msg = "Could not find DexCache class.";
      return nullptr;
    }

    return res.release();
  }

  ~ClinitImageUpdate() {
    // Allow others to suspend again.
    self_->EndAssertNoThreadSuspension(old_cause_);
  }

  // Visitor for VisitReferences.
  void operator()(mirror::Object* object, MemberOffset field_offset, bool /* is_static */) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    mirror::Object* ref = object->GetFieldObject<mirror::Object>(field_offset);
    if (ref != nullptr) {
      VisitClinitClassesObject(ref);
    }
  }

  // java.lang.Reference visitor for VisitReferences.
  void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref ATTRIBUTE_UNUSED)
      const {}

  // Ignore class native roots.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
      const {}
  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}

  void Walk() SHARED_REQUIRES(Locks::mutator_lock_) {
    // Use the initial classes as roots for a search.
    for (mirror::Class* klass_root : image_classes_) {
      VisitClinitClassesObject(klass_root);
    }
  }

 private:
  class FindImageClassesVisitor : public ClassVisitor {
   public:
    explicit FindImageClassesVisitor(ClinitImageUpdate* data) : data_(data) {}

    bool operator()(mirror::Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
      std::string temp;
      const char* name = klass->GetDescriptor(&temp);
      if (data_->image_class_descriptors_->find(name) != data_->image_class_descriptors_->end()) {
        data_->image_classes_.push_back(klass);
      } else {
        // Check whether it is initialized and has a clinit. They must be kept, too.
        if (klass->IsInitialized() && klass->FindClassInitializer(
            Runtime::Current()->GetClassLinker()->GetImagePointerSize()) != nullptr) {
          data_->image_classes_.push_back(klass);
        }
      }
      return true;
    }

   private:
    ClinitImageUpdate* const data_;
  };

  ClinitImageUpdate(std::unordered_set<std::string>* image_class_descriptors, Thread* self,
                    ClassLinker* linker)
      SHARED_REQUIRES(Locks::mutator_lock_) :
      image_class_descriptors_(image_class_descriptors), self_(self) {
    CHECK(linker != nullptr);
    CHECK(image_class_descriptors != nullptr);

    // Make sure nobody interferes with us.
    old_cause_ = self->StartAssertNoThreadSuspension("Boot image closure");

    // Find the interesting classes.
    dex_cache_class_ = linker->LookupClass(self, "Ljava/lang/DexCache;",
        ComputeModifiedUtf8Hash("Ljava/lang/DexCache;"), nullptr);

    // Find all the already-marked classes.
    WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    FindImageClassesVisitor visitor(this);
    linker->VisitClasses(&visitor);
  }

  void VisitClinitClassesObject(mirror::Object* object) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(object != nullptr);
    if (marked_objects_.find(object) != marked_objects_.end()) {
      // Already processed.
      return;
    }

    // Mark it.
    marked_objects_.insert(object);

    if (object->IsClass()) {
      // If it is a class, add it.
      StackHandleScope<1> hs(self_);
      MaybeAddToImageClasses(hs.NewHandle(object->AsClass()), image_class_descriptors_);
    } else {
      // Else visit the object's class.
      VisitClinitClassesObject(object->GetClass());
    }

    // If it is not a DexCache, visit all references.
    mirror::Class* klass = object->GetClass();
    if (klass != dex_cache_class_) {
      object->VisitReferences(*this, *this);
    }
  }

  mutable std::unordered_set<mirror::Object*> marked_objects_;
  std::unordered_set<std::string>* const image_class_descriptors_;
  std::vector<mirror::Class*> image_classes_;
  const mirror::Class* dex_cache_class_;
  Thread* const self_;
  const char* old_cause_;

  DISALLOW_COPY_AND_ASSIGN(ClinitImageUpdate);
};

void CompilerDriver::UpdateImageClasses(TimingLogger* timings) {
  if (IsBootImage()) {
    TimingLogger::ScopedTiming t("UpdateImageClasses", timings);

    Runtime* runtime = Runtime::Current();

    // Suspend all threads.
    ScopedSuspendAll ssa(__FUNCTION__);

    std::string error_msg;
    std::unique_ptr<ClinitImageUpdate> update(ClinitImageUpdate::Create(image_classes_.get(),
                                                                        Thread::Current(),
                                                                        runtime->GetClassLinker(),
                                                                        &error_msg));
    CHECK(update.get() != nullptr) << error_msg;  // TODO: Soft failure?

    // Do the marking.
    update->Walk();
  }
}

bool CompilerDriver::CanAssumeClassIsLoaded(mirror::Class* klass) {
  Runtime* runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {
    DCHECK(runtime->UseJitCompilation());
    // Having the klass reference here implies that the klass is already loaded.
    return true;
  }
  if (!IsBootImage()) {
    // Assume loaded only if klass is in the boot image. App classes cannot be assumed
    // loaded because we don't even know what class loader will be used to load them.
    bool class_in_image = runtime->GetHeap()->FindSpaceFromObject(klass, false)->IsImageSpace();
    return class_in_image;
  }
  std::string temp;
  const char* descriptor = klass->GetDescriptor(&temp);
  return IsImageClass(descriptor);
}

void CompilerDriver::MarkForDexToDexCompilation(Thread* self, const MethodReference& method_ref) {
  MutexLock lock(self, dex_to_dex_references_lock_);
  // Since we're compiling one dex file at a time, we need to look for the
  // current dex file entry only at the end of dex_to_dex_references_.
  if (dex_to_dex_references_.empty() ||
      &dex_to_dex_references_.back().GetDexFile() != method_ref.dex_file) {
    dex_to_dex_references_.emplace_back(*method_ref.dex_file);
  }
  dex_to_dex_references_.back().GetMethodIndexes().SetBit(method_ref.dex_method_index);
}

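// Returns whether compiled code may assume the type with the given index is already in the dex
// cache: when building the boot image this holds for resolved image classes, and under JIT
// compilation it holds for any type that is already resolved.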
CanAssumeTypeIsPresentInDexCache(Handle<mirror::DexCache> dex_cache,uint32_t type_idx)1354 bool CompilerDriver::CanAssumeTypeIsPresentInDexCache(Handle<mirror::DexCache> dex_cache,
1355 uint32_t type_idx) {
1356 bool result = false;
1357 if ((IsBootImage() &&
1358 IsImageClass(dex_cache->GetDexFile()->StringDataByIdx(
1359 dex_cache->GetDexFile()->GetTypeId(type_idx).descriptor_idx_))) ||
1360 Runtime::Current()->UseJitCompilation()) {
1361 mirror::Class* resolved_class = dex_cache->GetResolvedType(type_idx);
1362 result = (resolved_class != nullptr);
1363 }
1364
1365 if (result) {
1366 stats_->TypeInDexCache();
1367 } else {
1368 stats_->TypeNotInDexCache();
1369 }
1370 return result;
1371 }
1372
CanAssumeStringIsPresentInDexCache(const DexFile & dex_file,uint32_t string_idx)1373 bool CompilerDriver::CanAssumeStringIsPresentInDexCache(const DexFile& dex_file,
1374 uint32_t string_idx) {
1375 // See also Compiler::ResolveDexFile
1376
1377 bool result = false;
1378 if (IsBootImage() || Runtime::Current()->UseJitCompilation()) {
1379 ScopedObjectAccess soa(Thread::Current());
1380 StackHandleScope<1> hs(soa.Self());
1381 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
1382 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache(
1383 soa.Self(), dex_file, false)));
1384 if (IsBootImage()) {
1385 // We resolve all const-string strings when building for the image.
1386 class_linker->ResolveString(dex_file, string_idx, dex_cache);
1387 result = true;
1388 } else {
1389 // Just check whether the dex cache already has the string.
1390 DCHECK(Runtime::Current()->UseJitCompilation());
1391 result = (dex_cache->GetResolvedString(string_idx) != nullptr);
1392 }
1393 }
1394 if (result) {
1395 stats_->StringInDexCache();
1396 } else {
1397 stats_->StringNotInDexCache();
1398 }
1399 return result;
1400 }
1401
CanAccessTypeWithoutChecks(uint32_t referrer_idx,Handle<mirror::DexCache> dex_cache,uint32_t type_idx)1402 bool CompilerDriver::CanAccessTypeWithoutChecks(uint32_t referrer_idx,
1403 Handle<mirror::DexCache> dex_cache,
1404 uint32_t type_idx) {
1405 // Get type from dex cache assuming it was populated by the verifier
1406 mirror::Class* resolved_class = dex_cache->GetResolvedType(type_idx);
1407 if (resolved_class == nullptr) {
1408 stats_->TypeNeedsAccessCheck();
1409 return false; // Unknown class needs access checks.
1410 }
1411 const DexFile::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(referrer_idx);
1412 bool is_accessible = resolved_class->IsPublic(); // Public classes are always accessible.
1413 if (!is_accessible) {
1414 mirror::Class* referrer_class = dex_cache->GetResolvedType(method_id.class_idx_);
1415 if (referrer_class == nullptr) {
1416 stats_->TypeNeedsAccessCheck();
1417 return false; // Incomplete referrer knowledge needs access check.
1418 }
1419 // Perform the access check; this returns true if access is OK, or false if we will have to
1420 // check it at runtime (for example because of class loaders).
1421 is_accessible = referrer_class->CanAccess(resolved_class);
1422 }
1423 if (is_accessible) {
1424 stats_->TypeDoesntNeedAccessCheck();
1425 } else {
1426 stats_->TypeNeedsAccessCheck();
1427 }
1428 return is_accessible;
1429 }
1430
1431 bool CompilerDriver::CanAccessInstantiableTypeWithoutChecks(uint32_t referrer_idx,
1432 Handle<mirror::DexCache> dex_cache,
1433 uint32_t type_idx,
1434 bool* finalizable) {
1435 // Get type from dex cache assuming it was populated by the verifier.
1436 mirror::Class* resolved_class = dex_cache->GetResolvedType(type_idx);
1437 if (resolved_class == nullptr) {
1438 stats_->TypeNeedsAccessCheck();
1439 // Be conservative.
1440 *finalizable = true;
1441 return false; // Unknown class needs access checks.
1442 }
1443 *finalizable = resolved_class->IsFinalizable();
1444 const DexFile::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(referrer_idx);
1445 bool is_accessible = resolved_class->IsPublic(); // Public classes are always accessible.
1446 if (!is_accessible) {
1447 mirror::Class* referrer_class = dex_cache->GetResolvedType(method_id.class_idx_);
1448 if (referrer_class == nullptr) {
1449 stats_->TypeNeedsAccessCheck();
1450 return false; // Incomplete referrer knowledge needs access check.
1451 }
1452 // Perform the access and instantiable checks; this returns true if access is OK, or false if
1453 // we will have to check it at runtime (for example because of class loaders).
1454 is_accessible = referrer_class->CanAccess(resolved_class);
1455 }
1456 bool result = is_accessible && resolved_class->IsInstantiable();
1457 if (result) {
1458 stats_->TypeDoesntNeedAccessCheck();
1459 } else {
1460 stats_->TypeNeedsAccessCheck();
1461 }
1462 return result;
1463 }
1464
1465 bool CompilerDriver::CanEmbedTypeInCode(const DexFile& dex_file, uint32_t type_idx,
1466 bool* is_type_initialized, bool* use_direct_type_ptr,
1467 uintptr_t* direct_type_ptr, bool* out_is_finalizable) {
1468 ScopedObjectAccess soa(Thread::Current());
1469 Runtime* runtime = Runtime::Current();
1470 mirror::DexCache* dex_cache = runtime->GetClassLinker()->FindDexCache(
1471 soa.Self(), dex_file, false);
1472 mirror::Class* resolved_class = dex_cache->GetResolvedType(type_idx);
1473 if (resolved_class == nullptr) {
1474 return false;
1475 }
1476 if (GetCompilerOptions().GetCompilePic()) {
1477 // Do not allow a direct class pointer to be used when compiling position-independent code.
1478 return false;
1479 }
1480 *out_is_finalizable = resolved_class->IsFinalizable();
1481 gc::Heap* heap = runtime->GetHeap();
1482 const bool compiling_boot = heap->IsCompilingBoot();
1483 const bool support_boot_image_fixup = GetSupportBootImageFixup();
1484 if (compiling_boot) {
1485 // boot -> boot class pointers.
1486 // True if the class is in the image at boot compiling time.
1487 const bool is_image_class = IsBootImage() && IsImageClass(
1488 dex_file.StringDataByIdx(dex_file.GetTypeId(type_idx).descriptor_idx_));
1489 // True if pc relative load works.
1490 if (is_image_class && support_boot_image_fixup) {
1491 *is_type_initialized = resolved_class->IsInitialized();
1492 *use_direct_type_ptr = false;
1493 *direct_type_ptr = 0;
1494 return true;
1495 } else {
1496 return false;
1497 }
1498 } else if (runtime->UseJitCompilation() && !heap->IsMovableObject(resolved_class)) {
1499 *is_type_initialized = resolved_class->IsInitialized();
1500 // The check above guarantees the class cannot move, so it is safe to embed a direct pointer.
1501 *use_direct_type_ptr = true;
1502 *direct_type_ptr = reinterpret_cast<uintptr_t>(resolved_class);
1503 return true;
1504 } else {
1505 // True if the class is in the image at app compiling time.
1506 const bool class_in_image = heap->FindSpaceFromObject(resolved_class, false)->IsImageSpace();
1507 if (class_in_image && support_boot_image_fixup) {
1508 // boot -> app class pointers.
1509 *is_type_initialized = resolved_class->IsInitialized();
1510 // TODO This is somewhat hacky. We should refactor all of this invoke codepath.
1511 *use_direct_type_ptr = !GetCompilerOptions().GetIncludePatchInformation();
1512 *direct_type_ptr = reinterpret_cast<uintptr_t>(resolved_class);
1513 return true;
1514 } else {
1515 // app -> app class pointers.
1516 // Give up because app does not have an image and class
1517 // isn't created at compile time. TODO: implement this
1518 // if/when each app gets an image.
1519 return false;
1520 }
1521 }
1522 }
1523
1524 bool CompilerDriver::CanEmbedReferenceTypeInCode(ClassReference* ref,
1525 bool* use_direct_ptr,
1526 uintptr_t* direct_type_ptr) {
1527 CHECK(ref != nullptr);
1528 CHECK(use_direct_ptr != nullptr);
1529 CHECK(direct_type_ptr != nullptr);
1530
1531 ScopedObjectAccess soa(Thread::Current());
1532 mirror::Class* reference_class = mirror::Reference::GetJavaLangRefReference();
1533 bool is_initialized = false;
1534 bool unused_finalizable;
1535 // Make sure we have a finished Reference class object before attempting to use it.
1536 if (!CanEmbedTypeInCode(*reference_class->GetDexCache()->GetDexFile(),
1537 reference_class->GetDexTypeIndex(), &is_initialized,
1538 use_direct_ptr, direct_type_ptr, &unused_finalizable) ||
1539 !is_initialized) {
1540 return false;
1541 }
1542 ref->first = &reference_class->GetDexFile();
1543 ref->second = reference_class->GetDexClassDefIndex();
1544 return true;
1545 }
1546
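// The next two accessors expose field offsets of java.lang.ref.Reference; both assume (and
// DCHECK) that the Reference class has already been initialized.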
1547 uint32_t CompilerDriver::GetReferenceSlowFlagOffset() const {
1548 ScopedObjectAccess soa(Thread::Current());
1549 mirror::Class* klass = mirror::Reference::GetJavaLangRefReference();
1550 DCHECK(klass->IsInitialized());
1551 return klass->GetSlowPathFlagOffset().Uint32Value();
1552 }
1553
1554 uint32_t CompilerDriver::GetReferenceDisableFlagOffset() const {
1555 ScopedObjectAccess soa(Thread::Current());
1556 mirror::Class* klass = mirror::Reference::GetJavaLangRefReference();
1557 DCHECK(klass->IsInitialized());
1558 return klass->GetDisableIntrinsicFlagOffset().Uint32Value();
1559 }
1560
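// Returns a real layout only for dex files that belong to the oat file being compiled; any
// other dex file gets a default-constructed layout.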
1561 DexCacheArraysLayout CompilerDriver::GetDexCacheArraysLayout(const DexFile* dex_file) {
1562 return ContainsElement(GetDexFilesForOatFile(), dex_file)
1563 ? DexCacheArraysLayout(GetInstructionSetPointerSize(instruction_set_), dex_file)
1564 : DexCacheArraysLayout();
1565 }
1566
1567 void CompilerDriver::ProcessedInstanceField(bool resolved) {
1568 if (!resolved) {
1569 stats_->UnresolvedInstanceField();
1570 } else {
1571 stats_->ResolvedInstanceField();
1572 }
1573 }
1574
1575 void CompilerDriver::ProcessedStaticField(bool resolved, bool local) {
1576 if (!resolved) {
1577 stats_->UnresolvedStaticField();
1578 } else if (local) {
1579 stats_->ResolvedLocalStaticField();
1580 } else {
1581 stats_->ResolvedStaticField();
1582 }
1583 }
1584
1585 void CompilerDriver::ProcessedInvoke(InvokeType invoke_type, int flags) {
1586 stats_->ProcessedInvoke(invoke_type, flags);
1587 }
1588
1589 ArtField* CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx,
1590 const DexCompilationUnit* mUnit, bool is_put,
1591 const ScopedObjectAccess& soa) {
1592 // Try to resolve the field and compiling method's class.
1593 ArtField* resolved_field;
1594 mirror::Class* referrer_class;
1595 Handle<mirror::DexCache> dex_cache(mUnit->GetDexCache());
1596 {
1597 StackHandleScope<1> hs(soa.Self());
1598 Handle<mirror::ClassLoader> class_loader_handle(
1599 hs.NewHandle(soa.Decode<mirror::ClassLoader*>(mUnit->GetClassLoader())));
1600 resolved_field = ResolveField(soa, dex_cache, class_loader_handle, mUnit, field_idx, false);
1601 referrer_class = resolved_field != nullptr
1602 ? ResolveCompilingMethodsClass(soa, dex_cache, class_loader_handle, mUnit) : nullptr;
1603 }
1604 bool can_link = false;
1605 if (resolved_field != nullptr && referrer_class != nullptr) {
1606 std::pair<bool, bool> fast_path = IsFastInstanceField(
1607 dex_cache.Get(), referrer_class, resolved_field, field_idx);
1608 can_link = is_put ? fast_path.second : fast_path.first;
1609 }
1610 ProcessedInstanceField(can_link);
1611 return can_link ? resolved_field : nullptr;
1612 }
1613
1614 bool CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx, const DexCompilationUnit* mUnit,
1615 bool is_put, MemberOffset* field_offset,
1616 bool* is_volatile) {
1617 ScopedObjectAccess soa(Thread::Current());
1618 ArtField* resolved_field = ComputeInstanceFieldInfo(field_idx, mUnit, is_put, soa);
1619
1620 if (resolved_field == nullptr) {
1621 // Conservative defaults.
1622 *is_volatile = true;
1623 *field_offset = MemberOffset(static_cast<size_t>(-1));
1624 return false;
1625 } else {
1626 *is_volatile = resolved_field->IsVolatile();
1627 *field_offset = resolved_field->GetOffset();
1628 return true;
1629 }
1630 }
1631
1632 void CompilerDriver::GetCodeAndMethodForDirectCall(InvokeType* type, InvokeType sharp_type,
1633 bool no_guarantee_of_dex_cache_entry,
1634 const mirror::Class* referrer_class,
1635 ArtMethod* method,
1636 int* stats_flags,
1637 MethodReference* target_method,
1638 uintptr_t* direct_code,
1639 uintptr_t* direct_method) {
1640 // For direct and static methods compute possible direct_code and direct_method values, i.e.
1641 // an address for the Method* being invoked and an address of the code for that Method*.
1642 // For interface calls compute a value for direct_method that is the interface method being
1643 // invoked, so this can be passed to the out-of-line runtime support code.
1644 *direct_code = 0;
1645 *direct_method = 0;
1646 Runtime* const runtime = Runtime::Current();
1647 gc::Heap* const heap = runtime->GetHeap();
1648 auto* cl = runtime->GetClassLinker();
1649 const auto pointer_size = cl->GetImagePointerSize();
1650 bool use_dex_cache = GetCompilerOptions().GetCompilePic(); // Off by default
1651 const bool compiling_boot = heap->IsCompilingBoot();
1652 // TODO This is somewhat hacky. We should refactor all of this invoke codepath.
1653 const bool force_relocations = (compiling_boot ||
1654 GetCompilerOptions().GetIncludePatchInformation());
1655 if (sharp_type != kStatic && sharp_type != kDirect) {
1656 return;
1657 }
1658 // TODO: support patching on all architectures.
1659 use_dex_cache = use_dex_cache || (force_relocations && !support_boot_image_fixup_);
1660 mirror::Class* declaring_class = method->GetDeclaringClass();
1661 bool method_code_in_boot = declaring_class->GetClassLoader() == nullptr;
1662 if (!use_dex_cache) {
1663 if (!method_code_in_boot) {
1664 use_dex_cache = true;
1665 } else {
1666 bool has_clinit_trampoline =
1667 method->IsStatic() && !declaring_class->IsInitialized();
1668 if (has_clinit_trampoline && declaring_class != referrer_class) {
1669 // Ensure we run the clinit trampoline unless we are invoking a static method in the same
1670 // class.
1671 use_dex_cache = true;
1672 }
1673 }
1674 }
1675 if (runtime->UseJitCompilation()) {
1676 // If we are the JIT, then don't allow a direct call to the interpreter bridge since this will
1677 // never be updated even after we compile the method.
1678 if (cl->IsQuickToInterpreterBridge(
1679 reinterpret_cast<const void*>(compiler_->GetEntryPointOf(method)))) {
1680 use_dex_cache = true;
1681 }
1682 }
1683 if (method_code_in_boot) {
1684 *stats_flags |= kFlagDirectCallToBoot | kFlagDirectMethodToBoot;
1685 }
1686 if (!use_dex_cache && force_relocations) {
1687 bool is_in_image;
1688 if (IsBootImage()) {
1689 is_in_image = IsImageClass(method->GetDeclaringClassDescriptor());
1690 } else {
1691 is_in_image = instruction_set_ != kX86 && instruction_set_ != kX86_64 &&
1692 heap->FindSpaceFromObject(method->GetDeclaringClass(), false)->IsImageSpace() &&
1693 !cl->IsQuickToInterpreterBridge(
1694 reinterpret_cast<const void*>(compiler_->GetEntryPointOf(method)));
1695 }
1696 if (!is_in_image) {
1697 // We can only branch directly to Methods that are resolved in the DexCache.
1698 // Otherwise we won't invoke the resolution trampoline.
1699 use_dex_cache = true;
1700 }
1701 }
1702 // The method may not be defined within this dex file. We need a dex cache slot within the
1703 // current dex file or direct pointers.
1704 bool must_use_direct_pointers = false;
1705 mirror::DexCache* dex_cache = declaring_class->GetDexCache();
1706 if (target_method->dex_file == dex_cache->GetDexFile() &&
1707 !(runtime->UseJitCompilation() && dex_cache->GetResolvedMethod(
1708 method->GetDexMethodIndex(), pointer_size) == nullptr)) {
1709 target_method->dex_method_index = method->GetDexMethodIndex();
1710 } else {
1711 if (no_guarantee_of_dex_cache_entry) {
1712 // See if the method is also declared in this dex cache.
1713 uint32_t dex_method_idx = method->FindDexMethodIndexInOtherDexFile(
1714 *target_method->dex_file, target_method->dex_method_index);
1715 if (dex_method_idx != DexFile::kDexNoIndex) {
1716 target_method->dex_method_index = dex_method_idx;
1717 } else {
1718 if (force_relocations && !use_dex_cache) {
1719 target_method->dex_method_index = method->GetDexMethodIndex();
1720 target_method->dex_file = dex_cache->GetDexFile();
1721 }
1722 must_use_direct_pointers = true;
1723 }
1724 }
1725 }
1726 if (use_dex_cache) {
1727 if (must_use_direct_pointers) {
1728 // Fail. Test above showed the only safe dispatch was via the dex cache, however, the direct
1729 // pointers are required as the dex cache lacks an appropriate entry.
1730 VLOG(compiler) << "Dex cache devirtualization failed for: " << PrettyMethod(method);
1731 } else {
1732 *type = sharp_type;
1733 }
1734 } else {
1735 bool method_in_image = false;
1736 const std::vector<gc::space::ImageSpace*> image_spaces = heap->GetBootImageSpaces();
1737 for (gc::space::ImageSpace* image_space : image_spaces) {
1738 const auto& method_section = image_space->GetImageHeader().GetMethodsSection();
1739 if (method_section.Contains(reinterpret_cast<uint8_t*>(method) - image_space->Begin())) {
1740 method_in_image = true;
1741 break;
1742 }
1743 }
1744 if (method_in_image || compiling_boot || runtime->UseJitCompilation()) {
1745 // We know we must be able to get to the method in the image, so use that pointer.
1746 // In the case where we are the JIT, we can always use direct pointers since we know where
1747 // the method and its code are / will be. We don't sharpen to interpreter bridge since we
1748 // check IsQuickToInterpreterBridge above.
1749 CHECK(!method->IsAbstract());
1750 *type = sharp_type;
1751 *direct_method = force_relocations ? -1 : reinterpret_cast<uintptr_t>(method);
1752 *direct_code = force_relocations ? -1 : compiler_->GetEntryPointOf(method);
1753 target_method->dex_file = method->GetDeclaringClass()->GetDexCache()->GetDexFile();
1754 target_method->dex_method_index = method->GetDexMethodIndex();
1755 } else if (!must_use_direct_pointers) {
1756 // Set the code and rely on the dex cache for the method.
1757 *type = sharp_type;
1758 if (force_relocations) {
1759 *direct_code = -1;
1760 target_method->dex_file = method->GetDeclaringClass()->GetDexCache()->GetDexFile();
1761 target_method->dex_method_index = method->GetDexMethodIndex();
1762 } else {
1763 *direct_code = compiler_->GetEntryPointOf(method);
1764 }
1765 } else {
1766 // Direct pointers were required but none were available.
1767 VLOG(compiler) << "Dex cache devirtualization failed for: " << PrettyMethod(method);
1768 }
1769 }
1770 }
1771
1772 bool CompilerDriver::ComputeInvokeInfo(const DexCompilationUnit* mUnit, const uint32_t dex_pc,
1773 bool update_stats, bool enable_devirtualization,
1774 InvokeType* invoke_type, MethodReference* target_method,
1775 int* vtable_idx, uintptr_t* direct_code,
1776 uintptr_t* direct_method) {
1777 InvokeType orig_invoke_type = *invoke_type;
1778 int stats_flags = 0;
1779 ScopedObjectAccess soa(Thread::Current());
1780 // Try to resolve the method and compiling method's class.
1781 StackHandleScope<2> hs(soa.Self());
1782 Handle<mirror::DexCache> dex_cache(mUnit->GetDexCache());
1783 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(
1784 soa.Decode<mirror::ClassLoader*>(mUnit->GetClassLoader())));
1785 uint32_t method_idx = target_method->dex_method_index;
1786 ArtMethod* resolved_method = ResolveMethod(
1787 soa, dex_cache, class_loader, mUnit, method_idx, orig_invoke_type);
1788 auto h_referrer_class = hs.NewHandle(resolved_method != nullptr ?
1789 ResolveCompilingMethodsClass(soa, dex_cache, class_loader, mUnit) : nullptr);
1790 bool result = false;
1791 if (resolved_method != nullptr) {
1792 *vtable_idx = GetResolvedMethodVTableIndex(resolved_method, orig_invoke_type);
1793
1794 if (enable_devirtualization && mUnit->GetVerifiedMethod() != nullptr) {
1795 const MethodReference* devirt_target = mUnit->GetVerifiedMethod()->GetDevirtTarget(dex_pc);
1796
1797 stats_flags = IsFastInvoke(
1798 soa, dex_cache, class_loader, mUnit, h_referrer_class.Get(), resolved_method,
1799 invoke_type, target_method, devirt_target, direct_code, direct_method);
1800 result = stats_flags != 0;
1801 } else {
1802 // Devirtualization not enabled. Inline IsFastInvoke(), dropping the devirtualization parts.
1803 if (UNLIKELY(h_referrer_class.Get() == nullptr) ||
1804 UNLIKELY(!h_referrer_class->CanAccessResolvedMethod(resolved_method->GetDeclaringClass(),
1805 resolved_method, dex_cache.Get(),
1806 target_method->dex_method_index)) ||
1807 *invoke_type == kSuper) {
1808 // Slow path. (Without devirtualization, all super calls go slow path as well.)
1809 } else {
1810 // Sharpening failed so generate a regular resolved method dispatch.
1811 stats_flags = kFlagMethodResolved;
1812 GetCodeAndMethodForDirectCall(
1813 invoke_type, *invoke_type, false, h_referrer_class.Get(), resolved_method, &stats_flags,
1814 target_method, direct_code, direct_method);
1815 result = true;
1816 }
1817 }
1818 }
1819 if (!result) {
1820 // Conservative defaults.
1821 *vtable_idx = -1;
1822 *direct_code = 0u;
1823 *direct_method = 0u;
1824 }
1825 if (update_stats) {
1826 ProcessedInvoke(orig_invoke_type, stats_flags);
1827 }
1828 return result;
1829 }
1830
1831 const VerifiedMethod* CompilerDriver::GetVerifiedMethod(const DexFile* dex_file,
1832 uint32_t method_idx) const {
1833 MethodReference ref(dex_file, method_idx);
1834 return verification_results_->GetVerifiedMethod(ref);
1835 }
1836
1837 bool CompilerDriver::IsSafeCast(const DexCompilationUnit* mUnit, uint32_t dex_pc) {
1838 if (!compiler_options_->IsVerificationEnabled()) {
1839 // If we didn't verify, every cast has to be treated as non-safe.
1840 return false;
1841 }
1842 DCHECK(mUnit->GetVerifiedMethod() != nullptr);
1843 bool result = mUnit->GetVerifiedMethod()->IsSafeCast(dex_pc);
1844 if (result) {
1845 stats_->SafeCast();
1846 } else {
1847 stats_->NotASafeCast();
1848 }
1849 return result;
1850 }
1851
1852 class CompilationVisitor {
1853 public:
1854 virtual ~CompilationVisitor() {}
1855 virtual void Visit(size_t index) = 0;
1856 };
1857
1858 class ParallelCompilationManager {
1859 public:
1860 ParallelCompilationManager(ClassLinker* class_linker,
1861 jobject class_loader,
1862 CompilerDriver* compiler,
1863 const DexFile* dex_file,
1864 const std::vector<const DexFile*>& dex_files,
1865 ThreadPool* thread_pool)
1866 : index_(0),
1867 class_linker_(class_linker),
1868 class_loader_(class_loader),
1869 compiler_(compiler),
1870 dex_file_(dex_file),
1871 dex_files_(dex_files),
1872 thread_pool_(thread_pool) {}
1873
1874 ClassLinker* GetClassLinker() const {
1875 CHECK(class_linker_ != nullptr);
1876 return class_linker_;
1877 }
1878
1879 jobject GetClassLoader() const {
1880 return class_loader_;
1881 }
1882
1883 CompilerDriver* GetCompiler() const {
1884 CHECK(compiler_ != nullptr);
1885 return compiler_;
1886 }
1887
1888 const DexFile* GetDexFile() const {
1889 CHECK(dex_file_ != nullptr);
1890 return dex_file_;
1891 }
1892
1893 const std::vector<const DexFile*>& GetDexFiles() const {
1894 return dex_files_;
1895 }
1896
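  // Runs |visitor| over the index range [begin, end) by handing out indices via NextIndex() to
  // |work_units| thread pool tasks, then blocks until all workers have finished.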
1897 void ForAll(size_t begin, size_t end, CompilationVisitor* visitor, size_t work_units)
1898 REQUIRES(!*Locks::mutator_lock_) {
1899 Thread* self = Thread::Current();
1900 self->AssertNoPendingException();
1901 CHECK_GT(work_units, 0U);
1902
1903 index_.StoreRelaxed(begin);
1904 for (size_t i = 0; i < work_units; ++i) {
1905 thread_pool_->AddTask(self, new ForAllClosure(this, end, visitor));
1906 }
1907 thread_pool_->StartWorkers(self);
1908
1909 // Ensure we're suspended while we're blocked waiting for the other threads to finish (the
1910 // worker thread destructors called below perform the join).
1911 CHECK_NE(self->GetState(), kRunnable);
1912
1913 // Wait for all the worker threads to finish.
1914 thread_pool_->Wait(self, true, false);
1915
1916 // And stop the workers accepting jobs.
1917 thread_pool_->StopWorkers(self);
1918 }
1919
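  // Atomically claims the next work index so that concurrent ForAllClosure tasks never visit
  // the same element twice.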
1920 size_t NextIndex() {
1921 return index_.FetchAndAddSequentiallyConsistent(1);
1922 }
1923
1924 private:
1925 class ForAllClosure : public Task {
1926 public:
1927 ForAllClosure(ParallelCompilationManager* manager, size_t end, CompilationVisitor* visitor)
1928 : manager_(manager),
1929 end_(end),
1930 visitor_(visitor) {}
1931
1932 virtual void Run(Thread* self) {
1933 while (true) {
1934 const size_t index = manager_->NextIndex();
1935 if (UNLIKELY(index >= end_)) {
1936 break;
1937 }
1938 visitor_->Visit(index);
1939 self->AssertNoPendingException();
1940 }
1941 }
1942
1943 virtual void Finalize() {
1944 delete this;
1945 }
1946
1947 private:
1948 ParallelCompilationManager* const manager_;
1949 const size_t end_;
1950 CompilationVisitor* const visitor_;
1951 };
1952
1953 AtomicInteger index_;
1954 ClassLinker* const class_linker_;
1955 const jobject class_loader_;
1956 CompilerDriver* const compiler_;
1957 const DexFile* const dex_file_;
1958 const std::vector<const DexFile*>& dex_files_;
1959 ThreadPool* const thread_pool_;
1960
1961 DISALLOW_COPY_AND_ASSIGN(ParallelCompilationManager);
1962 };
1963
1964 // A fast version of SkipClass (above) for use when the class pointer is already available;
1965 // it avoids the expensive FindInClassPath search.
1966 static bool SkipClass(jobject class_loader, const DexFile& dex_file, mirror::Class* klass)
1967 SHARED_REQUIRES(Locks::mutator_lock_) {
1968 DCHECK(klass != nullptr);
1969 const DexFile& original_dex_file = *klass->GetDexCache()->GetDexFile();
1970 if (&dex_file != &original_dex_file) {
1971 if (class_loader == nullptr) {
1972 LOG(WARNING) << "Skipping class " << PrettyDescriptor(klass) << " from "
1973 << dex_file.GetLocation() << " previously found in "
1974 << original_dex_file.GetLocation();
1975 }
1976 return true;
1977 }
1978 return false;
1979 }
1980
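// Resolution in this phase may legitimately fail with one of the linkage-related errors listed
// below; any other pending exception indicates a compiler bug and aborts.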
1981 static void CheckAndClearResolveException(Thread* self)
1982 SHARED_REQUIRES(Locks::mutator_lock_) {
1983 CHECK(self->IsExceptionPending());
1984 mirror::Throwable* exception = self->GetException();
1985 std::string temp;
1986 const char* descriptor = exception->GetClass()->GetDescriptor(&temp);
1987 const char* expected_exceptions[] = {
1988 "Ljava/lang/IllegalAccessError;",
1989 "Ljava/lang/IncompatibleClassChangeError;",
1990 "Ljava/lang/InstantiationError;",
1991 "Ljava/lang/LinkageError;",
1992 "Ljava/lang/NoClassDefFoundError;",
1993 "Ljava/lang/NoSuchFieldError;",
1994 "Ljava/lang/NoSuchMethodError;"
1995 };
1996 bool found = false;
1997 for (size_t i = 0; (found == false) && (i < arraysize(expected_exceptions)); ++i) {
1998 if (strcmp(descriptor, expected_exceptions[i]) == 0) {
1999 found = true;
2000 }
2001 }
2002 if (!found) {
2003 LOG(FATAL) << "Unexpected exception " << exception->Dump();
2004 }
2005 self->ClearException();
2006 }
2007
2008 bool CompilerDriver::RequiresConstructorBarrier(const DexFile& dex_file,
2009 uint16_t class_def_idx) const {
2010 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_idx);
2011 const uint8_t* class_data = dex_file.GetClassData(class_def);
2012 if (class_data == nullptr) {
2013 // Empty class such as a marker interface.
2014 return false;
2015 }
2016 ClassDataItemIterator it(dex_file, class_data);
2017 while (it.HasNextStaticField()) {
2018 it.Next();
2019 }
2020 // We require a constructor barrier if there are final instance fields.
2021 while (it.HasNextInstanceField()) {
2022 if (it.MemberIsFinal()) {
2023 return true;
2024 }
2025 it.Next();
2026 }
2027 return false;
2028 }
2029
2030 class ResolveClassFieldsAndMethodsVisitor : public CompilationVisitor {
2031 public:
2032 explicit ResolveClassFieldsAndMethodsVisitor(const ParallelCompilationManager* manager)
2033 : manager_(manager) {}
2034
2035 void Visit(size_t class_def_index) OVERRIDE REQUIRES(!Locks::mutator_lock_) {
2036 ATRACE_CALL();
2037 Thread* const self = Thread::Current();
2038 jobject jclass_loader = manager_->GetClassLoader();
2039 const DexFile& dex_file = *manager_->GetDexFile();
2040 ClassLinker* class_linker = manager_->GetClassLinker();
2041
2042 // If an instance field is final then we need to have a barrier on the return; static final
2043 // fields are assigned within the lock held for class initialization. Conservatively assume
2044 // constructor barriers are always required.
2045 bool requires_constructor_barrier = true;
2046
2047 // Method and Field are the worst. We can't resolve without either
2048 // context from the code use (to disambiguate virtual vs direct
2049 // method and instance vs static field) or from class
2050 // definitions. While the compiler will resolve what it can as it
2051 // needs it, here we try to resolve fields and methods used in class
2052 // definitions, since many of them may never be referenced by
2053 // generated code.
2054 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2055 ScopedObjectAccess soa(self);
2056 StackHandleScope<2> hs(soa.Self());
2057 Handle<mirror::ClassLoader> class_loader(
2058 hs.NewHandle(soa.Decode<mirror::ClassLoader*>(jclass_loader)));
2059 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache(
2060 soa.Self(), dex_file, false)));
2061 // Resolve the class.
2062 mirror::Class* klass = class_linker->ResolveType(dex_file, class_def.class_idx_, dex_cache,
2063 class_loader);
2064 bool resolve_fields_and_methods;
2065 if (klass == nullptr) {
2066 // Class couldn't be resolved, for example, super-class is in a different dex file. Don't
2067 // attempt to resolve methods and fields when there is no declaring class.
2068 CheckAndClearResolveException(soa.Self());
2069 resolve_fields_and_methods = false;
2070 } else {
2071 // We successfully resolved a class, should we skip it?
2072 if (SkipClass(jclass_loader, dex_file, klass)) {
2073 return;
2074 }
2075 // We want to resolve the methods and fields eagerly.
2076 resolve_fields_and_methods = true;
2077 }
2078 // Note the class_data pointer advances through the headers,
2079 // static fields, instance fields, direct methods, and virtual
2080 // methods.
2081 const uint8_t* class_data = dex_file.GetClassData(class_def);
2082 if (class_data == nullptr) {
2083 // Empty class such as a marker interface.
2084 requires_constructor_barrier = false;
2085 } else {
2086 ClassDataItemIterator it(dex_file, class_data);
2087 while (it.HasNextStaticField()) {
2088 if (resolve_fields_and_methods) {
2089 ArtField* field = class_linker->ResolveField(dex_file, it.GetMemberIndex(),
2090 dex_cache, class_loader, true);
2091 if (field == nullptr) {
2092 CheckAndClearResolveException(soa.Self());
2093 }
2094 }
2095 it.Next();
2096 }
2097 // We require a constructor barrier if there are final instance fields.
2098 requires_constructor_barrier = false;
2099 while (it.HasNextInstanceField()) {
2100 if (it.MemberIsFinal()) {
2101 requires_constructor_barrier = true;
2102 }
2103 if (resolve_fields_and_methods) {
2104 ArtField* field = class_linker->ResolveField(dex_file, it.GetMemberIndex(),
2105 dex_cache, class_loader, false);
2106 if (field == nullptr) {
2107 CheckAndClearResolveException(soa.Self());
2108 }
2109 }
2110 it.Next();
2111 }
2112 if (resolve_fields_and_methods) {
2113 while (it.HasNextDirectMethod()) {
2114 ArtMethod* method = class_linker->ResolveMethod<ClassLinker::kNoICCECheckForCache>(
2115 dex_file, it.GetMemberIndex(), dex_cache, class_loader, nullptr,
2116 it.GetMethodInvokeType(class_def));
2117 if (method == nullptr) {
2118 CheckAndClearResolveException(soa.Self());
2119 }
2120 it.Next();
2121 }
2122 while (it.HasNextVirtualMethod()) {
2123 ArtMethod* method = class_linker->ResolveMethod<ClassLinker::kNoICCECheckForCache>(
2124 dex_file, it.GetMemberIndex(), dex_cache, class_loader, nullptr,
2125 it.GetMethodInvokeType(class_def));
2126 if (method == nullptr) {
2127 CheckAndClearResolveException(soa.Self());
2128 }
2129 it.Next();
2130 }
2131 DCHECK(!it.HasNext());
2132 }
2133 }
2134 manager_->GetCompiler()->SetRequiresConstructorBarrier(self,
2135 &dex_file,
2136 class_def_index,
2137 requires_constructor_barrier);
2138 }
2139
2140 private:
2141 const ParallelCompilationManager* const manager_;
2142 };
2143
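// Eagerly resolves a single type id of the dex file. Only used when compiling the boot image
// (see CompilerDriver::ResolveDexFile below).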
2144 class ResolveTypeVisitor : public CompilationVisitor {
2145 public:
2146 explicit ResolveTypeVisitor(const ParallelCompilationManager* manager) : manager_(manager) {
2147 }
2148 virtual void Visit(size_t type_idx) OVERRIDE REQUIRES(!Locks::mutator_lock_) {
2149 // Class derived values are more complicated, they require the linker and loader.
2150 ScopedObjectAccess soa(Thread::Current());
2151 ClassLinker* class_linker = manager_->GetClassLinker();
2152 const DexFile& dex_file = *manager_->GetDexFile();
2153 StackHandleScope<2> hs(soa.Self());
2154 Handle<mirror::ClassLoader> class_loader(
2155 hs.NewHandle(soa.Decode<mirror::ClassLoader*>(manager_->GetClassLoader())));
2156 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->RegisterDexFile(
2157 dex_file,
2158 class_loader.Get())));
2159 mirror::Class* klass = class_linker->ResolveType(dex_file, type_idx, dex_cache, class_loader);
2160
2161 if (klass == nullptr) {
2162 soa.Self()->AssertPendingException();
2163 mirror::Throwable* exception = soa.Self()->GetException();
2164 VLOG(compiler) << "Exception during type resolution: " << exception->Dump();
2165 if (exception->GetClass()->DescriptorEquals("Ljava/lang/OutOfMemoryError;")) {
2166 // There's little point continuing compilation if the heap is exhausted.
2167 LOG(FATAL) << "Out of memory during type resolution for compilation";
2168 }
2169 soa.Self()->ClearException();
2170 }
2171 }
2172
2173 private:
2174 const ParallelCompilationManager* const manager_;
2175 };
2176
2177 void CompilerDriver::ResolveDexFile(jobject class_loader,
2178 const DexFile& dex_file,
2179 const std::vector<const DexFile*>& dex_files,
2180 ThreadPool* thread_pool,
2181 size_t thread_count,
2182 TimingLogger* timings) {
2183 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2184
2185 // TODO: we could resolve strings here, although the string table is largely filled with class
2186 // and method names.
2187
2188 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files,
2189 thread_pool);
2190 if (IsBootImage()) {
2191 // For images we resolve all types, such as array, whereas for applications just those with
2192 // classdefs are resolved by ResolveClassFieldsAndMethods.
2193 TimingLogger::ScopedTiming t("Resolve Types", timings);
2194 ResolveTypeVisitor visitor(&context);
2195 context.ForAll(0, dex_file.NumTypeIds(), &visitor, thread_count);
2196 }
2197
2198 TimingLogger::ScopedTiming t("Resolve MethodsAndFields", timings);
2199 ResolveClassFieldsAndMethodsVisitor visitor(&context);
2200 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
2201 }
2202
2203 void CompilerDriver::SetVerified(jobject class_loader,
2204 const std::vector<const DexFile*>& dex_files,
2205 TimingLogger* timings) {
2206 // This can be run in parallel.
2207 for (const DexFile* dex_file : dex_files) {
2208 CHECK(dex_file != nullptr);
2209 SetVerifiedDexFile(class_loader,
2210 *dex_file,
2211 dex_files,
2212 parallel_thread_pool_.get(),
2213 parallel_thread_count_,
2214 timings);
2215 }
2216 }
2217
2218 void CompilerDriver::Verify(jobject class_loader,
2219 const std::vector<const DexFile*>& dex_files,
2220 TimingLogger* timings) {
2221 // Note: verification should not be pulling in classes anymore when compiling the boot image,
2222 // as all should have been resolved before. As such, doing this in parallel should still
2223 // be deterministic.
2224 for (const DexFile* dex_file : dex_files) {
2225 CHECK(dex_file != nullptr);
2226 VerifyDexFile(class_loader,
2227 *dex_file,
2228 dex_files,
2229 parallel_thread_pool_.get(),
2230 parallel_thread_count_,
2231 timings);
2232 }
2233 }
2234
2235 class VerifyClassVisitor : public CompilationVisitor {
2236 public:
2237 VerifyClassVisitor(const ParallelCompilationManager* manager, LogSeverity log_level)
2238 : manager_(manager), log_level_(log_level) {}
2239
2240 virtual void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) OVERRIDE {
2241 ATRACE_CALL();
2242 ScopedObjectAccess soa(Thread::Current());
2243 const DexFile& dex_file = *manager_->GetDexFile();
2244 if (!manager_->GetCompiler()->ShouldVerifyClassBasedOnProfile(dex_file, class_def_index)) {
2245 // Skip verification since the class is not in the profile.
2246 return;
2247 }
2248 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2249 const char* descriptor = dex_file.GetClassDescriptor(class_def);
2250 ClassLinker* class_linker = manager_->GetClassLinker();
2251 jobject jclass_loader = manager_->GetClassLoader();
2252 StackHandleScope<3> hs(soa.Self());
2253 Handle<mirror::ClassLoader> class_loader(
2254 hs.NewHandle(soa.Decode<mirror::ClassLoader*>(jclass_loader)));
2255 Handle<mirror::Class> klass(
2256 hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader)));
2257 if (klass.Get() == nullptr) {
2258 CHECK(soa.Self()->IsExceptionPending());
2259 soa.Self()->ClearException();
2260
2261 /*
2262 * At compile time, we can still structurally verify the class even if FindClass fails.
2263 * This is to ensure the class is structurally sound for compilation. An unsound class
2264 * will be rejected by the verifier and later skipped by the compiler.
2265 */
2266 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache(
2267 soa.Self(), dex_file, false)));
2268 std::string error_msg;
2269 if (verifier::MethodVerifier::VerifyClass(soa.Self(),
2270 &dex_file,
2271 dex_cache,
2272 class_loader,
2273 &class_def,
2274 Runtime::Current()->GetCompilerCallbacks(),
2275 true /* allow soft failures */,
2276 log_level_,
2277 &error_msg) ==
2278 verifier::MethodVerifier::kHardFailure) {
2279 LOG(ERROR) << "Verification failed on class " << PrettyDescriptor(descriptor)
2280 << " because: " << error_msg;
2281 manager_->GetCompiler()->SetHadHardVerifierFailure();
2282 }
2283 } else if (!SkipClass(jclass_loader, dex_file, klass.Get())) {
2284 CHECK(klass->IsResolved()) << PrettyClass(klass.Get());
2285 class_linker->VerifyClass(soa.Self(), klass, log_level_);
2286
2287 if (klass->IsErroneous()) {
2288 // ClassLinker::VerifyClass throws, which isn't useful in the compiler.
2289 CHECK(soa.Self()->IsExceptionPending());
2290 soa.Self()->ClearException();
2291 manager_->GetCompiler()->SetHadHardVerifierFailure();
2292 }
2293
2294 CHECK(klass->IsCompileTimeVerified() || klass->IsErroneous())
2295 << PrettyDescriptor(klass.Get()) << ": state=" << klass->GetStatus();
2296
2297 // It is *very* problematic if there are verification errors in the boot classpath. For example,
2298 // we rely on things working OK without verification when the decryption dialog is brought up.
2299 // So abort in a debug build if we find this violated.
2300 DCHECK(!manager_->GetCompiler()->IsBootImage() || klass->IsVerified())
2301 << "Boot classpath class " << PrettyClass(klass.Get()) << " failed to fully verify.";
2302 }
2303 soa.Self()->AssertNoPendingException();
2304 }
2305
2306 private:
2307 const ParallelCompilationManager* const manager_;
2308 const LogSeverity log_level_;
2309 };
2310
2311 void CompilerDriver::VerifyDexFile(jobject class_loader,
2312 const DexFile& dex_file,
2313 const std::vector<const DexFile*>& dex_files,
2314 ThreadPool* thread_pool,
2315 size_t thread_count,
2316 TimingLogger* timings) {
2317 TimingLogger::ScopedTiming t("Verify Dex File", timings);
2318 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2319 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files,
2320 thread_pool);
2321 LogSeverity log_level = GetCompilerOptions().AbortOnHardVerifierFailure()
2322 ? LogSeverity::INTERNAL_FATAL
2323 : LogSeverity::WARNING;
2324 VerifyClassVisitor visitor(&context, log_level);
2325 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
2326 }
2327
2328 class SetVerifiedClassVisitor : public CompilationVisitor {
2329 public:
2330 explicit SetVerifiedClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {}
2331
2332 virtual void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) OVERRIDE {
2333 ATRACE_CALL();
2334 ScopedObjectAccess soa(Thread::Current());
2335 const DexFile& dex_file = *manager_->GetDexFile();
2336 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2337 const char* descriptor = dex_file.GetClassDescriptor(class_def);
2338 ClassLinker* class_linker = manager_->GetClassLinker();
2339 jobject jclass_loader = manager_->GetClassLoader();
2340 StackHandleScope<3> hs(soa.Self());
2341 Handle<mirror::ClassLoader> class_loader(
2342 hs.NewHandle(soa.Decode<mirror::ClassLoader*>(jclass_loader)));
2343 Handle<mirror::Class> klass(
2344 hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader)));
2345 // Class might have failed resolution. Then don't set it to verified.
2346 if (klass.Get() != nullptr) {
2347 // Only do this if the class is resolved. If even resolution fails, quickening will go very,
2348 // very wrong.
2349 if (klass->IsResolved()) {
2350 if (klass->GetStatus() < mirror::Class::kStatusVerified) {
2351 ObjectLock<mirror::Class> lock(soa.Self(), klass);
2352 // Set class status to verified.
2353 mirror::Class::SetStatus(klass, mirror::Class::kStatusVerified, soa.Self());
2354 // Mark methods as pre-verified. If we don't do this, the interpreter will run with
2355 // access checks.
2356 klass->SetSkipAccessChecksFlagOnAllMethods(
2357 GetInstructionSetPointerSize(manager_->GetCompiler()->GetInstructionSet()));
2358 klass->SetVerificationAttempted();
2359 }
2360 // Record the final class status if necessary.
2361 ClassReference ref(manager_->GetDexFile(), class_def_index);
2362 manager_->GetCompiler()->RecordClassStatus(ref, klass->GetStatus());
2363 }
2364 } else {
2365 Thread* self = soa.Self();
2366 DCHECK(self->IsExceptionPending());
2367 self->ClearException();
2368 }
2369 }
2370
2371 private:
2372 const ParallelCompilationManager* const manager_;
2373 };
2374
2375 void CompilerDriver::SetVerifiedDexFile(jobject class_loader,
2376 const DexFile& dex_file,
2377 const std::vector<const DexFile*>& dex_files,
2378 ThreadPool* thread_pool,
2379 size_t thread_count,
2380 TimingLogger* timings) {
2381 TimingLogger::ScopedTiming t("Verify Dex File", timings);
2382 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2383 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files,
2384 thread_pool);
2385 SetVerifiedClassVisitor visitor(&context);
2386 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
2387 }
2388
2389 class InitializeClassVisitor : public CompilationVisitor {
2390 public:
2391 explicit InitializeClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {}
2392
2393 virtual void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) OVERRIDE {
2394 ATRACE_CALL();
2395 jobject jclass_loader = manager_->GetClassLoader();
2396 const DexFile& dex_file = *manager_->GetDexFile();
2397 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2398 const DexFile::TypeId& class_type_id = dex_file.GetTypeId(class_def.class_idx_);
2399 const char* descriptor = dex_file.StringDataByIdx(class_type_id.descriptor_idx_);
2400
2401 ScopedObjectAccess soa(Thread::Current());
2402 StackHandleScope<3> hs(soa.Self());
2403 Handle<mirror::ClassLoader> class_loader(
2404 hs.NewHandle(soa.Decode<mirror::ClassLoader*>(jclass_loader)));
2405 Handle<mirror::Class> klass(
2406 hs.NewHandle(manager_->GetClassLinker()->FindClass(soa.Self(), descriptor, class_loader)));
2407
2408 if (klass.Get() != nullptr && !SkipClass(jclass_loader, dex_file, klass.Get())) {
2409 // Only try to initialize classes that were successfully verified.
2410 if (klass->IsVerified()) {
2411 // Attempt to initialize the class but bail if we either need to initialize the super-class
2412 // or static fields.
2413 manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, false, false);
2414 if (!klass->IsInitialized()) {
2415 // We don't want non-trivial class initialization occurring on multiple threads due to
2416 // deadlock problems. For example, a parent class is initialized (holding its lock) that
2417 // refers to a sub-class in its static/class initializer causing it to try to acquire the
2418 // sub-class' lock. While on a second thread the sub-class is initialized (holding its lock)
2419 // after first initializing its parents, whose locks are acquired. This leads to a
2420 // parent-to-child and a child-to-parent lock ordering and consequent potential deadlock.
2421 // We need to use an ObjectLock due to potential suspension in the interpreting code. Rather
2422 // than use a special Object for the purpose we use the Class of java.lang.Class.
2423 Handle<mirror::Class> h_klass(hs.NewHandle(klass->GetClass()));
2424 ObjectLock<mirror::Class> lock(soa.Self(), h_klass);
2425 // Attempt to initialize allowing initialization of parent classes but still not static
2426 // fields.
2427 manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, false, true);
2428 if (!klass->IsInitialized()) {
2429 // We need to initialize static fields, we only do this for image classes that aren't
2430 // marked with the $NoPreloadHolder (which implies this should not be initialized early).
2431 bool can_init_static_fields = manager_->GetCompiler()->IsBootImage() &&
2432 manager_->GetCompiler()->IsImageClass(descriptor) &&
2433 !StringPiece(descriptor).ends_with("$NoPreloadHolder;");
2434 if (can_init_static_fields) {
2435 VLOG(compiler) << "Initializing: " << descriptor;
2436 // TODO multithreading support. We should ensure the current compilation thread has
2437 // exclusive access to the runtime and the transaction. To achieve this, we could use
2438 // a ReaderWriterMutex but we're holding the mutator lock so we fail mutex sanity
2439 // checks in Thread::AssertThreadSuspensionIsAllowable.
2440 Runtime* const runtime = Runtime::Current();
2441 Transaction transaction;
2442
2443 // Run the class initializer in transaction mode.
2444 runtime->EnterTransactionMode(&transaction);
2445 const mirror::Class::Status old_status = klass->GetStatus();
2446 bool success = manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, true,
2447 true);
2448 // TODO we detach transaction from runtime to indicate we quit the transactional
2449 // mode which prevents the GC from visiting objects modified during the transaction.
2450 // Ensure GC is not run so don't access freed objects when aborting transaction.
2451
2452 ScopedAssertNoThreadSuspension ants(soa.Self(), "Transaction end");
2453 runtime->ExitTransactionMode();
2454
2455 if (!success) {
2456 CHECK(soa.Self()->IsExceptionPending());
2457 mirror::Throwable* exception = soa.Self()->GetException();
2458 VLOG(compiler) << "Initialization of " << descriptor << " aborted because of "
2459 << exception->Dump();
2460 std::ostream* file_log = manager_->GetCompiler()->
2461 GetCompilerOptions().GetInitFailureOutput();
2462 if (file_log != nullptr) {
2463 *file_log << descriptor << "\n";
2464 *file_log << exception->Dump() << "\n";
2465 }
2466 soa.Self()->ClearException();
2467 transaction.Rollback();
2468 CHECK_EQ(old_status, klass->GetStatus()) << "Previous class status not restored";
2469 }
2470 }
2471 }
2472 soa.Self()->AssertNoPendingException();
2473 }
2474 }
2475 // Record the final class status if necessary.
2476 ClassReference ref(manager_->GetDexFile(), class_def_index);
2477 manager_->GetCompiler()->RecordClassStatus(ref, klass->GetStatus());
2478 }
2479 // Clear any class not found or verification exceptions.
2480 soa.Self()->ClearException();
2481 }
2482
2483 private:
2484 const ParallelCompilationManager* const manager_;
2485 };
2486
2487 void CompilerDriver::InitializeClasses(jobject jni_class_loader,
2488 const DexFile& dex_file,
2489 const std::vector<const DexFile*>& dex_files,
2490 TimingLogger* timings) {
2491 TimingLogger::ScopedTiming t("InitializeNoClinit", timings);
2492
2493 // Initialization allocates objects and needs to run single-threaded to be deterministic.
2494 bool force_determinism = GetCompilerOptions().IsForceDeterminism();
2495 ThreadPool* init_thread_pool = force_determinism
2496 ? single_thread_pool_.get()
2497 : parallel_thread_pool_.get();
2498 size_t init_thread_count = force_determinism ? 1U : parallel_thread_count_;
2499
2500 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2501 ParallelCompilationManager context(class_linker, jni_class_loader, this, &dex_file, dex_files,
2502 init_thread_pool);
2503 if (IsBootImage()) {
2504 // TODO: remove this when transactional mode supports multithreading.
2505 init_thread_count = 1U;
2506 }
2507 InitializeClassVisitor visitor(&context);
2508 context.ForAll(0, dex_file.NumClassDefs(), &visitor, init_thread_count);
2509 }
2510
2511 class InitializeArrayClassesAndCreateConflictTablesVisitor : public ClassVisitor {
2512 public:
2513 virtual bool operator()(mirror::Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
2514 if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
2515 return true;
2516 }
2517 if (klass->IsArrayClass()) {
2518 StackHandleScope<1> hs(Thread::Current());
2519 Runtime::Current()->GetClassLinker()->EnsureInitialized(hs.Self(),
2520 hs.NewHandle(klass),
2521 true,
2522 true);
2523 }
2524 // Create the conflict tables.
2525 FillIMTAndConflictTables(klass);
2526 return true;
2527 }
2528
2529 private:
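  // Fills the tables for the super class chain first (recursively), tracking visited classes so
  // that each class is processed at most once; temp classes are skipped.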
2530 void FillIMTAndConflictTables(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_) {
2531 if (!klass->ShouldHaveImt()) {
2532 return;
2533 }
2534 if (visited_classes_.find(klass) != visited_classes_.end()) {
2535 return;
2536 }
2537 if (klass->HasSuperClass()) {
2538 FillIMTAndConflictTables(klass->GetSuperClass());
2539 }
2540 if (!klass->IsTemp()) {
2541 Runtime::Current()->GetClassLinker()->FillIMTAndConflictTables(klass);
2542 }
2543 visited_classes_.insert(klass);
2544 }
2545
2546 std::set<mirror::Class*> visited_classes_;
2547 };
2548
2549 void CompilerDriver::InitializeClasses(jobject class_loader,
2550 const std::vector<const DexFile*>& dex_files,
2551 TimingLogger* timings) {
2552 for (size_t i = 0; i != dex_files.size(); ++i) {
2553 const DexFile* dex_file = dex_files[i];
2554 CHECK(dex_file != nullptr);
2555 InitializeClasses(class_loader, *dex_file, dex_files, timings);
2556 }
2557 if (boot_image_ || app_image_) {
2558 // Make sure that we call EnsureInitialized on all the array classes to call
2559 // SetVerificationAttempted so that the access flags are set. If we do not do this they get
2560 // changed at runtime resulting in more dirty image pages.
2561 // Also create conflict tables.
2562 // Only useful if we are compiling an image (image_classes_ is not null).
2563 ScopedObjectAccess soa(Thread::Current());
2564 InitializeArrayClassesAndCreateConflictTablesVisitor visitor;
2565 Runtime::Current()->GetClassLinker()->VisitClassesWithoutClassesLock(&visitor);
2566 }
2567 if (IsBootImage()) {
2568 // Prune garbage objects created during aborted transactions.
2569 Runtime::Current()->GetHeap()->CollectGarbage(true);
2570 }
2571 }
2572
2573 void CompilerDriver::Compile(jobject class_loader,
2574 const std::vector<const DexFile*>& dex_files,
2575 TimingLogger* timings) {
2576 if (kDebugProfileGuidedCompilation) {
2577 LOG(INFO) << "[ProfileGuidedCompilation] " <<
2578 ((profile_compilation_info_ == nullptr)
2579 ? "null"
2580 : profile_compilation_info_->DumpInfo(&dex_files));
2581 }
2582
2583 DCHECK(current_dex_to_dex_methods_ == nullptr);
2584 for (const DexFile* dex_file : dex_files) {
2585 CHECK(dex_file != nullptr);
2586 CompileDexFile(class_loader,
2587 *dex_file,
2588 dex_files,
2589 parallel_thread_pool_.get(),
2590 parallel_thread_count_,
2591 timings);
2592 const ArenaPool* const arena_pool = Runtime::Current()->GetArenaPool();
2593 const size_t arena_alloc = arena_pool->GetBytesAllocated();
2594 max_arena_alloc_ = std::max(arena_alloc, max_arena_alloc_);
2595 Runtime::Current()->ReclaimArenaPoolMemory();
2596 }
2597
2598 ArrayRef<DexFileMethodSet> dex_to_dex_references;
2599 {
2600 // From this point on, we shall not modify dex_to_dex_references_, so
2601 // just grab a reference to it that we use without holding the mutex.
2602 MutexLock lock(Thread::Current(), dex_to_dex_references_lock_);
2603 dex_to_dex_references = ArrayRef<DexFileMethodSet>(dex_to_dex_references_);
2604 }
2605 for (const auto& method_set : dex_to_dex_references) {
2606 current_dex_to_dex_methods_ = &method_set.GetMethodIndexes();
2607 CompileDexFile(class_loader,
2608 method_set.GetDexFile(),
2609 dex_files,
2610 parallel_thread_pool_.get(),
2611 parallel_thread_count_,
2612 timings);
2613 }
2614 current_dex_to_dex_methods_ = nullptr;
2615
2616 VLOG(compiler) << "Compile: " << GetMemoryUsageString(false);
2617 }
2618
2619 class CompileClassVisitor : public CompilationVisitor {
2620 public:
2621 explicit CompileClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {}
2622
2623 virtual void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) OVERRIDE {
2624 ATRACE_CALL();
2625 const DexFile& dex_file = *manager_->GetDexFile();
2626 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2627 ClassLinker* class_linker = manager_->GetClassLinker();
2628 jobject jclass_loader = manager_->GetClassLoader();
2629 ClassReference ref(&dex_file, class_def_index);
2630 // Skip compiling classes with generic verifier failures since they will still fail at runtime
2631 if (manager_->GetCompiler()->verification_results_->IsClassRejected(ref)) {
2632 return;
2633 }
2634 // Use a scoped object access to perform the quick SkipClass check.
2635 const char* descriptor = dex_file.GetClassDescriptor(class_def);
2636 ScopedObjectAccess soa(Thread::Current());
2637 StackHandleScope<3> hs(soa.Self());
2638 Handle<mirror::ClassLoader> class_loader(
2639 hs.NewHandle(soa.Decode<mirror::ClassLoader*>(jclass_loader)));
2640 Handle<mirror::Class> klass(
2641 hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader)));
2642 Handle<mirror::DexCache> dex_cache;
2643 if (klass.Get() == nullptr) {
2644 soa.Self()->AssertPendingException();
2645 soa.Self()->ClearException();
2646 dex_cache = hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file));
2647 } else if (SkipClass(jclass_loader, dex_file, klass.Get())) {
2648 return;
2649 } else {
2650 dex_cache = hs.NewHandle(klass->GetDexCache());
2651 }
2652
2653 const uint8_t* class_data = dex_file.GetClassData(class_def);
2654 if (class_data == nullptr) {
2655 // empty class, probably a marker interface
2656 return;
2657 }
2658
2659 // Go to native so that we don't block GC during compilation.
2660 ScopedThreadSuspension sts(soa.Self(), kNative);
2661
2662 CompilerDriver* const driver = manager_->GetCompiler();
2663
2664 // Can we run the DEX-to-DEX compiler on this class?
    optimizer::DexToDexCompilationLevel dex_to_dex_compilation_level =
        GetDexToDexCompilationLevel(soa.Self(), *driver, jclass_loader, dex_file, class_def);

    ClassDataItemIterator it(dex_file, class_data);
    // Skip fields.
    while (it.HasNextStaticField()) {
      it.Next();
    }
    while (it.HasNextInstanceField()) {
      it.Next();
    }

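    // Respect the classes-to-compile filter, if one is in use; the flag is threaded through to
    // CompileMethod for every method of this class below.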
    bool compilation_enabled = driver->IsClassToCompile(
        dex_file.StringByTypeIdx(class_def.class_idx_));

    // Compile direct methods.
    int64_t previous_direct_method_idx = -1;
    while (it.HasNextDirectMethod()) {
      uint32_t method_idx = it.GetMemberIndex();
      if (method_idx == previous_direct_method_idx) {
        // smali can create dex files with two encoded_methods sharing the same method_idx
        // http://code.google.com/p/smali/issues/detail?id=119
        it.Next();
        continue;
      }
      previous_direct_method_idx = method_idx;
      CompileMethod(soa.Self(), driver, it.GetMethodCodeItem(), it.GetMethodAccessFlags(),
                    it.GetMethodInvokeType(class_def), class_def_index,
                    method_idx, jclass_loader, dex_file, dex_to_dex_compilation_level,
                    compilation_enabled, dex_cache);
      it.Next();
    }
    // Compile virtual methods.
    int64_t previous_virtual_method_idx = -1;
    while (it.HasNextVirtualMethod()) {
      uint32_t method_idx = it.GetMemberIndex();
      if (method_idx == previous_virtual_method_idx) {
        // smali can create dex files with two encoded_methods sharing the same method_idx
        // http://code.google.com/p/smali/issues/detail?id=119
        it.Next();
        continue;
      }
      previous_virtual_method_idx = method_idx;
      CompileMethod(soa.Self(), driver, it.GetMethodCodeItem(), it.GetMethodAccessFlags(),
                    it.GetMethodInvokeType(class_def), class_def_index,
                    method_idx, jclass_loader, dex_file, dex_to_dex_compilation_level,
                    compilation_enabled, dex_cache);
      it.Next();
    }
    DCHECK(!it.HasNext());
  }

 private:
  const ParallelCompilationManager* const manager_;
};

void CompilerDriver::CompileDexFile(jobject class_loader,
                                    const DexFile& dex_file,
                                    const std::vector<const DexFile*>& dex_files,
                                    ThreadPool* thread_pool,
                                    size_t thread_count,
                                    TimingLogger* timings) {
  TimingLogger::ScopedTiming t("Compile Dex File", timings);
  ParallelCompilationManager context(Runtime::Current()->GetClassLinker(), class_loader, this,
                                     &dex_file, dex_files, thread_pool);
  CompileClassVisitor visitor(&context);
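  // Visit every class_def in the dex file, spreading the per-class work across the thread pool.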
  context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
}

void CompilerDriver::AddCompiledMethod(const MethodReference& method_ref,
                                       CompiledMethod* const compiled_method,
                                       size_t non_relative_linker_patch_count) {
  DCHECK(GetCompiledMethod(method_ref) == nullptr)
      << PrettyMethod(method_ref.dex_method_index, *method_ref.dex_file);
  {
    MutexLock mu(Thread::Current(), compiled_methods_lock_);
    compiled_methods_.Put(method_ref, compiled_method);
    non_relative_linker_patch_count_ += non_relative_linker_patch_count;
  }
  DCHECK(GetCompiledMethod(method_ref) != nullptr)
      << PrettyMethod(method_ref.dex_method_index, *method_ref.dex_file);
}

void CompilerDriver::RemoveCompiledMethod(const MethodReference& method_ref) {
  CompiledMethod* compiled_method = nullptr;
  {
    MutexLock mu(Thread::Current(), compiled_methods_lock_);
    auto it = compiled_methods_.find(method_ref);
    if (it != compiled_methods_.end()) {
      compiled_method = it->second;
      compiled_methods_.erase(it);
    }
  }
  if (compiled_method != nullptr) {
    CompiledMethod::ReleaseSwapAllocatedCompiledMethod(this, compiled_method);
  }
}

CompiledClass* CompilerDriver::GetCompiledClass(ClassReference ref) const {
  MutexLock mu(Thread::Current(), compiled_classes_lock_);
  ClassTable::const_iterator it = compiled_classes_.find(ref);
  if (it == compiled_classes_.end()) {
    return nullptr;
  }
  CHECK(it->second != nullptr);
  return it->second;
}

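// Record the class status for |ref|. A recorded status may only be replaced by a strictly higher
// one, which the CHECK_GT below enforces.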
void CompilerDriver::RecordClassStatus(ClassReference ref, mirror::Class::Status status) {
  MutexLock mu(Thread::Current(), compiled_classes_lock_);
  auto it = compiled_classes_.find(ref);
  if (it == compiled_classes_.end() || it->second->GetStatus() != status) {
    // An entry doesn't exist or the status is lower than the new status.
    if (it != compiled_classes_.end()) {
      CHECK_GT(status, it->second->GetStatus());
      delete it->second;
    }
    switch (status) {
      case mirror::Class::kStatusNotReady:
      case mirror::Class::kStatusError:
      case mirror::Class::kStatusRetryVerificationAtRuntime:
      case mirror::Class::kStatusVerified:
      case mirror::Class::kStatusInitialized:
      case mirror::Class::kStatusResolved:
        break;  // Expected states.
      default:
        LOG(FATAL) << "Unexpected class status for class "
                   << PrettyDescriptor(ref.first->GetClassDescriptor(ref.first->GetClassDef(ref.second)))
                   << " of " << status;
    }
    CompiledClass* compiled_class = new CompiledClass(status);
    compiled_classes_.Overwrite(ref, compiled_class);
  }
}

CompiledMethod* CompilerDriver::GetCompiledMethod(MethodReference ref) const {
  MutexLock mu(Thread::Current(), compiled_methods_lock_);
  MethodTable::const_iterator it = compiled_methods_.find(ref);
  if (it == compiled_methods_.end()) {
    return nullptr;
  }
  CHECK(it->second != nullptr);
  return it->second;
}

bool CompilerDriver::IsMethodVerifiedWithoutFailures(uint32_t method_idx,
                                                     uint16_t class_def_idx,
                                                     const DexFile& dex_file) const {
  const VerifiedMethod* verified_method = GetVerifiedMethod(&dex_file, method_idx);
  if (verified_method != nullptr) {
    return !verified_method->HasVerificationFailures();
  }

  // If we can't find verification metadata, check if this is a system class (we trust that system
  // classes have their methods verified). If it's not, be conservative and assume the method
  // has not been verified successfully.

  // TODO: When compiling the boot image it should be safe to assume that everything is verified,
  // even if methods are not found in the verification cache.
  const char* descriptor = dex_file.GetClassDescriptor(dex_file.GetClassDef(class_def_idx));
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  bool is_system_class = class_linker->FindSystemClass(self, descriptor) != nullptr;
  if (!is_system_class) {
    self->ClearException();
  }
  return is_system_class;
}

size_t CompilerDriver::GetNonRelativeLinkerPatchCount() const {
  MutexLock mu(Thread::Current(), compiled_methods_lock_);
  return non_relative_linker_patch_count_;
}

void CompilerDriver::SetRequiresConstructorBarrier(Thread* self,
                                                   const DexFile* dex_file,
                                                   uint16_t class_def_index,
                                                   bool requires) {
  WriterMutexLock mu(self, requires_constructor_barrier_lock_);
  requires_constructor_barrier_.emplace(ClassReference(dex_file, class_def_index), requires);
}

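// Look up a cached answer under the reader lock first; if none exists, compute the answer and
// cache it under the writer lock.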
bool CompilerDriver::RequiresConstructorBarrier(Thread* self,
                                                const DexFile* dex_file,
                                                uint16_t class_def_index) {
  ClassReference class_ref(dex_file, class_def_index);
  {
    ReaderMutexLock mu(self, requires_constructor_barrier_lock_);
    auto it = requires_constructor_barrier_.find(class_ref);
    if (it != requires_constructor_barrier_.end()) {
      return it->second;
    }
  }
  WriterMutexLock mu(self, requires_constructor_barrier_lock_);
  const bool requires = RequiresConstructorBarrier(*dex_file, class_def_index);
  requires_constructor_barrier_.emplace(class_ref, requires);
  return requires;
}

std::string CompilerDriver::GetMemoryUsageString(bool extended) const {
  std::ostringstream oss;
  const gc::Heap* const heap = Runtime::Current()->GetHeap();
  const size_t java_alloc = heap->GetBytesAllocated();
  oss << "arena alloc=" << PrettySize(max_arena_alloc_) << " (" << max_arena_alloc_ << "B)";
  oss << " java alloc=" << PrettySize(java_alloc) << " (" << java_alloc << "B)";
#if defined(__BIONIC__) || defined(__GLIBC__)
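  // mallinfo() is only available with Bionic and glibc; uordblks and fordblks report the bytes
  // currently allocated from and still free in the native heap, respectively.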
  const struct mallinfo info = mallinfo();
  const size_t allocated_space = static_cast<size_t>(info.uordblks);
  const size_t free_space = static_cast<size_t>(info.fordblks);
  oss << " native alloc=" << PrettySize(allocated_space) << " (" << allocated_space << "B)"
      << " free=" << PrettySize(free_space) << " (" << free_space << "B)";
#endif
  compiled_method_storage_.DumpMemoryUsage(oss, extended);
  return oss.str();
}

bool CompilerDriver::IsStringTypeIndex(uint16_t type_index, const DexFile* dex_file) {
  const char* type = dex_file->GetTypeDescriptor(dex_file->GetTypeId(type_index));
  return strcmp(type, "Ljava/lang/String;") == 0;
}

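// Presumably used when rewriting String.<init> invocations into StringFactory calls: the method
// inliner both classifies the method index and supplies the corresponding entrypoint offset.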
bool CompilerDriver::IsStringInit(uint32_t method_index, const DexFile* dex_file, int32_t* offset) {
  DexFileMethodInliner* inliner = GetMethodInlinerMap()->GetMethodInliner(dex_file);
  size_t pointer_size = InstructionSetPointerSize(GetInstructionSet());
  *offset = inliner->GetOffsetForStringInit(method_index, pointer_size);
  return inliner->IsStringInitMethodIndex(method_index);
}

bool CompilerDriver::MayInlineInternal(const DexFile* inlined_from,
                                       const DexFile* inlined_into) const {
  // Inlining across dex files is disallowed when the dex file we are inlining from is on the
  // no-inline-from list.
  if (inlined_from != inlined_into &&
      compiler_options_->GetNoInlineFromDexFile() != nullptr &&
      ContainsElement(*compiler_options_->GetNoInlineFromDexFile(), inlined_from)) {
    return false;
  }

  return true;
}

void CompilerDriver::InitializeThreadPools() {
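  // Size the pool to one fewer worker than requested, since the calling thread is also expected
  // to take part in the parallel work.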
  size_t parallel_count = parallel_thread_count_ > 0 ? parallel_thread_count_ - 1 : 0;
  parallel_thread_pool_.reset(
      new ThreadPool("Compiler driver thread pool", parallel_count));
  single_thread_pool_.reset(new ThreadPool("Single-threaded Compiler driver thread pool", 0));
}

void CompilerDriver::FreeThreadPools() {
  parallel_thread_pool_.reset();
  single_thread_pool_.reset();
}

}  // namespace art