1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "optimizing_compiler.h"
18
19 #include <fstream>
20 #include <stdint.h>
21
22 #include "art_method-inl.h"
23 #include "base/arena_allocator.h"
24 #include "base/dumpable.h"
25 #include "base/timing_logger.h"
26 #include "boolean_simplifier.h"
27 #include "bounds_check_elimination.h"
28 #include "builder.h"
29 #include "code_generator.h"
30 #include "compiled_method.h"
31 #include "compiler.h"
32 #include "constant_folding.h"
33 #include "dead_code_elimination.h"
34 #include "dex/quick/dex_file_to_method_inliner_map.h"
35 #include "dex/verified_method.h"
36 #include "dex/verification_results.h"
37 #include "driver/compiler_driver.h"
38 #include "driver/compiler_options.h"
39 #include "driver/dex_compilation_unit.h"
40 #include "elf_writer_quick.h"
41 #include "graph_visualizer.h"
42 #include "gvn.h"
43 #include "inliner.h"
44 #include "instruction_simplifier.h"
45 #include "intrinsics.h"
46 #include "licm.h"
47 #include "jni/quick/jni_compiler.h"
48 #include "nodes.h"
49 #include "prepare_for_register_allocation.h"
50 #include "reference_type_propagation.h"
51 #include "register_allocator.h"
52 #include "side_effects_analysis.h"
53 #include "ssa_builder.h"
54 #include "ssa_phi_elimination.h"
55 #include "ssa_liveness_analysis.h"
56 #include "utils/assembler.h"
57
58 namespace art {
59
60 /**
61 * Used by the code generator, to allocate the code in a vector.
62 */
63 class CodeVectorAllocator FINAL : public CodeAllocator {
64 public:
CodeVectorAllocator()65 CodeVectorAllocator() : size_(0) {}
66
Allocate(size_t size)67 virtual uint8_t* Allocate(size_t size) {
68 size_ = size;
69 memory_.resize(size);
70 return &memory_[0];
71 }
72
GetSize() const73 size_t GetSize() const { return size_; }
GetMemory() const74 const std::vector<uint8_t>& GetMemory() const { return memory_; }
75
76 private:
77 std::vector<uint8_t> memory_;
78 size_t size_;
79
80 DISALLOW_COPY_AND_ASSIGN(CodeVectorAllocator);
81 };
82
/**
 * Filter applied to the visualizer and pass timings: only methods whose name
 * contains this substring are dumped/timed (see the strstr() check in
 * PassInfoPrinter). The default empty string matches every method.
 */
static const char* kStringFilter = "";
88
89 class PassInfo;
90
91 class PassInfoPrinter : public ValueObject {
92 public:
PassInfoPrinter(HGraph * graph,const char * method_name,const CodeGenerator & codegen,std::ostream * visualizer_output,CompilerDriver * compiler_driver)93 PassInfoPrinter(HGraph* graph,
94 const char* method_name,
95 const CodeGenerator& codegen,
96 std::ostream* visualizer_output,
97 CompilerDriver* compiler_driver)
98 : method_name_(method_name),
99 timing_logger_enabled_(compiler_driver->GetDumpPasses()),
100 timing_logger_(method_name, true, true),
101 visualizer_enabled_(!compiler_driver->GetDumpCfgFileName().empty()),
102 visualizer_(visualizer_output, graph, codegen) {
103 if (strstr(method_name, kStringFilter) == nullptr) {
104 timing_logger_enabled_ = visualizer_enabled_ = false;
105 }
106 if (visualizer_enabled_) {
107 visualizer_.PrintHeader(method_name_);
108 }
109 }
110
~PassInfoPrinter()111 ~PassInfoPrinter() {
112 if (timing_logger_enabled_) {
113 LOG(INFO) << "TIMINGS " << method_name_;
114 LOG(INFO) << Dumpable<TimingLogger>(timing_logger_);
115 }
116 }
117
118 private:
StartPass(const char * pass_name)119 void StartPass(const char* pass_name) {
120 // Dump graph first, then start timer.
121 if (visualizer_enabled_) {
122 visualizer_.DumpGraph(pass_name, /* is_after_pass */ false);
123 }
124 if (timing_logger_enabled_) {
125 timing_logger_.StartTiming(pass_name);
126 }
127 }
128
EndPass(const char * pass_name)129 void EndPass(const char* pass_name) {
130 // Pause timer first, then dump graph.
131 if (timing_logger_enabled_) {
132 timing_logger_.EndTiming();
133 }
134 if (visualizer_enabled_) {
135 visualizer_.DumpGraph(pass_name, /* is_after_pass */ true);
136 }
137 }
138
139 const char* method_name_;
140
141 bool timing_logger_enabled_;
142 TimingLogger timing_logger_;
143
144 bool visualizer_enabled_;
145 HGraphVisualizer visualizer_;
146
147 friend PassInfo;
148
149 DISALLOW_COPY_AND_ASSIGN(PassInfoPrinter);
150 };
151
// Scoped (RAII) marker for a single optimization pass: notifies the
// PassInfoPrinter when the pass starts (constructor) and ends (destructor),
// triggering graph dumping and timing around the pass body.
class PassInfo : public ValueObject {
 public:
  PassInfo(const char *pass_name, PassInfoPrinter* pass_info_printer)
      : pass_name_(pass_name),
        pass_info_printer_(pass_info_printer) {
    pass_info_printer_->StartPass(pass_name_);
  }

  ~PassInfo() {
    pass_info_printer_->EndPass(pass_name_);
  }

 private:
  const char* const pass_name_;
  PassInfoPrinter* const pass_info_printer_;
};
168
// The optimizing backend. Claims every method (CanCompileMethod returns
// true) and delegates to Quick when TryCompile bails out.
class OptimizingCompiler FINAL : public Compiler {
 public:
  explicit OptimizingCompiler(CompilerDriver* driver);
  ~OptimizingCompiler();

  bool CanCompileMethod(uint32_t method_idx, const DexFile& dex_file, CompilationUnit* cu) const
      OVERRIDE;

  // Main entry point: tries this backend first (TryCompile) and falls back
  // to the Quick delegate when that returns nullptr.
  CompiledMethod* Compile(const DexFile::CodeItem* code_item,
                          uint32_t access_flags,
                          InvokeType invoke_type,
                          uint16_t class_def_idx,
                          uint32_t method_idx,
                          jobject class_loader,
                          const DexFile& dex_file) const OVERRIDE;

  // Attempts compilation with the optimizing backend only; returns nullptr
  // when the method cannot (or should not) be handled here.
  CompiledMethod* TryCompile(const DexFile::CodeItem* code_item,
                             uint32_t access_flags,
                             InvokeType invoke_type,
                             uint16_t class_def_idx,
                             uint32_t method_idx,
                             jobject class_loader,
                             const DexFile& dex_file) const;

  // JNI stubs are always produced by the Quick JNI compiler.
  CompiledMethod* JniCompile(uint32_t access_flags,
                             uint32_t method_idx,
                             const DexFile& dex_file) const OVERRIDE {
    return ArtQuickJniCompileMethod(GetCompilerDriver(), access_flags, method_idx, dex_file);
  }

  uintptr_t GetEntryPointOf(ArtMethod* method) const OVERRIDE
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return reinterpret_cast<uintptr_t>(method->GetEntryPointFromQuickCompiledCodePtrSize(
        InstructionSetPointerSize(GetCompilerDriver()->GetInstructionSet())));
  }

  void InitCompilationUnit(CompilationUnit& cu) const OVERRIDE;

  void Init() OVERRIDE;

  void UnInit() const OVERRIDE;

  // Records `compilation_stat` when statistics collection is enabled, i.e.
  // when compilation_stats_ was created in Init(); no-op otherwise.
  void MaybeRecordStat(MethodCompilationStat compilation_stat) const {
    if (compilation_stats_.get() != nullptr) {
      compilation_stats_->RecordStat(compilation_stat);
    }
  }

 private:
  // Whether we should run any optimization or register allocation. If false, will
  // just run the code generation after the graph was built.
  const bool run_optimizations_;

  // Optimize and compile `graph`.
  CompiledMethod* CompileOptimized(HGraph* graph,
                                   CodeGenerator* codegen,
                                   CompilerDriver* driver,
                                   const DexFile& dex_file,
                                   const DexCompilationUnit& dex_compilation_unit,
                                   PassInfoPrinter* pass_info) const;

  // Just compile without doing optimizations.
  CompiledMethod* CompileBaseline(CodeGenerator* codegen,
                                  CompilerDriver* driver,
                                  const DexCompilationUnit& dex_compilation_unit) const;

  // Created in Init() when the driver's GetDumpStats() is set; nullptr
  // otherwise.
  std::unique_ptr<OptimizingCompilerStats> compilation_stats_;

  // Stream receiving the C1visualizer output; set in Init() when a dump-cfg
  // file name was provided.
  std::unique_ptr<std::ostream> visualizer_output_;

  // Delegate to Quick in case the optimizing compiler cannot compile a method.
  std::unique_ptr<Compiler> delegate_;

  DISALLOW_COPY_AND_ASSIGN(OptimizingCompiler);
};
244
245 static const int kMaximumCompilationTimeBeforeWarning = 100; /* ms */
246
OptimizingCompiler(CompilerDriver * driver)247 OptimizingCompiler::OptimizingCompiler(CompilerDriver* driver)
248 : Compiler(driver, kMaximumCompilationTimeBeforeWarning),
249 run_optimizations_(
250 (driver->GetCompilerOptions().GetCompilerFilter() != CompilerOptions::kTime)
251 && !driver->GetCompilerOptions().GetDebuggable()),
252 delegate_(Create(driver, Compiler::Kind::kQuick)) {}
253
Init()254 void OptimizingCompiler::Init() {
255 delegate_->Init();
256 // Enable C1visualizer output. Must be done in Init() because the compiler
257 // driver is not fully initialized when passed to the compiler's constructor.
258 CompilerDriver* driver = GetCompilerDriver();
259 const std::string cfg_file_name = driver->GetDumpCfgFileName();
260 if (!cfg_file_name.empty()) {
261 CHECK_EQ(driver->GetThreadCount(), 1U)
262 << "Graph visualizer requires the compiler to run single-threaded. "
263 << "Invoke the compiler with '-j1'.";
264 visualizer_output_.reset(new std::ofstream(cfg_file_name));
265 }
266 if (driver->GetDumpStats()) {
267 compilation_stats_.reset(new OptimizingCompilerStats());
268 }
269 }
270
// Tears down the Quick delegate; this compiler has no per-run state to
// release here (statistics are logged in the destructor instead).
void OptimizingCompiler::UnInit() const {
  delegate_->UnInit();
}
274
~OptimizingCompiler()275 OptimizingCompiler::~OptimizingCompiler() {
276 if (compilation_stats_.get() != nullptr) {
277 compilation_stats_->Log();
278 }
279 }
280
// CompilationUnit is a Quick-side structure; its setup is fully delegated to
// the Quick compiler.
void OptimizingCompiler::InitCompilationUnit(CompilationUnit& cu) const {
  delegate_->InitCompilationUnit(cu);
}
284
// Unconditionally claims the method; unsupported cases are rejected later in
// TryCompile (ISA check, pathological methods, space filter, ...).
bool OptimizingCompiler::CanCompileMethod(uint32_t method_idx ATTRIBUTE_UNUSED,
                                          const DexFile& dex_file ATTRIBUTE_UNUSED,
                                          CompilationUnit* cu ATTRIBUTE_UNUSED) const {
  return true;
}
290
IsInstructionSetSupported(InstructionSet instruction_set)291 static bool IsInstructionSetSupported(InstructionSet instruction_set) {
292 return instruction_set == kArm64
293 || (instruction_set == kThumb2 && !kArm32QuickCodeUseSoftFloat)
294 || instruction_set == kMips64
295 || instruction_set == kX86
296 || instruction_set == kX86_64;
297 }
298
// Whether the optimizing passes can be run on this code item.
static bool CanOptimize(const DexFile::CodeItem& code_item) {
  // TODO: We currently cannot optimize methods with try/catch.
  return code_item.tries_size_ == 0;
}
303
RunOptimizations(HOptimization * optimizations[],size_t length,PassInfoPrinter * pass_info_printer)304 static void RunOptimizations(HOptimization* optimizations[],
305 size_t length,
306 PassInfoPrinter* pass_info_printer) {
307 for (size_t i = 0; i < length; ++i) {
308 HOptimization* optimization = optimizations[i];
309 {
310 PassInfo pass_info(optimization->GetPassName(), pass_info_printer);
311 optimization->Run();
312 }
313 optimization->Check();
314 }
315 }
316
// Builds the full optimization pipeline for `graph` and runs it. The pass
// objects live on this stack frame; the `optimizations` array below fixes
// the execution order (note side_effects must run before gvn/licm, which
// take it by reference).
static void RunOptimizations(HGraph* graph,
                             CompilerDriver* driver,
                             OptimizingCompilerStats* stats,
                             const DexFile& dex_file,
                             const DexCompilationUnit& dex_compilation_unit,
                             PassInfoPrinter* pass_info_printer,
                             StackHandleScopeCollection* handles) {
  HDeadCodeElimination dce1(graph, stats,
                            HDeadCodeElimination::kInitialDeadCodeEliminationPassName);
  HDeadCodeElimination dce2(graph, stats,
                            HDeadCodeElimination::kFinalDeadCodeEliminationPassName);
  HConstantFolding fold1(graph);
  InstructionSimplifier simplify1(graph, stats);
  HBooleanSimplifier boolean_simplify(graph);

  HInliner inliner(graph, dex_compilation_unit, dex_compilation_unit, driver, stats);

  HConstantFolding fold2(graph, "constant_folding_after_inlining");
  SideEffectsAnalysis side_effects(graph);
  GVNOptimization gvn(graph, side_effects);
  LICM licm(graph, side_effects);
  BoundsCheckElimination bce(graph);
  ReferenceTypePropagation type_propagation(graph, dex_file, dex_compilation_unit, handles);
  InstructionSimplifier simplify2(graph, stats, "instruction_simplifier_after_types");
  InstructionSimplifier simplify3(graph, stats, "instruction_simplifier_before_codegen");

  IntrinsicsRecognizer intrinsics(graph, dex_compilation_unit.GetDexFile(), driver);

  HOptimization* optimizations[] = {
    &intrinsics,
    &fold1,
    &simplify1,
    &dce1,
    &inliner,
    // BooleanSimplifier depends on the InstructionSimplifier removing redundant
    // suspend checks to recognize empty blocks.
    &boolean_simplify,
    &fold2,
    &side_effects,
    &gvn,
    &licm,
    &bce,
    &type_propagation,
    &simplify2,
    &dce2,
    // The codegen has a few assumptions that only the instruction simplifier can
    // satisfy. For example, the code generator does not expect to see a
    // HTypeConversion from a type to the same type.
    &simplify3,
  };

  RunOptimizations(optimizations, arraysize(optimizations), pass_info_printer);
}
370
371 // The stack map we generate must be 4-byte aligned on ARM. Since existing
372 // maps are generated alongside these stack maps, we must also align them.
AlignVectorSize(std::vector<uint8_t> & vector)373 static ArrayRef<const uint8_t> AlignVectorSize(std::vector<uint8_t>& vector) {
374 size_t size = vector.size();
375 size_t aligned_size = RoundUp(size, 4);
376 for (; size < aligned_size; ++size) {
377 vector.push_back(0);
378 }
379 return ArrayRef<const uint8_t>(vector);
380 }
381
// Prepares `graph` for register allocation, then runs liveness analysis and
// the register allocator. The two analysis steps are wrapped in PassInfo
// scopes so they are timed and the graph is dumped around them.
static void AllocateRegisters(HGraph* graph,
                              CodeGenerator* codegen,
                              PassInfoPrinter* pass_info_printer) {
  PrepareForRegisterAllocation(graph).Run();
  // `liveness` must outlive the allocator below, which takes it by reference.
  SsaLivenessAnalysis liveness(graph, codegen);
  {
    PassInfo pass_info(SsaLivenessAnalysis::kLivenessPassName, pass_info_printer);
    liveness.Analyze();
  }
  {
    PassInfo pass_info(RegisterAllocator::kRegisterAllocatorPassName, pass_info_printer);
    RegisterAllocator(graph->GetArena(), codegen, liveness).AllocateRegisters();
  }
}
396
// Runs the optimization pipeline and register allocation on `graph`, then
// emits optimized code and its stack maps. Returns a swap-space-allocated
// CompiledMethod. Unlike CompileBaseline, no mapping table or native GC map
// is emitted (the corresponding ArrayRefs below are left empty); the stack
// map carries that information for optimized code.
CompiledMethod* OptimizingCompiler::CompileOptimized(HGraph* graph,
                                                     CodeGenerator* codegen,
                                                     CompilerDriver* compiler_driver,
                                                     const DexFile& dex_file,
                                                     const DexCompilationUnit& dex_compilation_unit,
                                                     PassInfoPrinter* pass_info_printer) const {
  StackHandleScopeCollection handles(Thread::Current());
  RunOptimizations(graph, compiler_driver, compilation_stats_.get(),
                   dex_file, dex_compilation_unit, pass_info_printer, &handles);

  AllocateRegisters(graph, codegen, pass_info_printer);

  CodeVectorAllocator allocator;
  codegen->CompileOptimized(&allocator);

  // Source maps are only needed when debug info was requested.
  DefaultSrcMap src_mapping_table;
  if (compiler_driver->GetCompilerOptions().GetGenerateDebugInfo()) {
    codegen->BuildSourceMap(&src_mapping_table);
  }

  std::vector<uint8_t> stack_map;
  codegen->BuildStackMaps(&stack_map);

  MaybeRecordStat(MethodCompilationStat::kCompiledOptimized);

  return CompiledMethod::SwapAllocCompiledMethod(
      compiler_driver,
      codegen->GetInstructionSet(),
      ArrayRef<const uint8_t>(allocator.GetMemory()),
      // Follow Quick's behavior and set the frame size to zero if it is
      // considered "empty" (see the definition of
      // art::CodeGenerator::HasEmptyFrame).
      codegen->HasEmptyFrame() ? 0 : codegen->GetFrameSize(),
      codegen->GetCoreSpillMask(),
      codegen->GetFpuSpillMask(),
      &src_mapping_table,
      ArrayRef<const uint8_t>(),  // mapping_table.
      ArrayRef<const uint8_t>(stack_map),
      ArrayRef<const uint8_t>(),  // native_gc_map.
      ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),
      ArrayRef<const LinkerPatch>());
}
439
// Emits code for the graph without running any optimization or register
// allocation, together with the Quick-style side tables (mapping table, vmap
// table, native GC map), each padded to 4-byte alignment.
CompiledMethod* OptimizingCompiler::CompileBaseline(
    CodeGenerator* codegen,
    CompilerDriver* compiler_driver,
    const DexCompilationUnit& dex_compilation_unit) const {
  CodeVectorAllocator allocator;
  codegen->CompileBaseline(&allocator);

  std::vector<uint8_t> mapping_table;
  codegen->BuildMappingTable(&mapping_table);
  // Source maps are only needed when debug info was requested.
  DefaultSrcMap src_mapping_table;
  if (compiler_driver->GetCompilerOptions().GetGenerateDebugInfo()) {
    codegen->BuildSourceMap(&src_mapping_table);
  }
  std::vector<uint8_t> vmap_table;
  codegen->BuildVMapTable(&vmap_table);
  std::vector<uint8_t> gc_map;
  codegen->BuildNativeGCMap(&gc_map, dex_compilation_unit);

  MaybeRecordStat(MethodCompilationStat::kCompiledBaseline);
  return CompiledMethod::SwapAllocCompiledMethod(
      compiler_driver,
      codegen->GetInstructionSet(),
      ArrayRef<const uint8_t>(allocator.GetMemory()),
      // Follow Quick's behavior and set the frame size to zero if it is
      // considered "empty" (see the definition of
      // art::CodeGenerator::HasEmptyFrame).
      codegen->HasEmptyFrame() ? 0 : codegen->GetFrameSize(),
      codegen->GetCoreSpillMask(),
      codegen->GetFpuSpillMask(),
      &src_mapping_table,
      AlignVectorSize(mapping_table),
      AlignVectorSize(vmap_table),
      AlignVectorSize(gc_map),
      ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),
      ArrayRef<const LinkerPatch>());
}
476
// Attempts to compile `code_item` with this backend. Returns nullptr on any
// bail-out (unsupported ISA, pathological method, space filter, graph/SSA
// construction failure, try/catch when optimizations are required, ...), in
// which case Compile() falls back to the Quick delegate.
CompiledMethod* OptimizingCompiler::TryCompile(const DexFile::CodeItem* code_item,
                                               uint32_t access_flags,
                                               InvokeType invoke_type,
                                               uint16_t class_def_idx,
                                               uint32_t method_idx,
                                               jobject class_loader,
                                               const DexFile& dex_file) const {
  UNUSED(invoke_type);
  std::string method_name = PrettyMethod(method_idx, dex_file);
  MaybeRecordStat(MethodCompilationStat::kAttemptCompilation);
  CompilerDriver* compiler_driver = GetCompilerDriver();
  InstructionSet instruction_set = compiler_driver->GetInstructionSet();
  // Always use the thumb2 assembler: some runtime functionality (like implicit stack
  // overflow checks) assume thumb2.
  if (instruction_set == kArm) {
    instruction_set = kThumb2;
  }

  // `run_optimizations_` is set explicitly (either through a compiler filter
  // or the debuggable flag). If it is set, we can run baseline. Otherwise, we
  // fall back to Quick.
  bool should_use_baseline = !run_optimizations_;
  bool can_optimize = CanOptimize(*code_item);
  if (!can_optimize && !should_use_baseline) {
    // We know we will not compile this method. Bail out before doing any work.
    return nullptr;
  }

  // Do not attempt to compile on architectures we do not support.
  if (!IsInstructionSetSupported(instruction_set)) {
    MaybeRecordStat(MethodCompilationStat::kNotCompiledUnsupportedIsa);
    return nullptr;
  }

  // Methods the base class deems pathological (e.g. degenerate shapes) are
  // rejected outright.
  if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
    MaybeRecordStat(MethodCompilationStat::kNotCompiledPathological);
    return nullptr;
  }

  // Implementation of the space filter: do not compile a code item whose size in
  // code units is bigger than 128.
  static constexpr size_t kSpaceFilterOptimizingThreshold = 128;
  const CompilerOptions& compiler_options = compiler_driver->GetCompilerOptions();
  if ((compiler_options.GetCompilerFilter() == CompilerOptions::kSpace)
      && (code_item->insns_size_in_code_units_ > kSpaceFilterOptimizingThreshold)) {
    MaybeRecordStat(MethodCompilationStat::kNotCompiledSpaceFilter);
    return nullptr;
  }

  DexCompilationUnit dex_compilation_unit(
    nullptr, class_loader, art::Runtime::Current()->GetClassLinker(), dex_file, code_item,
    class_def_idx, method_idx, access_flags,
    compiler_driver->GetVerifiedMethod(&dex_file, method_idx));

  // The graph (and everything allocated from `arena`) lives only for the
  // duration of this compilation.
  ArenaAllocator arena(Runtime::Current()->GetArenaPool());
  HGraph* graph = new (&arena) HGraph(
      &arena, dex_file, method_idx, compiler_driver->GetInstructionSet(),
      compiler_driver->GetCompilerOptions().GetDebuggable());

  // For testing purposes, we put a special marker on method names that should be compiled
  // with this compiler. This makes sure we're not regressing.
  bool shouldCompile = method_name.find("$opt$") != std::string::npos;
  bool shouldOptimize = method_name.find("$opt$reg$") != std::string::npos && run_optimizations_;

  std::unique_ptr<CodeGenerator> codegen(
      CodeGenerator::Create(graph,
                            instruction_set,
                            *compiler_driver->GetInstructionSetFeatures(),
                            compiler_driver->GetCompilerOptions()));
  if (codegen.get() == nullptr) {
    CHECK(!shouldCompile) << "Could not find code generator for optimizing compiler";
    MaybeRecordStat(MethodCompilationStat::kNotCompiledNoCodegen);
    return nullptr;
  }
  codegen->GetAssembler()->cfi().SetEnabled(
      compiler_driver->GetCompilerOptions().GetGenerateDebugInfo());

  PassInfoPrinter pass_info_printer(graph,
                                    method_name.c_str(),
                                    *codegen.get(),
                                    visualizer_output_.get(),
                                    compiler_driver);

  HGraphBuilder builder(graph,
                        &dex_compilation_unit,
                        &dex_compilation_unit,
                        &dex_file,
                        compiler_driver,
                        compilation_stats_.get());

  VLOG(compiler) << "Building " << method_name;

  {
    PassInfo pass_info(HGraphBuilder::kBuilderPassName, &pass_info_printer);
    if (!builder.BuildGraph(*code_item)) {
      DCHECK(!(IsCompilingWithCoreImage() && shouldCompile))
          << "Could not build graph in optimizing compiler";
      return nullptr;
    }
  }

  bool can_allocate_registers = RegisterAllocator::CanAllocateRegistersFor(*graph, instruction_set);

  if (run_optimizations_ && can_optimize && can_allocate_registers) {
    VLOG(compiler) << "Optimizing " << method_name;

    {
      PassInfo pass_info(SsaBuilder::kSsaBuilderPassName, &pass_info_printer);
      if (!graph->TryBuildingSsa()) {
        // We could not transform the graph to SSA, bailout.
        LOG(INFO) << "Skipping compilation of " << method_name << ": it contains a non natural loop";
        MaybeRecordStat(MethodCompilationStat::kNotCompiledCannotBuildSSA);
        return nullptr;
      }
    }

    return CompileOptimized(graph,
                            codegen.get(),
                            compiler_driver,
                            dex_file,
                            dex_compilation_unit,
                            &pass_info_printer);
  } else if (shouldOptimize && can_allocate_registers) {
    // A "$opt$reg$" test method must never fall through to baseline.
    LOG(FATAL) << "Could not allocate registers in optimizing compiler";
    UNREACHABLE();
  } else if (should_use_baseline) {
    VLOG(compiler) << "Compile baseline " << method_name;

    // Record the precise reason the method is not being fully optimized.
    if (!run_optimizations_) {
      MaybeRecordStat(MethodCompilationStat::kNotOptimizedDisabled);
    } else if (!can_optimize) {
      MaybeRecordStat(MethodCompilationStat::kNotOptimizedTryCatch);
    } else if (!can_allocate_registers) {
      MaybeRecordStat(MethodCompilationStat::kNotOptimizedRegisterAllocator);
    }

    return CompileBaseline(codegen.get(), compiler_driver, dex_compilation_unit);
  } else {
    return nullptr;
  }
}
618
Compile(const DexFile::CodeItem * code_item,uint32_t access_flags,InvokeType invoke_type,uint16_t class_def_idx,uint32_t method_idx,jobject jclass_loader,const DexFile & dex_file) const619 CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
620 uint32_t access_flags,
621 InvokeType invoke_type,
622 uint16_t class_def_idx,
623 uint32_t method_idx,
624 jobject jclass_loader,
625 const DexFile& dex_file) const {
626 CompilerDriver* compiler_driver = GetCompilerDriver();
627 CompiledMethod* method = nullptr;
628 if (compiler_driver->IsMethodVerifiedWithoutFailures(method_idx, class_def_idx, dex_file) &&
629 !compiler_driver->GetVerifiedMethod(&dex_file, method_idx)->HasRuntimeThrow()) {
630 method = TryCompile(code_item, access_flags, invoke_type, class_def_idx,
631 method_idx, jclass_loader, dex_file);
632 } else {
633 if (compiler_driver->GetCompilerOptions().VerifyAtRuntime()) {
634 MaybeRecordStat(MethodCompilationStat::kNotCompiledVerifyAtRuntime);
635 } else {
636 MaybeRecordStat(MethodCompilationStat::kNotCompiledClassNotVerified);
637 }
638 }
639
640 if (method != nullptr) {
641 return method;
642 }
643 method = delegate_->Compile(code_item, access_flags, invoke_type, class_def_idx, method_idx,
644 jclass_loader, dex_file);
645
646 if (method != nullptr) {
647 MaybeRecordStat(MethodCompilationStat::kCompiledQuick);
648 }
649 return method;
650 }
651
// Factory entry point used to instantiate this backend; the caller owns the
// returned compiler.
Compiler* CreateOptimizingCompiler(CompilerDriver* driver) {
  return new OptimizingCompiler(driver);
}
655
IsCompilingWithCoreImage()656 bool IsCompilingWithCoreImage() {
657 const std::string& image = Runtime::Current()->GetImageLocation();
658 return EndsWith(image, "core.art") || EndsWith(image, "core-optimizing.art");
659 }
660
661 } // namespace art
662