• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_STATS_H_
18 #define ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_STATS_H_
19 
20 #include <atomic>
21 #include <iomanip>
22 #include <string>
23 #include <type_traits>
24 
25 #include <android-base/logging.h>
26 
27 #include "base/atomic.h"
28 #include "base/globals.h"
29 
30 namespace art {
31 
// Per-method compilation statistics gathered by the optimizing compiler.
// Each enumerator is used as an index into OptimizingCompilerStats'
// counter array, so the declaration order is part of the contract:
// do NOT reorder or insert enumerators except immediately before kLastStat.
enum class MethodCompilationStat {
  kAttemptBytecodeCompilation = 0,
  kAttemptIntrinsicCompilation,
  kCompiledNativeStub,
  kCompiledIntrinsic,
  kCompiledBytecode,
  kCHAInline,
  kInlinedInvoke,
  kReplacedInvokeWithSimplePattern,
  kInstructionSimplifications,
  kInstructionSimplificationsArch,
  kUnresolvedMethod,
  kUnresolvedField,
  kUnresolvedFieldNotAFastAccess,
  kRemovedCheckedCast,
  kRemovedDeadInstruction,
  kRemovedNullCheck,
  // Reasons a method was not compiled.
  kNotCompiledSkipped,
  kNotCompiledInvalidBytecode,
  kNotCompiledThrowCatchLoop,
  kNotCompiledAmbiguousArrayOp,
  kNotCompiledHugeMethod,
  kNotCompiledLargeMethodNoBranches,
  kNotCompiledMalformedOpcode,
  kNotCompiledNoCodegen,
  kNotCompiledPathological,
  kNotCompiledSpaceFilter,
  kNotCompiledUnhandledInstruction,
  kNotCompiledUnsupportedIsa,
  kNotCompiledVerificationError,
  kNotCompiledVerifyAtRuntime,
  kNotCompiledIrreducibleLoopAndStringInit,
  kInlinedMonomorphicCall,
  kInlinedPolymorphicCall,
  kMonomorphicCall,
  kPolymorphicCall,
  kMegamorphicCall,
  kBooleanSimplified,
  kIntrinsicRecognized,
  kLoopInvariantMoved,
  kLoopVectorized,
  kLoopVectorizedIdiom,
  kSelectGenerated,
  kRemovedInstanceOf,
  kInlinedInvokeVirtualOrInterface,
  kImplicitNullCheckGenerated,
  kExplicitNullCheckGenerated,
  kSimplifyIf,
  kSimplifyThrowingInvoke,
  kInstructionSunk,
  // Reasons an invoke was not inlined.
  kNotInlinedUnresolvedEntrypoint,
  kNotInlinedDexCache,
  kNotInlinedStackMaps,
  kNotInlinedEnvironmentBudget,
  kNotInlinedInstructionBudget,
  kNotInlinedLoopWithoutExit,
  kNotInlinedIrreducibleLoop,
  kNotInlinedAlwaysThrows,
  kNotInlinedInfiniteLoop,
  kNotInlinedTryCatch,
  kNotInlinedRegisterAllocator,
  kNotInlinedCannotBuild,
  kNotInlinedNotVerified,
  kNotInlinedCodeItem,
  kNotInlinedWont,
  kNotInlinedRecursiveBudget,
  kNotInlinedProxy,
  // Constructor-fence related statistics.
  kConstructorFenceGeneratedNew,
  kConstructorFenceGeneratedFinal,
  kConstructorFenceRemovedLSE,
  kConstructorFenceRemovedPFRA,
  kConstructorFenceRemovedCFRE,
  kBitstringTypeCheck,
  kJitOutOfMemoryForCommit,
  kLastStat  // Sentinel: number of stats. Must remain last.
};
// Pretty-printer for stat names, defined in the corresponding .cc file.
std::ostream& operator<<(std::ostream& os, const MethodCompilationStat& rhs);
109 
110 class OptimizingCompilerStats {
111  public:
OptimizingCompilerStats()112   OptimizingCompilerStats() {
113     // The std::atomic<> default constructor leaves values uninitialized, so initialize them now.
114     Reset();
115   }
116 
117   void RecordStat(MethodCompilationStat stat, uint32_t count = 1) {
118     size_t stat_index = static_cast<size_t>(stat);
119     DCHECK_LT(stat_index, arraysize(compile_stats_));
120     compile_stats_[stat_index] += count;
121   }
122 
GetStat(MethodCompilationStat stat)123   uint32_t GetStat(MethodCompilationStat stat) const {
124     size_t stat_index = static_cast<size_t>(stat);
125     DCHECK_LT(stat_index, arraysize(compile_stats_));
126     return compile_stats_[stat_index];
127   }
128 
Log()129   void Log() const {
130     uint32_t compiled_intrinsics = GetStat(MethodCompilationStat::kCompiledIntrinsic);
131     uint32_t compiled_native_stubs = GetStat(MethodCompilationStat::kCompiledNativeStub);
132     uint32_t bytecode_attempts =
133         GetStat(MethodCompilationStat::kAttemptBytecodeCompilation);
134     if (compiled_intrinsics == 0u && compiled_native_stubs == 0u && bytecode_attempts == 0u) {
135       LOG(INFO) << "Did not compile any method.";
136     } else {
137       uint32_t compiled_bytecode_methods =
138           GetStat(MethodCompilationStat::kCompiledBytecode);
139       // Successful intrinsic compilation preempts other compilation attempts but failed intrinsic
140       // compilation shall still count towards bytecode or native stub compilation attempts.
141       uint32_t num_compilation_attempts =
142           compiled_intrinsics + compiled_native_stubs + bytecode_attempts;
143       uint32_t num_successful_compilations =
144           compiled_intrinsics + compiled_native_stubs + compiled_bytecode_methods;
145       float compiled_percent = num_successful_compilations * 100.0f / num_compilation_attempts;
146       LOG(INFO) << "Attempted compilation of "
147           << num_compilation_attempts << " methods: " << std::fixed << std::setprecision(2)
148           << compiled_percent << "% (" << num_successful_compilations << ") compiled.";
149 
150       for (size_t i = 0; i < arraysize(compile_stats_); ++i) {
151         if (compile_stats_[i] != 0) {
152           LOG(INFO) << "OptStat#" << static_cast<MethodCompilationStat>(i) << ": "
153               << compile_stats_[i];
154         }
155       }
156     }
157   }
158 
AddTo(OptimizingCompilerStats * other_stats)159   void AddTo(OptimizingCompilerStats* other_stats) {
160     for (size_t i = 0; i != arraysize(compile_stats_); ++i) {
161       uint32_t count = compile_stats_[i];
162       if (count != 0) {
163         other_stats->RecordStat(static_cast<MethodCompilationStat>(i), count);
164       }
165     }
166   }
167 
Reset()168   void Reset() {
169     for (std::atomic<uint32_t>& stat : compile_stats_) {
170       stat = 0u;
171     }
172   }
173 
174  private:
175   std::atomic<uint32_t> compile_stats_[static_cast<size_t>(MethodCompilationStat::kLastStat)];
176 
177   DISALLOW_COPY_AND_ASSIGN(OptimizingCompilerStats);
178 };
179 
180 inline void MaybeRecordStat(OptimizingCompilerStats* compiler_stats,
181                             MethodCompilationStat stat,
182                             uint32_t count = 1) {
183   if (compiler_stats != nullptr) {
184     compiler_stats->RecordStat(stat, count);
185   }
186 }
187 
188 }  // namespace art
189 
190 #endif  // ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_STATS_H_
191