// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CODEGEN_OPTIMIZED_COMPILATION_INFO_H_
#define V8_CODEGEN_OPTIMIZED_COMPILATION_INFO_H_

#include <memory>

#include "src/codegen/bailout-reason.h"
#include "src/codegen/source-position-table.h"
#include "src/codegen/tick-counter.h"
#include "src/common/globals.h"
#include "src/diagnostics/basic-block-profiler.h"
#include "src/execution/frames.h"
#include "src/handles/handles.h"
#include "src/handles/persistent-handles.h"
#include "src/objects/objects.h"
#include "src/utils/identity-map.h"
#include "src/utils/utils.h"
#include "src/utils/vector.h"

namespace v8 {

namespace tracing {
class TracedValue;
}  // namespace tracing

namespace internal {

class FunctionLiteral;
class Isolate;
class JavaScriptFrame;
class JSGlobalObject;
class Zone;

namespace wasm {
struct WasmCompilationResult;
}  // namespace wasm

// OptimizedCompilationInfo encapsulates the information needed to compile
// optimized code for a given function, and the results of the optimized
// compilation.
class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
 public:
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.

#define FLAGS(V)                                                     \
  V(FunctionContextSpecializing, function_context_specializing, 0)  \
  V(Inlining, inlining, 1)                                           \
  V(DisableFutureOptimization, disable_future_optimization, 2)       \
  V(Splitting, splitting, 3)                                         \
  V(SourcePositions, source_positions, 4)                            \
  V(BailoutOnUninitialized, bailout_on_uninitialized, 5)             \
  V(LoopPeeling, loop_peeling, 6)                                    \
  V(UntrustedCodeMitigations, untrusted_code_mitigations, 7)         \
  V(SwitchJumpTable, switch_jump_table, 8)                           \
  V(CalledWithCodeStartRegister, called_with_code_start_register, 9) \
  V(PoisonRegisterArguments, poison_register_arguments, 10)          \
  V(AllocationFolding, allocation_folding, 11)                       \
  V(AnalyzeEnvironmentLiveness, analyze_environment_liveness, 12)    \
  V(TraceTurboJson, trace_turbo_json, 13)                            \
  V(TraceTurboGraph, trace_turbo_graph, 14)                          \
  V(TraceTurboScheduled, trace_turbo_scheduled, 15)                  \
  V(TraceTurboAllocation, trace_turbo_allocation, 16)                \
  V(TraceHeapBroker, trace_heap_broker, 17)                          \
  V(WasmRuntimeExceptionSupport, wasm_runtime_exception_support, 18) \
  V(ConcurrentInlining, concurrent_inlining, 19)

  enum Flag {
#define DEF_ENUM(Camel, Lower, Bit) k##Camel = 1 << Bit,
    FLAGS(DEF_ENUM)
#undef DEF_ENUM
  };

#define DEF_GETTER(Camel, Lower, Bit) \
  bool Lower() const {                \
    DCHECK(FlagGetIsValid(k##Camel)); \
    return GetFlag(k##Camel);         \
  }
  FLAGS(DEF_GETTER)
#undef DEF_GETTER

#define DEF_SETTER(Camel, Lower, Bit) \
  void set_##Lower() {                \
    DCHECK(FlagSetIsValid(k##Camel)); \
    SetFlag(k##Camel);                \
  }
  FLAGS(DEF_SETTER)
#undef DEF_SETTER

#ifdef DEBUG
  bool FlagGetIsValid(Flag flag) const;
  bool FlagSetIsValid(Flag flag) const;
#endif  // DEBUG
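
  // For illustration only: for the Inlining flag, the FLAGS/DEF_ENUM/
  // DEF_GETTER/DEF_SETTER machinery above expands to roughly the following
  // enum entry, getter, and setter:
  //
  //   kInlining = 1 << 1,
  //
  //   bool inlining() const {
  //     DCHECK(FlagGetIsValid(kInlining));
  //     return GetFlag(kInlining);
  //   }
  //
  //   void set_inlining() {
  //     DCHECK(FlagSetIsValid(kInlining));
  //     SetFlag(kInlining);
  //   }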

  // Construct a compilation info for optimized compilation.
  OptimizedCompilationInfo(Zone* zone, Isolate* isolate,
                           Handle<SharedFunctionInfo> shared,
                           Handle<JSFunction> closure, CodeKind code_kind);
  // Construct a compilation info for stub compilation, Wasm, and testing.
  OptimizedCompilationInfo(Vector<const char> debug_name, Zone* zone,
                           CodeKind code_kind);

  ~OptimizedCompilationInfo();
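
  // A minimal usage sketch of the second constructor (assuming a caller that
  // already owns a Zone; CStrVector comes from src/utils/vector.h, and the
  // debug name "test-stub" is hypothetical):
  //
  //   OptimizedCompilationInfo info(CStrVector("test-stub"), &zone,
  //                                 CodeKind::WASM_FUNCTION);
  //   CHECK(info.IsWasm());
  //   CHECK(!info.has_shared_info());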

  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_offset_.IsNone(); }
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  bool has_shared_info() const { return !shared_info().is_null(); }
  Handle<BytecodeArray> bytecode_array() const { return bytecode_array_; }
  bool has_bytecode_array() const { return !bytecode_array_.is_null(); }
  Handle<JSFunction> closure() const { return closure_; }
  Handle<Code> code() const { return code_; }
  CodeKind code_kind() const { return code_kind_; }
  int32_t builtin_index() const { return builtin_index_; }
  void set_builtin_index(int32_t index) { builtin_index_ = index; }
  BailoutId osr_offset() const { return osr_offset_; }
  JavaScriptFrame* osr_frame() const { return osr_frame_; }

  void SetPoisoningMitigationLevel(PoisoningMitigationLevel poisoning_level) {
    poisoning_level_ = poisoning_level;
  }
  PoisoningMitigationLevel GetPoisoningMitigationLevel() const {
    return poisoning_level_;
  }

  // Code getters and setters.

  void SetCode(Handle<Code> code);

  void SetWasmCompilationResult(std::unique_ptr<wasm::WasmCompilationResult>);
  std::unique_ptr<wasm::WasmCompilationResult> ReleaseWasmCompilationResult();

  bool has_context() const;
  Context context() const;

  bool has_native_context() const;
  NativeContext native_context() const;

  bool has_global_object() const;
  JSGlobalObject global_object() const;

  // Accessors for the different compilation modes.
  bool IsOptimizing() const {
    return CodeKindIsOptimizedJSFunction(code_kind());
  }
  bool IsNativeContextIndependent() const {
    return code_kind() == CodeKind::NATIVE_CONTEXT_INDEPENDENT;
  }
  bool IsTurboprop() const { return code_kind() == CodeKind::TURBOPROP; }
  bool IsWasm() const { return code_kind() == CodeKind::WASM_FUNCTION; }

  void SetOptimizingForOsr(BailoutId osr_offset, JavaScriptFrame* osr_frame) {
    DCHECK(IsOptimizing());
    osr_offset_ = osr_offset;
    osr_frame_ = osr_frame;
  }

  void set_persistent_handles(
      std::unique_ptr<PersistentHandles> persistent_handles) {
    DCHECK_NULL(ph_);
    ph_ = std::move(persistent_handles);
    DCHECK_NOT_NULL(ph_);
  }

  void set_canonical_handles(
      std::unique_ptr<CanonicalHandlesMap> canonical_handles) {
    DCHECK_NULL(canonical_handles_);
    canonical_handles_ = std::move(canonical_handles);
    DCHECK_NOT_NULL(canonical_handles_);
  }

  void ReopenHandlesInNewHandleScope(Isolate* isolate);

  void AbortOptimization(BailoutReason reason);

  void RetryOptimization(BailoutReason reason);

  BailoutReason bailout_reason() const { return bailout_reason_; }

  int optimization_id() const {
    DCHECK(IsOptimizing());
    return optimization_id_;
  }

  unsigned inlined_bytecode_size() const { return inlined_bytecode_size_; }

  void set_inlined_bytecode_size(unsigned size) {
    inlined_bytecode_size_ = size;
  }

  struct InlinedFunctionHolder {
    Handle<SharedFunctionInfo> shared_info;
    Handle<BytecodeArray> bytecode_array;  // Explicit to prevent flushing.
    InliningPosition position;

    InlinedFunctionHolder(Handle<SharedFunctionInfo> inlined_shared_info,
                          Handle<BytecodeArray> inlined_bytecode,
                          SourcePosition pos);

    void RegisterInlinedFunctionId(size_t inlined_function_id) {
      position.inlined_function_id = static_cast<int>(inlined_function_id);
    }
  };

  using InlinedFunctionList = std::vector<InlinedFunctionHolder>;
  InlinedFunctionList& inlined_functions() { return inlined_functions_; }

  // Returns the inlining id for source position tracking.
  int AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function,
                         Handle<BytecodeArray> inlined_bytecode,
                         SourcePosition pos);
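
  // Illustrative only (`shared`, `bytecode`, `offset`, and `pos` are
  // hypothetical caller-side values): an inliner registers the inlinee and
  // tags subsequent source positions with the returned inlining id, roughly:
  //
  //   int inlining_id = info->AddInlinedFunction(shared, bytecode, pos);
  //   SourcePosition inlined_pos(offset, inlining_id);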

  std::unique_ptr<char[]> GetDebugName() const;

  StackFrame::Type GetOutputStackFrameType() const;

  const char* trace_turbo_filename() const {
    return trace_turbo_filename_.get();
  }

  void set_trace_turbo_filename(std::unique_ptr<char[]> filename) {
    trace_turbo_filename_ = std::move(filename);
  }

  TickCounter& tick_counter() { return tick_counter_; }

  BasicBlockProfilerData* profiler_data() const { return profiler_data_; }
  void set_profiler_data(BasicBlockProfilerData* profiler_data) {
    profiler_data_ = profiler_data;
  }

  std::unique_ptr<PersistentHandles> DetachPersistentHandles() {
    DCHECK_NOT_NULL(ph_);
    return std::move(ph_);
  }

  std::unique_ptr<CanonicalHandlesMap> DetachCanonicalHandles() {
    DCHECK_NOT_NULL(canonical_handles_);
    return std::move(canonical_handles_);
  }
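
  // Rough sketch of the intended hand-off (see the comment on `ph_` below;
  // `info` and `ph` are hypothetical caller-side names):
  //
  //   std::unique_ptr<PersistentHandles> ph = info->DetachPersistentHandles();
  //   // ... code running on a LocalHeap owns `ph` for a while ...
  //   info->set_persistent_handles(std::move(ph));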

 private:
  void ConfigureFlags();

  void SetFlag(Flag flag) { flags_ |= flag; }
  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

  void SetTracingFlags(bool passes_filter);

  // Compilation flags.
  unsigned flags_ = 0;
  PoisoningMitigationLevel poisoning_level_ =
      PoisoningMitigationLevel::kDontPoison;

  const CodeKind code_kind_;
  int32_t builtin_index_ = -1;

  // We retain a reference to the bytecode array specifically to ensure it
  // doesn't get flushed while we are optimizing the code.
  Handle<BytecodeArray> bytecode_array_;
  Handle<SharedFunctionInfo> shared_info_;
  Handle<JSFunction> closure_;

  // The compiled code.
  Handle<Code> code_;

  // Basic block profiling support.
  BasicBlockProfilerData* profiler_data_ = nullptr;

  // The WebAssembly compilation result, not published in the NativeModule yet.
  std::unique_ptr<wasm::WasmCompilationResult> wasm_compilation_result_;

  // Entry point when compiling for OSR, {BailoutId::None} otherwise.
  BailoutId osr_offset_ = BailoutId::None();

  // The zone from which the compilation pipeline working on this
  // OptimizedCompilationInfo allocates.
  Zone* const zone_;

  BailoutReason bailout_reason_ = BailoutReason::kNoReason;

  InlinedFunctionList inlined_functions_;

  static constexpr int kNoOptimizationId = -1;
  const int optimization_id_;
  unsigned inlined_bytecode_size_ = 0;

  // The current OSR frame for specialization or {nullptr}.
  JavaScriptFrame* osr_frame_ = nullptr;

  Vector<const char> debug_name_;
  std::unique_ptr<char[]> trace_turbo_filename_;

  TickCounter tick_counter_;

  // The PersistentHandles container is passed along the following ownership
  // chain:
  // 1) created via PersistentHandlesScope inside of CompilationHandleScope,
  // 2) owned by OptimizedCompilationInfo,
  // 3) owned by the broker's LocalHeap when entering the LocalHeapScope,
  // 4) back to OptimizedCompilationInfo when exiting the LocalHeapScope.
  //
  // In normal execution it gets destroyed when PipelineData gets destroyed.
  // There is a special case in GenerateCodeForTesting where the JSHeapBroker
  // will not be retired in that same method. In this case, we need to
  // re-attach the PersistentHandles container to the JSHeapBroker.
  std::unique_ptr<PersistentHandles> ph_;

  // Canonical handles follow the same path as the persistent handles above.
  // The only difference is that they are created in the CanonicalHandleScope
  // (i.e. step 1) differs).
  std::unique_ptr<CanonicalHandlesMap> canonical_handles_;

  DISALLOW_COPY_AND_ASSIGN(OptimizedCompilationInfo);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CODEGEN_OPTIMIZED_COMPILATION_INFO_H_