// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILATION_INFO_H_
#define V8_COMPILATION_INFO_H_

#include <memory>

#include "src/compilation-dependencies.h"
#include "src/frames.h"
#include "src/globals.h"
#include "src/handles.h"
#include "src/objects.h"
#include "src/source-position-table.h"
#include "src/utils.h"
#include "src/vector.h"

namespace v8 {
namespace internal {

class DeclarationScope;
class DeferredHandles;
class FunctionLiteral;
class JavaScriptFrame;
class ParseInfo;
class Isolate;
class Zone;

// CompilationInfo encapsulates some information known at compile time.  It
// is constructed based on the resources available at compile time.
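//
// A rough usage sketch (hypothetical call site; real callers in the compiler
// pipeline may construct and configure this differently), using only the
// constructor and accessors declared below:
//
//   CompilationInfo info(&zone, parse_info, closure);
//   info.SetOptimizing();
//   if (info.IsOptimizing()) { /* run the optimizing backend */ }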
class V8_EXPORT_PRIVATE CompilationInfo final {
 public:
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.
  enum Flag {
    kDeferredCalling = 1 << 0,
    kNonDeferredCalling = 1 << 1,
    kSavesCallerDoubles = 1 << 2,
    kRequiresFrame = 1 << 3,
    kDeoptimizationSupport = 1 << 4,
    kAccessorInliningEnabled = 1 << 5,
    kSerializing = 1 << 6,
    kFunctionContextSpecializing = 1 << 7,
    kFrameSpecializing = 1 << 8,
    kInliningEnabled = 1 << 9,
    kDisableFutureOptimization = 1 << 10,
    kSplittingEnabled = 1 << 11,
    kDeoptimizationEnabled = 1 << 12,
    kSourcePositionsEnabled = 1 << 13,
    kBailoutOnUninitialized = 1 << 14,
    kOptimizeFromBytecode = 1 << 15,
    kLoopPeelingEnabled = 1 << 16,
  };
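  // Note: each Flag is a distinct bit, so several flags can be combined into
  // the flags_ bit set via SetFlag() and queried individually via GetFlag().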

  CompilationInfo(Zone* zone, ParseInfo* parse_info,
                  Handle<JSFunction> closure);
  CompilationInfo(Vector<const char> debug_name, Isolate* isolate, Zone* zone,
                  Code::Flags code_flags);
  ~CompilationInfo();

  ParseInfo* parse_info() const { return parse_info_; }

  // -----------------------------------------------------------
  // TODO(titzer): inline and delete accessors of ParseInfo
  // -----------------------------------------------------------
  Handle<Script> script() const;
  FunctionLiteral* literal() const;
  DeclarationScope* scope() const;
  Handle<SharedFunctionInfo> shared_info() const;
  bool has_shared_info() const;
  // -----------------------------------------------------------

  Isolate* isolate() const { return isolate_; }
  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_ast_id_.IsNone(); }
  Handle<JSFunction> closure() const { return closure_; }
  Handle<Code> code() const { return code_; }
  Code::Flags code_flags() const { return code_flags_; }
  BailoutId osr_ast_id() const { return osr_ast_id_; }
  JavaScriptFrame* osr_frame() const { return osr_frame_; }
  int num_parameters() const;
  int num_parameters_including_this() const;
  bool is_this_defined() const;

  void set_parameter_count(int parameter_count) {
    DCHECK(IsStub());
    parameter_count_ = parameter_count;
  }

  bool has_bytecode_array() const { return !bytecode_array_.is_null(); }
  Handle<BytecodeArray> bytecode_array() const { return bytecode_array_; }

  bool is_calling() const {
    return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
  }

  void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }

  bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }

  void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }

  bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }

  void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }

  bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }

  void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }

  bool requires_frame() const { return GetFlag(kRequiresFrame); }

  // Compilations marked as debug produce unoptimized code with debug break
  // slots. Inner functions that cannot be compiled without context are
  // compiled eagerly. Always include deoptimization support to avoid having
  // to recompile again.
  void MarkAsDebug() {
    set_is_debug();
    SetFlag(kDeoptimizationSupport);
  }

  bool is_debug() const;

  void PrepareForSerializing();

  bool will_serialize() const { return GetFlag(kSerializing); }

  void MarkAsFunctionContextSpecializing() {
    SetFlag(kFunctionContextSpecializing);
  }

  bool is_function_context_specializing() const {
    return GetFlag(kFunctionContextSpecializing);
  }

  void MarkAsFrameSpecializing() { SetFlag(kFrameSpecializing); }

  bool is_frame_specializing() const { return GetFlag(kFrameSpecializing); }

  void MarkAsDeoptimizationEnabled() { SetFlag(kDeoptimizationEnabled); }

  bool is_deoptimization_enabled() const {
    return GetFlag(kDeoptimizationEnabled);
  }

  void MarkAsAccessorInliningEnabled() { SetFlag(kAccessorInliningEnabled); }

  bool is_accessor_inlining_enabled() const {
    return GetFlag(kAccessorInliningEnabled);
  }

  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }

  bool is_source_positions_enabled() const {
    return GetFlag(kSourcePositionsEnabled);
  }

  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }

  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }

  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }

  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }

  void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }

  bool is_bailout_on_uninitialized() const {
    return GetFlag(kBailoutOnUninitialized);
  }

  void MarkAsOptimizeFromBytecode() { SetFlag(kOptimizeFromBytecode); }

  bool is_optimizing_from_bytecode() const {
    return GetFlag(kOptimizeFromBytecode);
  }

  void MarkAsLoopPeelingEnabled() { SetFlag(kLoopPeelingEnabled); }

  bool is_loop_peeling_enabled() const { return GetFlag(kLoopPeelingEnabled); }

  bool GeneratePreagedPrologue() const {
    // Generate a pre-aged prologue if we are optimizing for size, which
    // will make code flushing more aggressive. Only apply to Code::FUNCTION,
    // since StaticMarkingVisitor::IsFlushable only flushes proper functions.
    return FLAG_optimize_for_size && FLAG_age_code && !is_debug() &&
           output_code_kind() == Code::FUNCTION;
  }

  void SetCode(Handle<Code> code) { code_ = code; }

  void SetBytecodeArray(Handle<BytecodeArray> bytecode_array) {
    bytecode_array_ = bytecode_array;
  }

  bool ShouldTrapOnDeopt() const {
    return (FLAG_trap_on_deopt && IsOptimizing()) ||
           (FLAG_trap_on_stub_deopt && IsStub());
  }

  bool has_context() const;
  Context* context() const;

  bool has_native_context() const;
  Context* native_context() const;

  bool has_global_object() const;
  JSGlobalObject* global_object() const;

  // Accessors for the different compilation modes.
  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
  bool IsStub() const { return mode_ == STUB; }
  bool IsWasm() const { return output_code_kind() == Code::WASM_FUNCTION; }
  void SetOptimizing();
  void SetOptimizingForOsr(BailoutId osr_ast_id, JavaScriptFrame* osr_frame) {
    SetOptimizing();
    osr_ast_id_ = osr_ast_id;
    osr_frame_ = osr_frame;
  }

  // Deoptimization support.
  bool HasDeoptimizationSupport() const {
    return GetFlag(kDeoptimizationSupport);
  }
  void EnableDeoptimizationSupport() {
    DCHECK_EQ(BASE, mode_);
    SetFlag(kDeoptimizationSupport);
  }
  bool ShouldEnsureSpaceForLazyDeopt() { return !IsStub(); }

  bool ExpectsJSReceiverAsReceiver();

  // Determines whether or not to insert a self-optimization header.
  bool ShouldSelfOptimize();

  void set_deferred_handles(std::shared_ptr<DeferredHandles> deferred_handles);
  void set_deferred_handles(DeferredHandles* deferred_handles);
  std::shared_ptr<DeferredHandles> deferred_handles() {
    return deferred_handles_;
  }

  void ReopenHandlesInNewHandleScope();

  void AbortOptimization(BailoutReason reason) {
    DCHECK(reason != kNoReason);
    if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
    SetFlag(kDisableFutureOptimization);
  }

  void RetryOptimization(BailoutReason reason) {
    DCHECK(reason != kNoReason);
    if (GetFlag(kDisableFutureOptimization)) return;
    bailout_reason_ = reason;
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }

  int prologue_offset() const {
    DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
    return prologue_offset_;
  }

  void set_prologue_offset(int prologue_offset) {
    DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
    prologue_offset_ = prologue_offset;
  }

  CompilationDependencies* dependencies() { return &dependencies_; }

  int optimization_id() const { return optimization_id_; }

  int osr_expr_stack_height() { return osr_expr_stack_height_; }
  void set_osr_expr_stack_height(int height) {
    DCHECK(height >= 0);
    osr_expr_stack_height_ = height;
  }

  bool has_simple_parameters();

  struct InlinedFunctionHolder {
    Handle<SharedFunctionInfo> shared_info;

    // Root that holds the unoptimized code of the inlined function alive
    // (and out of reach of code flushing) until we finish compilation.
    // Do not remove.
    Handle<Code> inlined_code_object_root;

    InliningPosition position;

    InlinedFunctionHolder(Handle<SharedFunctionInfo> inlined_shared_info,
                          Handle<Code> inlined_code_object_root,
                          SourcePosition pos)
        : shared_info(inlined_shared_info),
          inlined_code_object_root(inlined_code_object_root) {
      position.position = pos;
      // initialized when generating the deoptimization literals
      position.inlined_function_id = DeoptimizationInputData::kNotInlinedIndex;
    }

    void RegisterInlinedFunctionId(size_t inlined_function_id) {
      position.inlined_function_id = static_cast<int>(inlined_function_id);
    }
  };

  typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
  InlinedFunctionList& inlined_functions() { return inlined_functions_; }

  // Returns the inlining id for source position tracking.
  int AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function,
                         SourcePosition pos);

  std::unique_ptr<char[]> GetDebugName() const;

  Code::Kind output_code_kind() const;

  StackFrame::Type GetOutputStackFrameType() const;

  int GetDeclareGlobalsFlags() const;

  SourcePositionTableBuilder::RecordingMode SourcePositionRecordingMode() const;

 private:
  // Compilation mode.
  // BASE is generated by the full codegen, optionally prepared for bailouts.
  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
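  // STUB is code compiled for a code stub, with an explicitly set parameter
  // count (see IsStub() and set_parameter_count() above).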
  enum Mode { BASE, OPTIMIZE, STUB };

  CompilationInfo(ParseInfo* parse_info, Vector<const char> debug_name,
                  Code::Flags code_flags, Mode mode, Isolate* isolate,
                  Zone* zone);

  ParseInfo* parse_info_;
  Isolate* isolate_;

  void SetMode(Mode mode) { mode_ = mode; }

  void SetFlag(Flag flag) { flags_ |= flag; }

  void SetFlag(Flag flag, bool value) {
    flags_ = value ? flags_ | flag : flags_ & ~flag;
  }

  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

  void set_is_debug();

  unsigned flags_;

  Code::Flags code_flags_;

  Handle<JSFunction> closure_;

  // The compiled code.
  Handle<Code> code_;

  // Compilation mode flag and whether deoptimization is allowed.
  Mode mode_;
  BailoutId osr_ast_id_;

  // Holds the bytecode array generated by the interpreter.
  // TODO(rmcilroy/mstarzinger): Temporary work-around until compiler.cc is
  // refactored to avoid us needing to carry the BytecodeArray around.
  Handle<BytecodeArray> bytecode_array_;

  // The zone from which the compilation pipeline working on this
  // CompilationInfo allocates.
  Zone* zone_;

  std::shared_ptr<DeferredHandles> deferred_handles_;

  // Dependencies for this compilation, e.g. stable maps.
  CompilationDependencies dependencies_;

  BailoutReason bailout_reason_;

  int prologue_offset_;

  InlinedFunctionList inlined_functions_;

  // Number of parameters used for compilation of stubs that require arguments.
  int parameter_count_;

  int optimization_id_;

  int osr_expr_stack_height_;

  // The current OSR frame for specialization or {nullptr}.
  JavaScriptFrame* osr_frame_ = nullptr;

  Vector<const char> debug_name_;

  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILATION_INFO_H_