// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-lazy-gen.h"

#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/common/globals.h"
#include "src/objects/feedback-vector.h"
#include "src/objects/shared-function-info.h"

namespace v8 {
namespace internal {

GenerateTailCallToJSCode(TNode<Code> code,TNode<JSFunction> function)16 void LazyBuiltinsAssembler::GenerateTailCallToJSCode(
17 TNode<Code> code, TNode<JSFunction> function) {
18 auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
19 auto context = Parameter<Context>(Descriptor::kContext);
20 auto new_target = Parameter<Object>(Descriptor::kNewTarget);
21
22 TailCallJSCode(code, context, function, new_target, argc);
23 }
24
GenerateTailCallToReturnedCode(Runtime::FunctionId function_id,TNode<JSFunction> function)25 void LazyBuiltinsAssembler::GenerateTailCallToReturnedCode(
26 Runtime::FunctionId function_id, TNode<JSFunction> function) {
27 auto context = Parameter<Context>(Descriptor::kContext);
28 TNode<Code> code = CAST(CallRuntime(function_id, context, function));
29 GenerateTailCallToJSCode(code, function);
30 }
31
TailCallRuntimeIfMarkerEquals(TNode<Uint32T> marker,OptimizationMarker expected_marker,Runtime::FunctionId function_id,TNode<JSFunction> function)32 void LazyBuiltinsAssembler::TailCallRuntimeIfMarkerEquals(
33 TNode<Uint32T> marker, OptimizationMarker expected_marker,
34 Runtime::FunctionId function_id, TNode<JSFunction> function) {
35 Label no_match(this);
36 GotoIfNot(Word32Equal(marker, Uint32Constant(expected_marker)), &no_match);
37 GenerateTailCallToReturnedCode(function_id, function);
38 BIND(&no_match);
39 }
40
MaybeTailCallOptimizedCodeSlot(TNode<JSFunction> function,TNode<FeedbackVector> feedback_vector)41 void LazyBuiltinsAssembler::MaybeTailCallOptimizedCodeSlot(
42 TNode<JSFunction> function, TNode<FeedbackVector> feedback_vector) {
43 Label fallthrough(this), may_have_optimized_code(this);
44
45 TNode<Uint32T> optimization_state =
46 LoadObjectField<Uint32T>(feedback_vector, FeedbackVector::kFlagsOffset);
47
48 // Fall through if no optimization trigger or optimized code.
49 GotoIfNot(IsSetWord32(
50 optimization_state,
51 FeedbackVector::kHasOptimizedCodeOrCompileOptimizedMarkerMask),
52 &fallthrough);
53
54 GotoIfNot(IsSetWord32(
55 optimization_state,
56 FeedbackVector::kHasCompileOptimizedOrLogFirstExecutionMarker),
57 &may_have_optimized_code);
58
59 // TODO(ishell): introduce Runtime::kHandleOptimizationMarker and check
60 // all these marker values there.
61 TNode<Uint32T> marker =
62 DecodeWord32<FeedbackVector::OptimizationMarkerBits>(optimization_state);
63 TailCallRuntimeIfMarkerEquals(marker, OptimizationMarker::kLogFirstExecution,
64 Runtime::kFunctionFirstExecution, function);
65 TailCallRuntimeIfMarkerEquals(marker, OptimizationMarker::kCompileOptimized,
66 Runtime::kCompileOptimized_NotConcurrent,
67 function);
68 TailCallRuntimeIfMarkerEquals(
69 marker, OptimizationMarker::kCompileOptimizedConcurrent,
70 Runtime::kCompileOptimized_Concurrent, function);
71
72 Unreachable();
73 BIND(&may_have_optimized_code);
74 {
75 Label heal_optimized_code_slot(this);
76 TNode<MaybeObject> maybe_optimized_code_entry = LoadMaybeWeakObjectField(
77 feedback_vector, FeedbackVector::kMaybeOptimizedCodeOffset);
78 // Optimized code slot is a weak reference.
79 TNode<Code> optimized_code = CAST(GetHeapObjectAssumeWeak(
80 maybe_optimized_code_entry, &heal_optimized_code_slot));
81
82 // Check if the optimized code is marked for deopt. If it is, call the
83 // runtime to clear it.
84 TNode<CodeDataContainer> code_data_container =
85 CAST(LoadObjectField(optimized_code, Code::kCodeDataContainerOffset));
86
87 TNode<Int32T> code_kind_specific_flags = LoadObjectField<Int32T>(
88 code_data_container, CodeDataContainer::kKindSpecificFlagsOffset);
89 GotoIf(IsSetWord32<Code::MarkedForDeoptimizationField>(
90 code_kind_specific_flags),
91 &heal_optimized_code_slot);
92
93 // Optimized code is good, get it into the closure and link the closure into
94 // the optimized functions list, then tail call the optimized code.
95 StoreObjectField(function, JSFunction::kCodeOffset, optimized_code);
96 GenerateTailCallToJSCode(optimized_code, function);
97
98 // Optimized code slot contains deoptimized code or code is cleared and
99 // optimized code marker isn't updated. Evict the code, update the marker
100 // and re-enter the closure's code.
101 BIND(&heal_optimized_code_slot);
102 GenerateTailCallToReturnedCode(Runtime::kHealOptimizedCodeSlot, function);
103 }
104
105 // Fall-through if the optimized code cell is clear and there is no
106 // optimization marker.
107 BIND(&fallthrough);
108 }
109
CompileLazy(TNode<JSFunction> function)110 void LazyBuiltinsAssembler::CompileLazy(TNode<JSFunction> function) {
111 // First lookup code, maybe we don't need to compile!
112 Label compile_function(this, Label::kDeferred);
113
114 // Check the code object for the SFI. If SFI's code entry points to
115 // CompileLazy, then we need to lazy compile regardless of the function or
116 // feedback vector marker.
117 TNode<SharedFunctionInfo> shared =
118 CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
119 TNode<Code> sfi_code = GetSharedFunctionInfoCode(shared, &compile_function);
120
121 TNode<HeapObject> feedback_cell_value = LoadFeedbackCellValue(function);
122
123 // If feedback cell isn't initialized, compile function
124 GotoIf(IsUndefined(feedback_cell_value), &compile_function);
125
126 Label use_sfi_code(this);
127 // If there is no feedback, don't check for optimized code.
128 GotoIf(HasInstanceType(feedback_cell_value, CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
129 &use_sfi_code);
130
131 // If it isn't undefined or fixed array it must be a feedback vector.
132 CSA_ASSERT(this, IsFeedbackVector(feedback_cell_value));
133
134 // Is there an optimization marker or optimized code in the feedback vector?
135 MaybeTailCallOptimizedCodeSlot(function, CAST(feedback_cell_value));
136 Goto(&use_sfi_code);
137
138 BIND(&use_sfi_code);
139 // If not, install the SFI's code entry and jump to that.
140 CSA_ASSERT(this, TaggedNotEqual(sfi_code, HeapConstant(BUILTIN_CODE(
141 isolate(), CompileLazy))));
142 StoreObjectField(function, JSFunction::kCodeOffset, sfi_code);
143 GenerateTailCallToJSCode(sfi_code, function);
144
145 BIND(&compile_function);
146 GenerateTailCallToReturnedCode(Runtime::kCompileLazy, function);
147 }
148
// Builtin stub installed as the code of not-yet-compiled functions; it
// compiles (or fetches already-available code for) the called JSFunction and
// tail-calls into it.
TF_BUILTIN(CompileLazy, LazyBuiltinsAssembler) {
  auto function = Parameter<JSFunction>(Descriptor::kTarget);

  CompileLazy(function);
}

// Builtin stub installed on functions whose code was deoptimized; it resets
// the function's code slot back to CompileLazy and dispatches to it.
TF_BUILTIN(CompileLazyDeoptimizedCode, LazyBuiltinsAssembler) {
  auto function = Parameter<JSFunction>(Descriptor::kTarget);

  // Set the code slot inside the JSFunction to CompileLazy.
  TNode<Code> code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  StoreObjectField(function, JSFunction::kCodeOffset, code);
  GenerateTailCallToJSCode(code, function);
}

}  // namespace internal
}  // namespace v8