// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-lazy-gen.h"

#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/common/globals.h"
#include "src/objects/code-inl.h"
#include "src/objects/feedback-vector.h"
#include "src/objects/shared-function-info.h"

namespace v8 {
namespace internal {

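// Tail-calls the given |code| object on behalf of |function|, forwarding the
// actual argument count, context, and new.target from this builtin's own
// parameters.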
void LazyBuiltinsAssembler::GenerateTailCallToJSCode(
    TNode<CodeT> code, TNode<JSFunction> function) {
  auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  TailCallJSCode(code, context, function, new_target, argc);
}

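// Calls the given runtime function, which is expected to return a CodeT
// object, then tail-calls that code with |function| as the callee.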
void LazyBuiltinsAssembler::GenerateTailCallToReturnedCode(
    Runtime::FunctionId function_id, TNode<JSFunction> function) {
  auto context = Parameter<Context>(Descriptor::kContext);
  TNode<CodeT> code = CAST(CallRuntime(function_id, context, function));
  GenerateTailCallToJSCode(code, function);
}

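// If |state| equals |expected_state|, tail-calls the given runtime function;
// otherwise execution falls through to the code that follows this call.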
void LazyBuiltinsAssembler::TailCallRuntimeIfStateEquals(
    TNode<Uint32T> state, TieringState expected_state,
    Runtime::FunctionId function_id, TNode<JSFunction> function) {
  Label no_match(this);
  GotoIfNot(Word32Equal(state,
                        Uint32Constant(static_cast<uint32_t>(expected_state))),
            &no_match);
  GenerateTailCallToReturnedCode(function_id, function);
  BIND(&no_match);
}

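// Checks the feedback vector for a pending tiering request or cached
// optimized code. A tiering request is dispatched to the matching runtime
// compile function; valid optimized code is installed on the closure and
// tail-called. Falls through if neither is present.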
void LazyBuiltinsAssembler::MaybeTailCallOptimizedCodeSlot(
    TNode<JSFunction> function, TNode<FeedbackVector> feedback_vector) {
  Label fallthrough(this), may_have_optimized_code(this);

  TNode<Uint32T> optimization_state =
      LoadObjectField<Uint32T>(feedback_vector, FeedbackVector::kFlagsOffset);

  // Fall through if no optimization trigger or optimized code.
  GotoIfNot(
      IsSetWord32(
          optimization_state,
          FeedbackVector::kHasOptimizedCodeOrTieringStateIsAnyRequestMask),
      &fallthrough);

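  // No tiering request pending means the flag above was set by the optimized
  // code slot; inspect (and possibly heal) that slot below.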
  GotoIfNot(IsSetWord32(optimization_state,
                        FeedbackVector::kTieringStateIsAnyRequestMask),
            &may_have_optimized_code);

  // TODO(ishell): introduce Runtime::kHandleTieringState and check
  // all these state values there.
  TNode<Uint32T> state =
      DecodeWord32<FeedbackVector::TieringStateBits>(optimization_state);
  TailCallRuntimeIfStateEquals(state,
                               TieringState::kRequestTurbofan_Synchronous,
                               Runtime::kCompileTurbofan_Synchronous, function);
  TailCallRuntimeIfStateEquals(state, TieringState::kRequestTurbofan_Concurrent,
                               Runtime::kCompileTurbofan_Concurrent, function);
  TailCallRuntimeIfStateEquals(state, TieringState::kRequestMaglev_Synchronous,
                               Runtime::kCompileMaglev_Synchronous, function);
  TailCallRuntimeIfStateEquals(state, TieringState::kRequestMaglev_Concurrent,
                               Runtime::kCompileMaglev_Concurrent, function);

  Unreachable();
  BIND(&may_have_optimized_code);
  {
    Label heal_optimized_code_slot(this);
    TNode<MaybeObject> maybe_optimized_code_entry = LoadMaybeWeakObjectField(
        feedback_vector, FeedbackVector::kMaybeOptimizedCodeOffset);

    // Optimized code slot is a weak reference to CodeT object.
    TNode<CodeT> optimized_code = CAST(GetHeapObjectAssumeWeak(
        maybe_optimized_code_entry, &heal_optimized_code_slot));

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    TNode<CodeDataContainer> code_data_container =
        CodeDataContainerFromCodeT(optimized_code);
    TNode<Int32T> code_kind_specific_flags = LoadObjectField<Int32T>(
        code_data_container, CodeDataContainer::kKindSpecificFlagsOffset);
    GotoIf(IsSetWord32<Code::MarkedForDeoptimizationField>(
               code_kind_specific_flags),
           &heal_optimized_code_slot);

    // Optimized code is good, get it into the closure and link the closure
    // into the optimized functions list, then tail call the optimized code.
    StoreObjectField(function, JSFunction::kCodeOffset, optimized_code);
    Comment("MaybeTailCallOptimizedCodeSlot:: GenerateTailCallToJSCode");
    GenerateTailCallToJSCode(optimized_code, function);

    // Optimized code slot contains deoptimized code, or the code is cleared
    // and tiering state hasn't yet been updated. Evict the code, update the
    // state and re-enter the closure's code.
    BIND(&heal_optimized_code_slot);
    GenerateTailCallToReturnedCode(Runtime::kHealOptimizedCodeSlot, function);
  }

  // Fall-through if the optimized code cell is clear and the tiering state is
  // kNone.
  BIND(&fallthrough);
}

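// Installs the SharedFunctionInfo's code on |function| and tail-calls it,
// preferring optimized or baseline code when the feedback vector provides it.
// Tail-calls Runtime::kCompileLazy if the function still needs compilation.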
void LazyBuiltinsAssembler::CompileLazy(TNode<JSFunction> function) {
  // First lookup code, maybe we don't need to compile!
  Label compile_function(this, Label::kDeferred);

  // Check the code object for the SFI. If SFI's code entry points to
  // CompileLazy, then we need to lazy compile regardless of the function or
  // tiering state.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  TVARIABLE(Uint16T, sfi_data_type);
  TNode<CodeT> sfi_code =
      GetSharedFunctionInfoCode(shared, &sfi_data_type, &compile_function);

  TNode<HeapObject> feedback_cell_value = LoadFeedbackCellValue(function);

  // If feedback cell isn't initialized, compile function
  GotoIf(IsUndefined(feedback_cell_value), &compile_function);

  CSA_DCHECK(this, TaggedNotEqual(sfi_code, HeapConstant(BUILTIN_CODE(
                                                isolate(), CompileLazy))));
  StoreObjectField(function, JSFunction::kCodeOffset, sfi_code);

  Label maybe_use_sfi_code(this);
  // If there is no feedback, don't check for optimized code.
  GotoIf(HasInstanceType(feedback_cell_value, CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
         &maybe_use_sfi_code);

  // If it isn't undefined or fixed array it must be a feedback vector.
  CSA_DCHECK(this, IsFeedbackVector(feedback_cell_value));

  // Is there a tiering state or optimized code in the feedback vector?
  MaybeTailCallOptimizedCodeSlot(function, CAST(feedback_cell_value));
  Goto(&maybe_use_sfi_code);

  // At this point we have a candidate Code object. It's *not* a cached
  // optimized Code object (we'd have tail-called it above). A usual case would
  // be the InterpreterEntryTrampoline to start executing existing bytecode.
  BIND(&maybe_use_sfi_code);
  Label tailcall_code(this), baseline(this);
  TVARIABLE(CodeT, code);

  // Check if we have baseline code.
  GotoIf(InstanceTypeEqual(sfi_data_type.value(), CODET_TYPE), &baseline);

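  // Not baseline code: tail-call the SFI code as-is (typically the
  // InterpreterEntryTrampoline).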
  code = sfi_code;
  Goto(&tailcall_code);

  BIND(&baseline);
  // Ensure we have a feedback vector.
  code = Select<CodeT>(
      IsFeedbackVector(feedback_cell_value), [=]() { return sfi_code; },
      [=]() {
        return CAST(CallRuntime(Runtime::kInstallBaselineCode,
                                Parameter<Context>(Descriptor::kContext),
                                function));
      });
  Goto(&tailcall_code);

  BIND(&tailcall_code);
  GenerateTailCallToJSCode(code.value(), function);

  BIND(&compile_function);
  GenerateTailCallToReturnedCode(Runtime::kCompileLazy, function);
}

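// Entry point for calls to functions whose code slot still points at the
// CompileLazy builtin.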
TF_BUILTIN(CompileLazy, LazyBuiltinsAssembler) {
  auto function = Parameter<JSFunction>(Descriptor::kTarget);

  CompileLazy(function);
}

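// Resets the function's code slot to the CompileLazy builtin and tail-calls
// it, sending the function back through the lazy-compilation path.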
TF_BUILTIN(CompileLazyDeoptimizedCode, LazyBuiltinsAssembler) {
  auto function = Parameter<JSFunction>(Descriptor::kTarget);

  TNode<CodeT> code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  // Set the code slot inside the JSFunction to CompileLazy.
  StoreObjectField(function, JSFunction::kCodeOffset, code);
  GenerateTailCallToJSCode(code, function);
}

}  // namespace internal
}  // namespace v8