1 // Copyright 2020 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_OBJECTS_JS_FUNCTION_INL_H_
6 #define V8_OBJECTS_JS_FUNCTION_INL_H_
7
8 #include "src/codegen/compiler.h"
9 #include "src/diagnostics/code-tracer.h"
10 #include "src/heap/heap-inl.h"
11 #include "src/ic/ic.h"
12 #include "src/init/bootstrapper.h"
13 #include "src/objects/feedback-cell-inl.h"
14 #include "src/objects/js-function.h"
15 #include "src/strings/string-builder-inl.h"
16
17 // Has to be the last include (doesn't have include guards):
18 #include "src/objects/object-macros.h"
19
20 namespace v8 {
21 namespace internal {
22
23 #include "torque-generated/src/objects/js-function-tq-inl.inc"
24
// Boilerplate constructors (Torque-generated and hand-written variants) plus
// the cast helper for JSFunction.
TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunctionOrBoundFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSBoundFunction)
OBJECT_CONSTRUCTORS_IMPL(JSFunction, JSFunctionOrBoundFunction)

CAST_ACCESSOR(JSFunction)

// Plain tagged-field accessor for the FeedbackCell stored at
// kFeedbackCellOffset.
ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell, kFeedbackCellOffset)
32
33 FeedbackVector JSFunction::feedback_vector() const {
34 DCHECK(has_feedback_vector());
35 return FeedbackVector::cast(raw_feedback_cell().value());
36 }
37
closure_feedback_cell_array()38 ClosureFeedbackCellArray JSFunction::closure_feedback_cell_array() const {
39 DCHECK(has_closure_feedback_cell_array());
40 return ClosureFeedbackCellArray::cast(raw_feedback_cell().value());
41 }
42
HasOptimizationMarker()43 bool JSFunction::HasOptimizationMarker() {
44 return has_feedback_vector() && feedback_vector().has_optimization_marker();
45 }
46
ClearOptimizationMarker()47 void JSFunction::ClearOptimizationMarker() {
48 DCHECK(has_feedback_vector());
49 feedback_vector().ClearOptimizationMarker();
50 }
51
ChecksOptimizationMarker()52 bool JSFunction::ChecksOptimizationMarker() {
53 return code().checks_optimization_marker();
54 }
55
IsMarkedForOptimization()56 bool JSFunction::IsMarkedForOptimization() {
57 return has_feedback_vector() && feedback_vector().optimization_marker() ==
58 OptimizationMarker::kCompileOptimized;
59 }
60
IsMarkedForConcurrentOptimization()61 bool JSFunction::IsMarkedForConcurrentOptimization() {
62 return has_feedback_vector() &&
63 feedback_vector().optimization_marker() ==
64 OptimizationMarker::kCompileOptimizedConcurrent;
65 }
66
MarkForOptimization(ConcurrencyMode mode)67 void JSFunction::MarkForOptimization(ConcurrencyMode mode) {
68 Isolate* isolate = GetIsolate();
69 if (!isolate->concurrent_recompilation_enabled() ||
70 isolate->bootstrapper()->IsActive()) {
71 mode = ConcurrencyMode::kNotConcurrent;
72 }
73
74 DCHECK(!is_compiled() || ActiveTierIsIgnition() || ActiveTierIsNCI() ||
75 ActiveTierIsMidtierTurboprop());
76 DCHECK(!ActiveTierIsTurbofan());
77 DCHECK(shared().IsInterpreted());
78 DCHECK(shared().allows_lazy_compilation() ||
79 !shared().optimization_disabled());
80
81 if (mode == ConcurrencyMode::kConcurrent) {
82 if (IsInOptimizationQueue()) {
83 if (FLAG_trace_concurrent_recompilation) {
84 PrintF(" ** Not marking ");
85 ShortPrint();
86 PrintF(" -- already in optimization queue.\n");
87 }
88 return;
89 }
90 if (FLAG_trace_concurrent_recompilation) {
91 PrintF(" ** Marking ");
92 ShortPrint();
93 PrintF(" for concurrent recompilation.\n");
94 }
95 }
96
97 SetOptimizationMarker(mode == ConcurrencyMode::kConcurrent
98 ? OptimizationMarker::kCompileOptimizedConcurrent
99 : OptimizationMarker::kCompileOptimized);
100 }
101
IsInOptimizationQueue()102 bool JSFunction::IsInOptimizationQueue() {
103 if (!has_feedback_vector()) return false;
104 return IsInOptimizationQueueMarker(feedback_vector().optimization_marker());
105 }
106
CompleteInobjectSlackTrackingIfActive()107 void JSFunction::CompleteInobjectSlackTrackingIfActive() {
108 if (!has_prototype_slot()) return;
109 if (has_initial_map() && initial_map().IsInobjectSlackTrackingInProgress()) {
110 initial_map().CompleteInobjectSlackTracking(GetIsolate());
111 }
112 }
113
abstract_code()114 AbstractCode JSFunction::abstract_code() {
115 if (ActiveTierIsIgnition()) {
116 return AbstractCode::cast(shared().GetBytecodeArray());
117 } else {
118 return AbstractCode::cast(code());
119 }
120 }
121
length()122 int JSFunction::length() { return shared().length(); }
123
code()124 Code JSFunction::code() const {
125 return Code::cast(RELAXED_READ_FIELD(*this, kCodeOffset));
126 }
127
// Installs a new Code object, with a relaxed store plus an explicit marking
// write barrier.
void JSFunction::set_code(Code value) {
  // Code objects never live in the young generation, so no generational
  // barrier is needed below — only the marking barrier.
  DCHECK(!ObjectInYoungGeneration(value));
  RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
#ifndef V8_DISABLE_WRITE_BARRIERS
  WriteBarrier::Marking(*this, RawField(kCodeOffset), value);
#endif
}
135
// Same as set_code, but deliberately skips the write barrier; only safe when
// the caller guarantees the barrier is unnecessary (e.g. during
// deserialization).
void JSFunction::set_code_no_write_barrier(Code value) {
  DCHECK(!ObjectInYoungGeneration(value));
  RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
}
140
141 // TODO(ishell): Why relaxed read but release store?
DEF_GETTER(JSFunction,shared,SharedFunctionInfo)142 DEF_GETTER(JSFunction, shared, SharedFunctionInfo) {
143 return SharedFunctionInfo::cast(
144 RELAXED_READ_FIELD(*this, kSharedFunctionInfoOffset));
145 }
146
// Stores the SharedFunctionInfo with release ordering so concurrent readers
// observe a fully initialized object.
void JSFunction::set_shared(SharedFunctionInfo value, WriteBarrierMode mode) {
  // Release semantics to support acquire read in NeedsResetDueToFlushedBytecode
  RELEASE_WRITE_FIELD(*this, kSharedFunctionInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, kSharedFunctionInfoOffset, value, mode);
}
152
// Records an optimization request in the feedback vector. Only meaningful
// when the installed code actually checks the marker and the function is not
// already running Turbofan code.
void JSFunction::SetOptimizationMarker(OptimizationMarker marker) {
  DCHECK(has_feedback_vector());
  DCHECK(ChecksOptimizationMarker());
  DCHECK(!ActiveTierIsTurbofan());

  feedback_vector().SetOptimizationMarker(marker);
}
160
has_feedback_vector()161 bool JSFunction::has_feedback_vector() const {
162 return shared().is_compiled() &&
163 raw_feedback_cell().value().IsFeedbackVector();
164 }
165
has_closure_feedback_cell_array()166 bool JSFunction::has_closure_feedback_cell_array() const {
167 return shared().is_compiled() &&
168 raw_feedback_cell().value().IsClosureFeedbackCellArray();
169 }
170
// Returns the function's context (must actually be a Context; see
// has_context).
Context JSFunction::context() {
  return TaggedField<Context, kContextOffset>::load(*this);
}
174
// Whether the context slot holds a real Context yet (it may be undefined
// during initialization; see set_context's DCHECK).
bool JSFunction::has_context() const {
  return TaggedField<HeapObject, kContextOffset>::load(*this).IsContext();
}
178
// Convenience accessor: the global proxy of the function's context.
JSGlobalProxy JSFunction::global_proxy() { return context().global_proxy(); }
180
// Convenience accessor: the native context of the function's context.
NativeContext JSFunction::native_context() {
  return context().native_context();
}
184
// Stores the context slot; undefined is allowed as a transient value before
// the real context is attached.
void JSFunction::set_context(HeapObject value) {
  DCHECK(value.IsUndefined() || value.IsContext());
  WRITE_FIELD(*this, kContextOffset, value);
  WRITE_BARRIER(*this, kContextOffset, value);
}
190
// Accessor for the shared prototype/initial-map slot; only valid on maps
// that actually have a prototype slot (checked by the macro's condition).
ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, HeapObject,
                  kPrototypeOrInitialMapOffset, map().has_prototype_slot())
193
// Whether this function's map reserves a prototype slot at all.
DEF_GETTER(JSFunction, has_prototype_slot, bool) {
  return map(isolate).has_prototype_slot();
}
197
// The initial map for instances of this function; the caller must have
// checked has_initial_map, since the slot may instead hold a prototype.
DEF_GETTER(JSFunction, initial_map, Map) {
  return Map::cast(prototype_or_initial_map(isolate));
}
201
// True when the prototype/initial-map slot currently holds a Map.
DEF_GETTER(JSFunction, has_initial_map, bool) {
  DCHECK(has_prototype_slot(isolate));
  return prototype_or_initial_map(isolate).IsMap(isolate);
}
206
// True when an instance prototype is available: either via the initial map,
// or directly in the slot (the-hole means "not set yet").
DEF_GETTER(JSFunction, has_instance_prototype, bool) {
  DCHECK(has_prototype_slot(isolate));
  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
  // i::GetIsolateForPtrCompr(HeapObject).
  return has_initial_map(isolate) ||
         !prototype_or_initial_map(isolate).IsTheHole(
             GetReadOnlyRoots(isolate));
}
215
// True when a prototype exists, either as a non-instance prototype stashed
// on the map or as a regular instance prototype.
DEF_GETTER(JSFunction, has_prototype, bool) {
  DCHECK(has_prototype_slot(isolate));
  return map(isolate).has_non_instance_prototype() ||
         has_instance_prototype(isolate);
}
221
// Whether the function exposes a "prototype" property: constructors with a
// prototype slot do, and generator functions always do.
DEF_GETTER(JSFunction, has_prototype_property, bool) {
  return (has_prototype_slot(isolate) && IsConstructor(isolate)) ||
         IsGeneratorFunction(shared(isolate).kind());
}
226
// True when the prototype cannot be read directly from the function and a
// runtime lookup is needed (no prototype property, or a non-instance
// prototype stored on the map).
DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
  return !has_prototype_property(isolate) ||
         map(isolate).has_non_instance_prototype();
}
231
// Returns the prototype used for instances created by this function; the
// caller must have checked has_instance_prototype.
DEF_GETTER(JSFunction, instance_prototype, HeapObject) {
  DCHECK(has_instance_prototype(isolate));
  if (has_initial_map(isolate)) return initial_map(isolate).prototype(isolate);
  // When there is no initial map and the prototype is a JSReceiver, the
  // initial map field is used for the prototype field.
  return HeapObject::cast(prototype_or_initial_map(isolate));
}
239
// Returns the value of the function's "prototype" property.
DEF_GETTER(JSFunction, prototype, Object) {
  DCHECK(has_prototype(isolate));
  // If the function's prototype property has been set to a non-JSReceiver
  // value, that value is stored in the constructor field of the map;
  // otherwise fall through to the regular instance prototype.
  if (!map(isolate).has_non_instance_prototype()) {
    return instance_prototype(isolate);
  }
  Object non_instance_prototype = map(isolate).GetConstructor(isolate);
  // The map must have a prototype in that field, not a back pointer.
  DCHECK(!non_instance_prototype.IsMap(isolate));
  DCHECK(!non_instance_prototype.IsFunctionTemplateInfo(isolate));
  return non_instance_prototype;
}
253
is_compiled()254 bool JSFunction::is_compiled() const {
255 return code().builtin_index() != Builtins::kCompileLazy &&
256 shared().is_compiled();
257 }
258
// Detects the inconsistent state left behind by bytecode flushing: the
// SharedFunctionInfo is no longer compiled, but this JSFunction still points
// at real code instead of CompileLazy.
bool JSFunction::NeedsResetDueToFlushedBytecode() {
  // Do a raw read for shared and code fields here since this function may be
  // called on a concurrent thread and the JSFunction might not be fully
  // initialized yet.
  // Acquire read pairs with the release store in set_shared.
  Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
  Object maybe_code = RELAXED_READ_FIELD(*this, kCodeOffset);

  // Partially initialized function: nothing to reset.
  if (!maybe_shared.IsSharedFunctionInfo() || !maybe_code.IsCode()) {
    return false;
  }

  SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
  Code code = Code::cast(maybe_code);
  return !shared.is_compiled() &&
         code.builtin_index() != Builtins::kCompileLazy;
}
275
// If bytecode flushing left this function stale (see
// NeedsResetDueToFlushedBytecode), reinstall CompileLazy and drop the
// feedback vector. The optional callback lets the GC learn about the slot
// update performed while resetting the feedback cell.
void JSFunction::ResetIfBytecodeFlushed(
    base::Optional<std::function<void(HeapObject object, ObjectSlot slot,
                                      HeapObject target)>>
        gc_notify_updated_slot) {
  if (FLAG_flush_bytecode && NeedsResetDueToFlushedBytecode()) {
    // Bytecode was flushed and function is now uncompiled, reset JSFunction
    // by setting code to CompileLazy and clearing the feedback vector.
    set_code(GetIsolate()->builtins()->builtin(i::Builtins::kCompileLazy));
    raw_feedback_cell().reset_feedback_vector(gc_notify_updated_slot);
  }
}
287
288 } // namespace internal
289 } // namespace v8
290
291 #include "src/objects/object-macros-undef.h"
292
293 #endif // V8_OBJECTS_JS_FUNCTION_INL_H_
294