1 // Copyright 2020 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_OBJECTS_JS_FUNCTION_INL_H_
6 #define V8_OBJECTS_JS_FUNCTION_INL_H_
7
8 #include "src/objects/js-function.h"
9
10 // Include other inline headers *after* including js-function.h, such that e.g.
11 // the definition of JSFunction is available (and this comment prevents
12 // clang-format from merging that include into the following ones).
13 #include "src/diagnostics/code-tracer.h"
14 #include "src/ic/ic.h"
15 #include "src/init/bootstrapper.h"
16 #include "src/objects/feedback-cell-inl.h"
17 #include "src/objects/map-updater.h"
18 #include "src/objects/shared-function-info-inl.h"
19
20 // Has to be the last include (doesn't have include guards):
21 #include "src/objects/object-macros.h"
22
23 namespace v8 {
24 namespace internal {
25
26 #include "torque-generated/src/objects/js-function-tq-inl.inc"
27
// Torque-generated constructor/cast boilerplate for the JSFunction class
// hierarchy (declared in torque-generated/src/objects/js-function-tq-inl.inc).
TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunctionOrBoundFunctionOrWrappedFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSBoundFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSWrappedFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunction)
32
// Plain and release/acquire accessors for the FeedbackCell field. The cell's
// value() holds either a FeedbackVector or a ClosureFeedbackCellArray,
// depending on compilation state (see has_feedback_vector() below).
ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell, kFeedbackCellOffset)
RELEASE_ACQUIRE_ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell,
                          kFeedbackCellOffset)
36
37 FeedbackVector JSFunction::feedback_vector() const {
38 DCHECK(has_feedback_vector());
39 return FeedbackVector::cast(raw_feedback_cell().value());
40 }
41
closure_feedback_cell_array()42 ClosureFeedbackCellArray JSFunction::closure_feedback_cell_array() const {
43 DCHECK(has_closure_feedback_cell_array());
44 return ClosureFeedbackCellArray::cast(raw_feedback_cell().value());
45 }
46
reset_tiering_state()47 void JSFunction::reset_tiering_state() {
48 DCHECK(has_feedback_vector());
49 feedback_vector().reset_tiering_state();
50 }
51
ChecksTieringState()52 bool JSFunction::ChecksTieringState() { return code().checks_tiering_state(); }
53
CompleteInobjectSlackTrackingIfActive()54 void JSFunction::CompleteInobjectSlackTrackingIfActive() {
55 if (!has_prototype_slot()) return;
56 if (has_initial_map() && initial_map().IsInobjectSlackTrackingInProgress()) {
57 MapUpdater::CompleteInobjectSlackTracking(GetIsolate(), initial_map());
58 }
59 }
60
61 template <typename IsolateT>
abstract_code(IsolateT * isolate)62 AbstractCode JSFunction::abstract_code(IsolateT* isolate) {
63 if (ActiveTierIsIgnition()) {
64 return AbstractCode::cast(shared().GetBytecodeArray(isolate));
65 } else {
66 return AbstractCode::cast(FromCodeT(code(kAcquireLoad)));
67 }
68 }
69
length()70 int JSFunction::length() { return shared().length(); }
71
// Relaxed and release/acquire accessors for the code field. See the comment
// in ShouldFlushBaselineCode() regarding the acquire/release protocol used
// when this field is read from concurrent threads.
ACCESSORS_RELAXED(JSFunction, code, CodeT, kCodeOffset)
RELEASE_ACQUIRE_ACCESSORS(JSFunction, code, CodeT, kCodeOffset)
74
#ifdef V8_EXTERNAL_CODE_SPACE
// Convenience overload accepting a Code object: converts to CodeT and
// forwards to the release-store setter above.
void JSFunction::set_code(Code code, ReleaseStoreTag, WriteBarrierMode mode) {
  set_code(ToCodeT(code), kReleaseStore, mode);
}
#endif
80
code_entry_point()81 Address JSFunction::code_entry_point() const {
82 if (V8_EXTERNAL_CODE_SPACE_BOOL) {
83 return CodeDataContainer::cast(code()).code_entry_point();
84 } else {
85 return code().InstructionStart();
86 }
87 }
88
// TODO(ishell): Why relaxed read but release store?
// Plain getter for the SharedFunctionInfo; forwards to the relaxed-load
// variant below.
DEF_GETTER(JSFunction, shared, SharedFunctionInfo) {
  return shared(cage_base, kRelaxedLoad);
}

// Relaxed load of the SharedFunctionInfo field. Pairs with the release store
// in set_shared() (see the comment there).
DEF_RELAXED_GETTER(JSFunction, shared, SharedFunctionInfo) {
  return TaggedField<SharedFunctionInfo,
                     kSharedFunctionInfoOffset>::Relaxed_Load(cage_base, *this);
}
98
// Stores the SharedFunctionInfo with release semantics and a conditional
// write barrier.
void JSFunction::set_shared(SharedFunctionInfo value, WriteBarrierMode mode) {
  // Release semantics to support acquire read in NeedsResetDueToFlushedBytecode
  RELEASE_WRITE_FIELD(*this, kSharedFunctionInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, kSharedFunctionInfoOffset, value, mode);
}
104
tiering_state()105 TieringState JSFunction::tiering_state() const {
106 if (!has_feedback_vector()) return TieringState::kNone;
107 return feedback_vector().tiering_state();
108 }
109
set_tiering_state(TieringState state)110 void JSFunction::set_tiering_state(TieringState state) {
111 DCHECK(has_feedback_vector());
112 DCHECK(IsNone(state) || ChecksTieringState());
113 feedback_vector().set_tiering_state(state);
114 }
115
osr_tiering_state()116 TieringState JSFunction::osr_tiering_state() {
117 DCHECK(has_feedback_vector());
118 return feedback_vector().osr_tiering_state();
119 }
120
set_osr_tiering_state(TieringState marker)121 void JSFunction::set_osr_tiering_state(TieringState marker) {
122 DCHECK(has_feedback_vector());
123 feedback_vector().set_osr_tiering_state(marker);
124 }
125
has_feedback_vector()126 bool JSFunction::has_feedback_vector() const {
127 return shared().is_compiled() &&
128 raw_feedback_cell().value().IsFeedbackVector();
129 }
130
has_closure_feedback_cell_array()131 bool JSFunction::has_closure_feedback_cell_array() const {
132 return shared().is_compiled() &&
133 raw_feedback_cell().value().IsClosureFeedbackCellArray();
134 }
135
// Plain load of the function's context.
Context JSFunction::context() {
  return TaggedField<Context, kContextOffset>::load(*this);
}

// Relaxed load of the function's context, for concurrent readers.
DEF_RELAXED_GETTER(JSFunction, context, Context) {
  return TaggedField<Context, kContextOffset>::Relaxed_Load(cage_base, *this);
}
143
has_context()144 bool JSFunction::has_context() const {
145 return TaggedField<HeapObject, kContextOffset>::load(*this).IsContext();
146 }
147
global_proxy()148 JSGlobalProxy JSFunction::global_proxy() { return context().global_proxy(); }
149
native_context()150 NativeContext JSFunction::native_context() {
151 return context().native_context();
152 }
153
// Release/acquire accessors for the combined prototype-or-initial-map slot;
// only valid on maps that actually have a prototype slot (checked).
RELEASE_ACQUIRE_ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map,
                                  HeapObject, kPrototypeOrInitialMapOffset,
                                  map().has_prototype_slot())
157
// Whether this function's map reserves a prototype slot at all (e.g. arrow
// functions and most methods do not).
DEF_GETTER(JSFunction, has_prototype_slot, bool) {
  return map(cage_base).has_prototype_slot();
}
161
// The initial map used for instances of this constructor. Only valid when
// has_initial_map() is true; otherwise the slot holds the prototype instead.
DEF_GETTER(JSFunction, initial_map, Map) {
  return Map::cast(prototype_or_initial_map(cage_base, kAcquireLoad));
}
165
// Whether the prototype-or-initial-map slot currently holds a Map (as opposed
// to a prototype object or the hole).
DEF_GETTER(JSFunction, has_initial_map, bool) {
  DCHECK(has_prototype_slot(cage_base));
  return prototype_or_initial_map(cage_base, kAcquireLoad).IsMap(cage_base);
}
170
// Whether an instance prototype is available: either via the initial map, or
// stored directly in the slot (the hole means "not yet set").
DEF_GETTER(JSFunction, has_instance_prototype, bool) {
  DCHECK(has_prototype_slot(cage_base));
  if (has_initial_map(cage_base)) return true;
  HeapObject slot = prototype_or_initial_map(cage_base, kAcquireLoad);
  return !slot.IsTheHole(GetReadOnlyRoots(cage_base));
}
177
// Whether this function has a prototype, either a regular instance prototype
// or a non-instance one stored on the map.
DEF_GETTER(JSFunction, has_prototype, bool) {
  DCHECK(has_prototype_slot(cage_base));
  if (map(cage_base).has_non_instance_prototype()) return true;
  return has_instance_prototype(cage_base);
}
183
// Whether the function exposes a "prototype" property: generator functions
// always do; otherwise it must be a constructor with a prototype slot.
DEF_GETTER(JSFunction, has_prototype_property, bool) {
  if (IsGeneratorFunction(shared(cage_base).kind())) return true;
  return has_prototype_slot(cage_base) && IsConstructor(cage_base);
}
188
// Whether reading the prototype cannot be done directly from the initial map
// and must go through the runtime.
DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
  if (!has_prototype_property(cage_base)) return true;
  return map(cage_base).has_non_instance_prototype();
}
193
// The prototype used for instances created by this function. Read from the
// initial map when one exists; otherwise the slot itself holds the prototype.
DEF_GETTER(JSFunction, instance_prototype, HeapObject) {
  DCHECK(has_instance_prototype(cage_base));
  if (!has_initial_map(cage_base)) {
    // When there is no initial map and the prototype is a JSReceiver, the
    // initial map field is used for the prototype field.
    return HeapObject::cast(prototype_or_initial_map(cage_base, kAcquireLoad));
  }
  return initial_map(cage_base).prototype(cage_base);
}
203
// The function's "prototype" property value. Non-JSReceiver prototype values
// are stashed in the map's constructor field rather than the prototype slot.
DEF_GETTER(JSFunction, prototype, Object) {
  DCHECK(has_prototype(cage_base));
  if (!map(cage_base).has_non_instance_prototype()) {
    return instance_prototype(cage_base);
  }
  // If the function's prototype property has been set to a non-JSReceiver
  // value, that value is stored in the constructor field of the map.
  Object non_instance_proto = map(cage_base).GetConstructor(cage_base);
  // The map must have a prototype in that field, not a back pointer.
  DCHECK(!non_instance_proto.IsMap(cage_base));
  DCHECK(!non_instance_proto.IsFunctionTemplateInfo(cage_base));
  return non_instance_proto;
}
217
is_compiled()218 bool JSFunction::is_compiled() const {
219 return code(kAcquireLoad).builtin_id() != Builtin::kCompileLazy &&
220 shared().is_compiled();
221 }
222
// Decides (possibly on a concurrent GC thread) whether this function's
// baseline code should be flushed under the given flushing mode.
bool JSFunction::ShouldFlushBaselineCode(
    base::EnumSet<CodeFlushMode> code_flush_mode) {
  if (!IsBaselineCodeFlushingEnabled(code_flush_mode)) return false;
  // Do a raw read for shared and code fields here since this function may be
  // called on a concurrent thread. JSFunction itself should be fully
  // initialized here but the SharedFunctionInfo, Code objects may not be
  // initialized. We read using acquire loads to defend against that.
  Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
  if (!maybe_shared.IsSharedFunctionInfo()) return false;

  // See crbug.com/v8/11972 for more details on acquire / release semantics for
  // code field. We don't use release stores when copying code pointers from
  // SFI / FV to JSFunction but it is safe in practice.
  Object maybe_code = ACQUIRE_READ_FIELD(*this, kCodeOffset);
  if (!maybe_code.IsCodeT()) return false;
  CodeT code = CodeT::cast(maybe_code);
  // Only baseline code is eligible for this flushing path.
  if (code.kind() != CodeKind::BASELINE) return false;

  SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
  return shared.ShouldFlushCode(code_flush_mode);
}
244
// Detects (possibly on a concurrent thread) the inconsistent state left after
// bytecode flushing: the SFI is no longer compiled, but this JSFunction still
// points at real code instead of the CompileLazy builtin.
bool JSFunction::NeedsResetDueToFlushedBytecode() {
  // Do a raw read for shared and code fields here since this function may be
  // called on a concurrent thread. JSFunction itself should be fully
  // initialized here but the SharedFunctionInfo, Code objects may not be
  // initialized. We read using acquire loads to defend against that.
  Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
  if (!maybe_shared.IsSharedFunctionInfo()) return false;

  Object maybe_code = ACQUIRE_READ_FIELD(*this, kCodeOffset);
  if (!maybe_code.IsCodeT()) return false;
  CodeT code = CodeT::cast(maybe_code);

  SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
  return !shared.is_compiled() && code.builtin_id() != Builtin::kCompileLazy;
}
260
NeedsResetDueToFlushedBaselineCode()261 bool JSFunction::NeedsResetDueToFlushedBaselineCode() {
262 return code().kind() == CodeKind::BASELINE && !shared().HasBaselineCode();
263 }
264
// Repairs this JSFunction after code flushing: reinstalls CompileLazy (and
// clears the feedback vector) when bytecode was flushed, or reinstalls the
// interpreter trampoline when only baseline code was flushed.
// gc_notify_updated_slot, when provided, is invoked for slots the reset
// rewrites so the GC can record them.
void JSFunction::ResetIfCodeFlushed(
    base::Optional<std::function<void(HeapObject object, ObjectSlot slot,
                                      HeapObject target)>>
        gc_notify_updated_slot) {
  // Fast path: nothing to do when no flushing mode is enabled.
  const bool kBytecodeCanFlush = FLAG_flush_bytecode || FLAG_stress_snapshot;
  const bool kBaselineCodeCanFlush =
      FLAG_flush_baseline_code || FLAG_stress_snapshot;
  if (!kBytecodeCanFlush && !kBaselineCodeCanFlush) return;

  DCHECK_IMPLIES(NeedsResetDueToFlushedBytecode(), kBytecodeCanFlush);
  if (kBytecodeCanFlush && NeedsResetDueToFlushedBytecode()) {
    // Bytecode was flushed and function is now uncompiled, reset JSFunction
    // by setting code to CompileLazy and clearing the feedback vector.
    set_code(*BUILTIN_CODE(GetIsolate(), CompileLazy));
    raw_feedback_cell().reset_feedback_vector(gc_notify_updated_slot);
    return;
  }

  DCHECK_IMPLIES(NeedsResetDueToFlushedBaselineCode(), kBaselineCodeCanFlush);
  if (kBaselineCodeCanFlush && NeedsResetDueToFlushedBaselineCode()) {
    // Flush baseline code from the closure if required
    set_code(*BUILTIN_CODE(GetIsolate(), InterpreterEntryTrampoline));
  }
}
289
290 } // namespace internal
291 } // namespace v8
292
293 #include "src/objects/object-macros-undef.h"
294
295 #endif // V8_OBJECTS_JS_FUNCTION_INL_H_
296