// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_
#define V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_

#include "src/base/macros.h"
#include "src/base/platform/mutex.h"
#include "src/codegen/optimized-compilation-info.h"
#include "src/common/globals.h"
#include "src/handles/handles-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/feedback-vector-inl.h"
#include "src/objects/scope-info-inl.h"
#include "src/objects/script-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/templates-inl.h"

#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects.h"
#endif  // V8_ENABLE_WEBASSEMBLY

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#include "torque-generated/src/objects/shared-function-info-tq-inl.inc"

TQ_OBJECT_CONSTRUCTORS_IMPL(PreparseData)

int PreparseData::inner_start_offset() const {
  return InnerOffset(data_length());
}

ObjectSlot PreparseData::inner_data_start() const {
  return RawField(inner_start_offset());
}

void PreparseData::clear_padding() {
  int data_end_offset = kDataStartOffset + data_length();
  int padding_size = inner_start_offset() - data_end_offset;
  DCHECK_LE(0, padding_size);
  if (padding_size == 0) return;
  memset(reinterpret_cast<void*>(address() + data_end_offset), 0, padding_size);
}

byte PreparseData::get(int index) const {
  DCHECK_LE(0, index);
  DCHECK_LT(index, data_length());
  int offset = kDataStartOffset + index * kByteSize;
  return ReadField<byte>(offset);
}

void PreparseData::set(int index, byte value) {
  DCHECK_LE(0, index);
  DCHECK_LT(index, data_length());
  int offset = kDataStartOffset + index * kByteSize;
  WriteField<byte>(offset, value);
}

void PreparseData::copy_in(int index, const byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->data_length());
  Address dst_addr = field_address(kDataStartOffset + index * kByteSize);
  memcpy(reinterpret_cast<void*>(dst_addr), buffer, length);
}

PreparseData PreparseData::get_child(int index) const {
  return PreparseData::cast(get_child_raw(index));
}

Object PreparseData::get_child_raw(int index) const {
  DCHECK_LE(0, index);
  DCHECK_LT(index, this->children_length());
  int offset = inner_start_offset() + index * kTaggedSize;
  return RELAXED_READ_FIELD(*this, offset);
}

void PreparseData::set_child(int index, PreparseData value,
                             WriteBarrierMode mode) {
  DCHECK_LE(0, index);
  DCHECK_LT(index, this->children_length());
  int offset = inner_start_offset() + index * kTaggedSize;
  RELAXED_WRITE_FIELD(*this, offset, value);
  CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
}
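
// Layout note (informal, derived from the accessors above): a PreparseData
// object stores data_length() raw bytes starting at kDataStartOffset,
// followed by zero padding up to inner_start_offset() (written by
// clear_padding()), followed by children_length() tagged pointers to child
// PreparseData objects, which get_child()/set_child() access.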

TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledData)
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithoutPreparseData)
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithPreparseData)
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithoutPreparseDataWithJob)
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithPreparseDataAndJob)

TQ_OBJECT_CONSTRUCTORS_IMPL(InterpreterData)
TQ_OBJECT_CONSTRUCTORS_IMPL(SharedFunctionInfo)
NEVER_READ_ONLY_SPACE_IMPL(SharedFunctionInfo)
DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)

RELEASE_ACQUIRE_ACCESSORS(SharedFunctionInfo, function_data, Object,
                          kFunctionDataOffset)
RELEASE_ACQUIRE_ACCESSORS(SharedFunctionInfo, name_or_scope_info, Object,
                          kNameOrScopeInfoOffset)
RELEASE_ACQUIRE_ACCESSORS(SharedFunctionInfo, script_or_debug_info, HeapObject,
                          kScriptOrDebugInfoOffset)
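
// Note (informal summary, based on the accessors further down in this file):
// function_data is a multiplexed field. Depending on the compilation state it
// may hold a Smi builtin id, a BytecodeArray, an InterpreterData, baseline
// CodeT, one of the UncompiledData variants, a FunctionTemplateInfo (API
// functions), or one of the WebAssembly *FunctionData objects. The Has*/get_*
// predicates and accessors below dispatch on this state.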

RENAME_TORQUE_ACCESSORS(SharedFunctionInfo,
                        raw_outer_scope_info_or_feedback_metadata,
                        outer_scope_info_or_feedback_metadata, HeapObject)
DEF_ACQUIRE_GETTER(SharedFunctionInfo,
                   raw_outer_scope_info_or_feedback_metadata, HeapObject) {
  HeapObject value =
      TaggedField<HeapObject, kOuterScopeInfoOrFeedbackMetadataOffset>::
          Acquire_Load(cage_base, *this);
  return value;
}

uint16_t SharedFunctionInfo::internal_formal_parameter_count_with_receiver()
    const {
  const uint16_t param_count = TorqueGeneratedClass::formal_parameter_count();
  return param_count;
}

uint16_t SharedFunctionInfo::internal_formal_parameter_count_without_receiver()
    const {
  const uint16_t param_count = TorqueGeneratedClass::formal_parameter_count();
  if (param_count == kDontAdaptArgumentsSentinel) return param_count;
  return param_count - kJSArgcReceiverSlots;
}
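
// Informal example of the convention above: assuming kJSArgcReceiverSlots is 1
// (the receiver occupies one implicit slot), a declaration like
// `function f(a, b) {}` has a with-receiver count of 3 and a without-receiver
// count of 2. kDontAdaptArgumentsSentinel is passed through unchanged so that
// callers can still detect the "don't adapt arguments" state.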

void SharedFunctionInfo::set_internal_formal_parameter_count(int value) {
  DCHECK_EQ(value, static_cast<uint16_t>(value));
  DCHECK_GE(value, kJSArgcReceiverSlots);
  TorqueGeneratedClass::set_formal_parameter_count(value);
}

RENAME_PRIMITIVE_TORQUE_ACCESSORS(SharedFunctionInfo, raw_function_token_offset,
                                  function_token_offset, uint16_t)

RELAXED_INT32_ACCESSORS(SharedFunctionInfo, flags, kFlagsOffset)
int32_t SharedFunctionInfo::relaxed_flags() const {
  return flags(kRelaxedLoad);
}
void SharedFunctionInfo::set_relaxed_flags(int32_t flags) {
  return set_flags(flags, kRelaxedStore);
}

UINT8_ACCESSORS(SharedFunctionInfo, flags2, kFlags2Offset)

bool SharedFunctionInfo::HasSharedName() const {
  Object value = name_or_scope_info(kAcquireLoad);
  if (value.IsScopeInfo()) {
    return ScopeInfo::cast(value).HasSharedFunctionName();
  }
  return value != kNoSharedNameSentinel;
}

String SharedFunctionInfo::Name() const {
  if (!HasSharedName()) return GetReadOnlyRoots().empty_string();
  Object value = name_or_scope_info(kAcquireLoad);
  if (value.IsScopeInfo()) {
    if (ScopeInfo::cast(value).HasFunctionName()) {
      return String::cast(ScopeInfo::cast(value).FunctionName());
    }
    return GetReadOnlyRoots().empty_string();
  }
  return String::cast(value);
}

void SharedFunctionInfo::SetName(String name) {
  Object maybe_scope_info = name_or_scope_info(kAcquireLoad);
  if (maybe_scope_info.IsScopeInfo()) {
    ScopeInfo::cast(maybe_scope_info).SetFunctionName(name);
  } else {
    DCHECK(maybe_scope_info.IsString() ||
           maybe_scope_info == kNoSharedNameSentinel);
    set_name_or_scope_info(name, kReleaseStore);
  }
  UpdateFunctionMapIndex();
}

bool SharedFunctionInfo::is_script() const {
  return scope_info(kAcquireLoad).is_script_scope() &&
         Script::cast(script()).compilation_type() ==
             Script::COMPILATION_TYPE_HOST;
}

bool SharedFunctionInfo::needs_script_context() const {
  return is_script() && scope_info(kAcquireLoad).ContextLocalCount() > 0;
}

template <typename IsolateT>
AbstractCode SharedFunctionInfo::abstract_code(IsolateT* isolate) {
  // TODO(v8:11429): Decide whether this should return bytecode or baseline
  // code when the latter is present.
  if (HasBytecodeArray()) {
    return AbstractCode::cast(GetBytecodeArray(isolate));
  } else {
    return AbstractCode::cast(FromCodeT(GetCode()));
  }
}

int SharedFunctionInfo::function_token_position() const {
  int offset = raw_function_token_offset();
  if (offset == kFunctionTokenOutOfRange) {
    return kNoSourcePosition;
  } else {
    return StartPosition() - offset;
  }
}
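
// Informal example of the encoding above: the `function` token position is
// stored as an offset backwards from StartPosition(). If StartPosition() is
// 120 and the token was at source position 100, the raw offset is 20. Offsets
// that do not fit in the field are stored as kFunctionTokenOutOfRange and
// read back here as kNoSourcePosition.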

template <typename IsolateT>
bool SharedFunctionInfo::AreSourcePositionsAvailable(IsolateT* isolate) const {
  if (FLAG_enable_lazy_source_positions) {
    return !HasBytecodeArray() ||
           GetBytecodeArray(isolate).HasSourcePositionTable();
  }
  return true;
}

template <typename IsolateT>
SharedFunctionInfo::Inlineability SharedFunctionInfo::GetInlineability(
    IsolateT* isolate) const {
  if (!script().IsScript()) return kHasNoScript;

  if (GetIsolate()->is_precise_binary_code_coverage() &&
      !has_reported_binary_coverage()) {
    // We may miss invocations if this function is inlined.
    return kNeedsBinaryCoverage;
  }

  // Built-in functions are handled by the JSCallReducer.
  if (HasBuiltinId()) return kIsBuiltin;

  if (!IsUserJavaScript()) return kIsNotUserCode;

  // If there is no bytecode array, it is either not compiled or it is compiled
  // with WebAssembly for the asm.js pipeline. In either case we don't want to
  // inline.
  if (!HasBytecodeArray()) return kHasNoBytecode;

  if (GetBytecodeArray(isolate).length() > FLAG_max_inlined_bytecode_size) {
    return kExceedsBytecodeLimit;
  }

  if (HasBreakInfo()) return kMayContainBreakPoints;

  if (optimization_disabled()) return kHasOptimizationDisabled;

  return kIsInlineable;
}
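
// Minimal usage sketch (informal; assumes a SharedFunctionInfo `sfi` and an
// Isolate* `isolate` are in scope):
//
//   if (sfi.GetInlineability(isolate) == SharedFunctionInfo::kIsInlineable) {
//     // Candidate for inlining; any other enum value names the reason the
//     // function was rejected.
//   }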

BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags2, class_scope_has_private_brand,
                    SharedFunctionInfo::ClassScopeHasPrivateBrandBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags2,
                    has_static_private_methods_or_accessors,
                    SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags2, maglev_compilation_failed,
                    SharedFunctionInfo::MaglevCompilationFailedBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, syntax_kind,
                    SharedFunctionInfo::FunctionSyntaxKindBits)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, allows_lazy_compilation,
                    SharedFunctionInfo::AllowLazyCompilationBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, has_duplicate_parameters,
                    SharedFunctionInfo::HasDuplicateParametersBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, native,
                    SharedFunctionInfo::IsNativeBit)
#if V8_ENABLE_WEBASSEMBLY
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, is_asm_wasm_broken,
                    SharedFunctionInfo::IsAsmWasmBrokenBit)
#endif  // V8_ENABLE_WEBASSEMBLY
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    requires_instance_members_initializer,
                    SharedFunctionInfo::RequiresInstanceMembersInitializerBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    name_should_print_as_anonymous,
                    SharedFunctionInfo::NameShouldPrintAsAnonymousBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    has_reported_binary_coverage,
                    SharedFunctionInfo::HasReportedBinaryCoverageBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, is_toplevel,
                    SharedFunctionInfo::IsTopLevelBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, properties_are_final,
                    SharedFunctionInfo::PropertiesAreFinalBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    private_name_lookup_skips_outer_class,
                    SharedFunctionInfo::PrivateNameLookupSkipsOuterClassBit)

bool SharedFunctionInfo::optimization_disabled() const {
  return disabled_optimization_reason() != BailoutReason::kNoReason;
}

BailoutReason SharedFunctionInfo::disabled_optimization_reason() const {
  return DisabledOptimizationReasonBits::decode(flags(kRelaxedLoad));
}

OSRCodeCacheStateOfSFI SharedFunctionInfo::osr_code_cache_state() const {
  return OsrCodeCacheStateBits::decode(flags(kRelaxedLoad));
}

void SharedFunctionInfo::set_osr_code_cache_state(
    OSRCodeCacheStateOfSFI state) {
  int hints = flags(kRelaxedLoad);
  hints = OsrCodeCacheStateBits::update(hints, state);
  set_flags(hints, kRelaxedStore);
}

LanguageMode SharedFunctionInfo::language_mode() const {
  STATIC_ASSERT(LanguageModeSize == 2);
  return construct_language_mode(IsStrictBit::decode(flags(kRelaxedLoad)));
}

void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  STATIC_ASSERT(LanguageModeSize == 2);
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
  int hints = flags(kRelaxedLoad);
  hints = IsStrictBit::update(hints, is_strict(language_mode));
  set_flags(hints, kRelaxedStore);
  UpdateFunctionMapIndex();
}

FunctionKind SharedFunctionInfo::kind() const {
  STATIC_ASSERT(FunctionKindBits::kSize == kFunctionKindBitSize);
  return FunctionKindBits::decode(flags(kRelaxedLoad));
}

void SharedFunctionInfo::set_kind(FunctionKind kind) {
  int hints = flags(kRelaxedLoad);
  hints = FunctionKindBits::update(hints, kind);
  hints = IsClassConstructorBit::update(hints, IsClassConstructor(kind));
  set_flags(hints, kRelaxedStore);
  UpdateFunctionMapIndex();
}

bool SharedFunctionInfo::is_wrapped() const {
  return syntax_kind() == FunctionSyntaxKind::kWrapped;
}

bool SharedFunctionInfo::construct_as_builtin() const {
  return ConstructAsBuiltinBit::decode(flags(kRelaxedLoad));
}

void SharedFunctionInfo::CalculateConstructAsBuiltin() {
  bool uses_builtins_construct_stub = false;
  if (HasBuiltinId()) {
    Builtin id = builtin_id();
    if (id != Builtin::kCompileLazy && id != Builtin::kEmptyFunction) {
      uses_builtins_construct_stub = true;
    }
  } else if (IsApiFunction()) {
    uses_builtins_construct_stub = true;
  }

  int f = flags(kRelaxedLoad);
  f = ConstructAsBuiltinBit::update(f, uses_builtins_construct_stub);
  set_flags(f, kRelaxedStore);
}

int SharedFunctionInfo::function_map_index() const {
  // Note: Must be kept in sync with the FastNewClosure builtin.
  int index = Context::FIRST_FUNCTION_MAP_INDEX +
              FunctionMapIndexBits::decode(flags(kRelaxedLoad));
  DCHECK_LE(index, Context::LAST_FUNCTION_MAP_INDEX);
  return index;
}

void SharedFunctionInfo::set_function_map_index(int index) {
  STATIC_ASSERT(Context::LAST_FUNCTION_MAP_INDEX <=
                Context::FIRST_FUNCTION_MAP_INDEX + FunctionMapIndexBits::kMax);
  DCHECK_LE(Context::FIRST_FUNCTION_MAP_INDEX, index);
  DCHECK_LE(index, Context::LAST_FUNCTION_MAP_INDEX);
  index -= Context::FIRST_FUNCTION_MAP_INDEX;
  set_flags(FunctionMapIndexBits::update(flags(kRelaxedLoad), index),
            kRelaxedStore);
}

void SharedFunctionInfo::clear_padding() {
  memset(reinterpret_cast<void*>(this->address() + kSize), 0,
         kAlignedSize - kSize);
}

void SharedFunctionInfo::UpdateFunctionMapIndex() {
  int map_index =
      Context::FunctionMapIndex(language_mode(), kind(), HasSharedName());
  set_function_map_index(map_index);
}

void SharedFunctionInfo::DontAdaptArguments() {
#if V8_ENABLE_WEBASSEMBLY
  // TODO(leszeks): Revise this DCHECK now that the code field is gone.
  DCHECK(!HasWasmExportedFunctionData());
#endif  // V8_ENABLE_WEBASSEMBLY
  TorqueGeneratedClass::set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}

bool SharedFunctionInfo::IsDontAdaptArguments() const {
  return TorqueGeneratedClass::formal_parameter_count() ==
         kDontAdaptArgumentsSentinel;
}

DEF_ACQUIRE_GETTER(SharedFunctionInfo, scope_info, ScopeInfo) {
  Object maybe_scope_info = name_or_scope_info(cage_base, kAcquireLoad);
  if (maybe_scope_info.IsScopeInfo(cage_base)) {
    return ScopeInfo::cast(maybe_scope_info);
  }
  return GetReadOnlyRoots().empty_scope_info();
}

DEF_GETTER(SharedFunctionInfo, scope_info, ScopeInfo) {
  return scope_info(cage_base, kAcquireLoad);
}

void SharedFunctionInfo::SetScopeInfo(ScopeInfo scope_info,
                                      WriteBarrierMode mode) {
  // Move the existing name onto the ScopeInfo.
  Object name = name_or_scope_info(kAcquireLoad);
  if (name.IsScopeInfo()) {
    name = ScopeInfo::cast(name).FunctionName();
  }
  DCHECK(name.IsString() || name == kNoSharedNameSentinel);
  // Only set the function name for function scopes.
  scope_info.SetFunctionName(name);
  if (HasInferredName() && inferred_name().length() != 0) {
    scope_info.SetInferredFunctionName(inferred_name());
  }
  set_name_or_scope_info(scope_info, kReleaseStore, mode);
}

void SharedFunctionInfo::set_raw_scope_info(ScopeInfo scope_info,
                                            WriteBarrierMode mode) {
  WRITE_FIELD(*this, kNameOrScopeInfoOffset, scope_info);
  CONDITIONAL_WRITE_BARRIER(*this, kNameOrScopeInfoOffset, scope_info, mode);
}

HeapObject SharedFunctionInfo::outer_scope_info() const {
  DCHECK(!is_compiled());
  DCHECK(!HasFeedbackMetadata());
  return raw_outer_scope_info_or_feedback_metadata();
}

bool SharedFunctionInfo::HasOuterScopeInfo() const {
  ScopeInfo outer_info;
  if (!is_compiled()) {
    if (!outer_scope_info().IsScopeInfo()) return false;
    outer_info = ScopeInfo::cast(outer_scope_info());
  } else {
    ScopeInfo info = scope_info(kAcquireLoad);
    if (!info.HasOuterScopeInfo()) return false;
    outer_info = info.OuterScopeInfo();
  }
  return !outer_info.IsEmpty();
}

ScopeInfo SharedFunctionInfo::GetOuterScopeInfo() const {
  DCHECK(HasOuterScopeInfo());
  if (!is_compiled()) return ScopeInfo::cast(outer_scope_info());
  return scope_info(kAcquireLoad).OuterScopeInfo();
}

void SharedFunctionInfo::set_outer_scope_info(HeapObject value,
                                              WriteBarrierMode mode) {
  DCHECK(!is_compiled());
  DCHECK(raw_outer_scope_info_or_feedback_metadata().IsTheHole());
  DCHECK(value.IsScopeInfo() || value.IsTheHole());
  set_raw_outer_scope_info_or_feedback_metadata(value, mode);
}

bool SharedFunctionInfo::HasFeedbackMetadata() const {
  return raw_outer_scope_info_or_feedback_metadata().IsFeedbackMetadata();
}

bool SharedFunctionInfo::HasFeedbackMetadata(AcquireLoadTag tag) const {
  return raw_outer_scope_info_or_feedback_metadata(tag).IsFeedbackMetadata();
}

FeedbackMetadata SharedFunctionInfo::feedback_metadata() const {
  DCHECK(HasFeedbackMetadata());
  return FeedbackMetadata::cast(raw_outer_scope_info_or_feedback_metadata());
}

RELEASE_ACQUIRE_ACCESSORS_CHECKED2(SharedFunctionInfo, feedback_metadata,
                                   FeedbackMetadata,
                                   kOuterScopeInfoOrFeedbackMetadataOffset,
                                   HasFeedbackMetadata(kAcquireLoad),
                                   !HasFeedbackMetadata(kAcquireLoad) &&
                                       value.IsFeedbackMetadata())

bool SharedFunctionInfo::is_compiled() const {
  Object data = function_data(kAcquireLoad);
  return data != Smi::FromEnum(Builtin::kCompileLazy) &&
         !data.IsUncompiledData();
}

template <typename IsolateT>
IsCompiledScope SharedFunctionInfo::is_compiled_scope(IsolateT* isolate) const {
  return IsCompiledScope(*this, isolate);
}

IsCompiledScope::IsCompiledScope(const SharedFunctionInfo shared,
                                 Isolate* isolate)
    : is_compiled_(shared.is_compiled()) {
  if (shared.HasBaselineCode()) {
    retain_code_ = handle(shared.baseline_code(kAcquireLoad), isolate);
  } else if (shared.HasBytecodeArray()) {
    retain_code_ = handle(shared.GetBytecodeArray(isolate), isolate);
  } else {
    retain_code_ = MaybeHandle<HeapObject>();
  }

  DCHECK_IMPLIES(!retain_code_.is_null(), is_compiled());
}

IsCompiledScope::IsCompiledScope(const SharedFunctionInfo shared,
                                 LocalIsolate* isolate)
    : is_compiled_(shared.is_compiled()) {
  if (shared.HasBaselineCode()) {
    retain_code_ = isolate->heap()->NewPersistentHandle(
        shared.baseline_code(kAcquireLoad));
  } else if (shared.HasBytecodeArray()) {
    retain_code_ =
        isolate->heap()->NewPersistentHandle(shared.GetBytecodeArray(isolate));
  } else {
    retain_code_ = MaybeHandle<HeapObject>();
  }

  DCHECK_IMPLIES(!retain_code_.is_null(), is_compiled());
}
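
// Informal usage sketch: an IsCompiledScope holds a handle to the bytecode or
// baseline code, so a SharedFunctionInfo that was compiled when the scope was
// created cannot be flushed while the scope is live.
//
//   IsCompiledScope is_compiled_scope = shared.is_compiled_scope(isolate);
//   if (is_compiled_scope.is_compiled()) {
//     // Safe to rely on the bytecode staying around within this scope.
//   }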

bool SharedFunctionInfo::has_simple_parameters() {
  return scope_info(kAcquireLoad).HasSimpleParameters();
}

bool SharedFunctionInfo::CanCollectSourcePosition(Isolate* isolate) {
  return FLAG_enable_lazy_source_positions && HasBytecodeArray() &&
         !GetBytecodeArray(isolate).HasSourcePositionTable();
}

bool SharedFunctionInfo::IsApiFunction() const {
  return function_data(kAcquireLoad).IsFunctionTemplateInfo();
}

FunctionTemplateInfo SharedFunctionInfo::get_api_func_data() const {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data(kAcquireLoad));
}

bool SharedFunctionInfo::HasBytecodeArray() const {
  Object data = function_data(kAcquireLoad);
  return data.IsBytecodeArray() || data.IsInterpreterData() || data.IsCodeT();
}

template <typename IsolateT>
BytecodeArray SharedFunctionInfo::GetBytecodeArray(IsolateT* isolate) const {
  SharedMutexGuardIfOffThread<IsolateT, base::kShared> mutex_guard(
      GetIsolate()->shared_function_info_access(), isolate);

  DCHECK(HasBytecodeArray());
  if (HasDebugInfo() && GetDebugInfo().HasInstrumentedBytecodeArray()) {
    return GetDebugInfo().OriginalBytecodeArray();
  }

  return GetActiveBytecodeArray();
}

BytecodeArray SharedFunctionInfo::GetActiveBytecodeArray() const {
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    CodeT baseline_code = CodeT::cast(data);
    data = baseline_code.bytecode_or_interpreter_data();
  }
  if (data.IsBytecodeArray()) {
    return BytecodeArray::cast(data);
  } else {
    DCHECK(data.IsInterpreterData());
    return InterpreterData::cast(data).bytecode_array();
  }
}

void SharedFunctionInfo::SetActiveBytecodeArray(BytecodeArray bytecode) {
  // We don't allow setting the active bytecode array on baseline-optimized
  // functions. They should have been flushed earlier.
  DCHECK(!HasBaselineCode());

  Object data = function_data(kAcquireLoad);
  if (data.IsBytecodeArray()) {
    set_function_data(bytecode, kReleaseStore);
  } else {
    DCHECK(data.IsInterpreterData());
    interpreter_data().set_bytecode_array(bytecode);
  }
}

void SharedFunctionInfo::set_bytecode_array(BytecodeArray bytecode) {
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy) ||
         HasUncompiledData());
  set_function_data(bytecode, kReleaseStore);
}

bool SharedFunctionInfo::ShouldFlushCode(
    base::EnumSet<CodeFlushMode> code_flush_mode) {
  if (IsFlushingDisabled(code_flush_mode)) return false;

  // TODO(rmcilroy): Enable bytecode flushing for resumable functions.
  if (IsResumableFunction(kind()) || !allows_lazy_compilation()) {
    return false;
  }

  // Get a snapshot of the function data field, and if it is a bytecode array,
  // check whether it is old. Note that this is done this way because this
  // function can be called by the concurrent marker.
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    CodeT baseline_code = CodeT::cast(data);
    DCHECK_EQ(baseline_code.kind(), CodeKind::BASELINE);
    // If baseline code flushing isn't enabled and the SFI holds baseline code,
    // we can flush neither the baseline code nor the bytecode.
    if (!IsBaselineCodeFlushingEnabled(code_flush_mode)) return false;
    data = baseline_code.bytecode_or_interpreter_data();
  } else if (!IsByteCodeFlushingEnabled(code_flush_mode)) {
    // If bytecode flushing isn't enabled and there is no baseline code, there
    // is nothing to flush.
    return false;
  }
  if (!data.IsBytecodeArray()) return false;

  if (IsStressFlushingEnabled(code_flush_mode)) return true;

  BytecodeArray bytecode = BytecodeArray::cast(data);

  return bytecode.IsOld();
}
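
// Informal summary of the decision above: flushing is only considered for
// non-resumable functions that allow lazy compilation. Baseline code requires
// baseline flushing to be enabled; plain bytecode requires bytecode flushing
// to be enabled. Stress mode flushes unconditionally; otherwise only bytecode
// that has become old is flushed.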

CodeT SharedFunctionInfo::InterpreterTrampoline() const {
  DCHECK(HasInterpreterData());
  return interpreter_data().interpreter_trampoline();
}

bool SharedFunctionInfo::HasInterpreterData() const {
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    CodeT baseline_code = CodeT::cast(data);
    DCHECK_EQ(baseline_code.kind(), CodeKind::BASELINE);
    data = baseline_code.bytecode_or_interpreter_data();
  }
  return data.IsInterpreterData();
}

InterpreterData SharedFunctionInfo::interpreter_data() const {
  DCHECK(HasInterpreterData());
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    CodeT baseline_code = CodeT::cast(data);
    DCHECK_EQ(baseline_code.kind(), CodeKind::BASELINE);
    data = baseline_code.bytecode_or_interpreter_data();
  }
  return InterpreterData::cast(data);
}

void SharedFunctionInfo::set_interpreter_data(
    InterpreterData interpreter_data) {
  DCHECK(FLAG_interpreted_frames_native_stack);
  DCHECK(!HasBaselineCode());
  set_function_data(interpreter_data, kReleaseStore);
}

bool SharedFunctionInfo::HasBaselineCode() const {
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    DCHECK_EQ(CodeT::cast(data).kind(), CodeKind::BASELINE);
    return true;
  }
  return false;
}

CodeT SharedFunctionInfo::baseline_code(AcquireLoadTag) const {
  DCHECK(HasBaselineCode());
  return CodeT::cast(function_data(kAcquireLoad));
}

void SharedFunctionInfo::set_baseline_code(CodeT baseline_code,
                                           ReleaseStoreTag) {
  DCHECK_EQ(baseline_code.kind(), CodeKind::BASELINE);
  set_function_data(baseline_code, kReleaseStore);
}

void SharedFunctionInfo::FlushBaselineCode() {
  DCHECK(HasBaselineCode());
  set_function_data(baseline_code(kAcquireLoad).bytecode_or_interpreter_data(),
                    kReleaseStore);
}

#if V8_ENABLE_WEBASSEMBLY
bool SharedFunctionInfo::HasAsmWasmData() const {
  return function_data(kAcquireLoad).IsAsmWasmData();
}

bool SharedFunctionInfo::HasWasmExportedFunctionData() const {
  return function_data(kAcquireLoad).IsWasmExportedFunctionData();
}

bool SharedFunctionInfo::HasWasmJSFunctionData() const {
  return function_data(kAcquireLoad).IsWasmJSFunctionData();
}

bool SharedFunctionInfo::HasWasmCapiFunctionData() const {
  return function_data(kAcquireLoad).IsWasmCapiFunctionData();
}

bool SharedFunctionInfo::HasWasmOnFulfilledData() const {
  return function_data(kAcquireLoad).IsWasmOnFulfilledData();
}

AsmWasmData SharedFunctionInfo::asm_wasm_data() const {
  DCHECK(HasAsmWasmData());
  return AsmWasmData::cast(function_data(kAcquireLoad));
}

void SharedFunctionInfo::set_asm_wasm_data(AsmWasmData data) {
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy) ||
         HasUncompiledData() || HasAsmWasmData());
  set_function_data(data, kReleaseStore);
}

const wasm::WasmModule* SharedFunctionInfo::wasm_module() const {
  if (!HasWasmExportedFunctionData()) return nullptr;
  const WasmExportedFunctionData& function_data = wasm_exported_function_data();
  const WasmInstanceObject& wasm_instance = function_data.instance();
  const WasmModuleObject& wasm_module_object = wasm_instance.module_object();
  return wasm_module_object.module();
}

const wasm::FunctionSig* SharedFunctionInfo::wasm_function_signature() const {
  const wasm::WasmModule* module = wasm_module();
  if (!module) return nullptr;
  const WasmExportedFunctionData& function_data = wasm_exported_function_data();
  DCHECK_LT(function_data.function_index(), module->functions.size());
  return module->functions[function_data.function_index()].sig;
}
#endif  // V8_ENABLE_WEBASSEMBLY

bool SharedFunctionInfo::HasBuiltinId() const {
  return function_data(kAcquireLoad).IsSmi();
}

Builtin SharedFunctionInfo::builtin_id() const {
  DCHECK(HasBuiltinId());
  int id = Smi::ToInt(function_data(kAcquireLoad));
  DCHECK(Builtins::IsBuiltinId(id));
  return Builtins::FromInt(id);
}

void SharedFunctionInfo::set_builtin_id(Builtin builtin) {
  DCHECK(Builtins::IsBuiltinId(builtin));
  set_function_data(Smi::FromInt(static_cast<int>(builtin)), kReleaseStore,
                    SKIP_WRITE_BARRIER);
}

bool SharedFunctionInfo::HasUncompiledData() const {
  return function_data(kAcquireLoad).IsUncompiledData();
}

UncompiledData SharedFunctionInfo::uncompiled_data() const {
  DCHECK(HasUncompiledData());
  return UncompiledData::cast(function_data(kAcquireLoad));
}

void SharedFunctionInfo::set_uncompiled_data(UncompiledData uncompiled_data) {
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy) ||
         HasUncompiledData());
  DCHECK(uncompiled_data.IsUncompiledData());
  set_function_data(uncompiled_data, kReleaseStore);
}

bool SharedFunctionInfo::HasUncompiledDataWithPreparseData() const {
  return function_data(kAcquireLoad).IsUncompiledDataWithPreparseData();
}

UncompiledDataWithPreparseData
SharedFunctionInfo::uncompiled_data_with_preparse_data() const {
  DCHECK(HasUncompiledDataWithPreparseData());
  return UncompiledDataWithPreparseData::cast(function_data(kAcquireLoad));
}

void SharedFunctionInfo::set_uncompiled_data_with_preparse_data(
    UncompiledDataWithPreparseData uncompiled_data_with_preparse_data) {
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy));
  DCHECK(uncompiled_data_with_preparse_data.IsUncompiledDataWithPreparseData());
  set_function_data(uncompiled_data_with_preparse_data, kReleaseStore);
}

bool SharedFunctionInfo::HasUncompiledDataWithoutPreparseData() const {
  return function_data(kAcquireLoad).IsUncompiledDataWithoutPreparseData();
}

void SharedFunctionInfo::ClearUncompiledDataJobPointer() {
  UncompiledData uncompiled_data = this->uncompiled_data();
  if (uncompiled_data.IsUncompiledDataWithPreparseDataAndJob()) {
    UncompiledDataWithPreparseDataAndJob::cast(uncompiled_data)
        .set_job(kNullAddress);
  } else if (uncompiled_data.IsUncompiledDataWithoutPreparseDataWithJob()) {
    UncompiledDataWithoutPreparseDataWithJob::cast(uncompiled_data)
        .set_job(kNullAddress);
  }
}

void SharedFunctionInfo::ClearPreparseData() {
  DCHECK(HasUncompiledDataWithPreparseData());
  UncompiledDataWithPreparseData data = uncompiled_data_with_preparse_data();

  // Trim off the pre-parsed scope data from the uncompiled data by swapping
  // the map, leaving only an UncompiledData without pre-parsed scope data.
  DisallowGarbageCollection no_gc;
  Heap* heap = GetHeapFromWritableObject(data);

  // Swap the map.
  heap->NotifyObjectLayoutChange(data, no_gc);
  STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize <
                UncompiledDataWithPreparseData::kSize);
  STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize ==
                UncompiledData::kHeaderSize);
  data.set_map(GetReadOnlyRoots().uncompiled_data_without_preparse_data_map(),
               kReleaseStore);

  // Fill the remaining space with filler.
  heap->CreateFillerObjectAt(
      data.address() + UncompiledDataWithoutPreparseData::kSize,
      UncompiledDataWithPreparseData::kSize -
          UncompiledDataWithoutPreparseData::kSize,
      ClearRecordedSlots::kYes);

  // Ensure that the clear was successful.
  DCHECK(HasUncompiledDataWithoutPreparseData());
}

void UncompiledData::InitAfterBytecodeFlush(
    String inferred_name, int start_position, int end_position,
    std::function<void(HeapObject object, ObjectSlot slot, HeapObject target)>
        gc_notify_updated_slot) {
  set_inferred_name(inferred_name);
  gc_notify_updated_slot(*this, RawField(UncompiledData::kInferredNameOffset),
                         inferred_name);
  set_start_position(start_position);
  set_end_position(end_position);
}

DEF_GETTER(SharedFunctionInfo, script, HeapObject) {
  HeapObject maybe_script = script_or_debug_info(cage_base, kAcquireLoad);
  if (maybe_script.IsDebugInfo(cage_base)) {
    return DebugInfo::cast(maybe_script).script();
  }
  return maybe_script;
}

void SharedFunctionInfo::set_script(HeapObject script) {
  HeapObject maybe_debug_info = script_or_debug_info(kAcquireLoad);
  if (maybe_debug_info.IsDebugInfo()) {
    DebugInfo::cast(maybe_debug_info).set_script(script);
  } else {
    set_script_or_debug_info(script, kReleaseStore);
  }
}

bool SharedFunctionInfo::is_repl_mode() const {
  return script().IsScript() && Script::cast(script()).is_repl_mode();
}

bool SharedFunctionInfo::HasDebugInfo() const {
  return script_or_debug_info(kAcquireLoad).IsDebugInfo();
}

DebugInfo SharedFunctionInfo::GetDebugInfo() const {
  auto debug_info = script_or_debug_info(kAcquireLoad);
  DCHECK(debug_info.IsDebugInfo());
  return DebugInfo::cast(debug_info);
}

void SharedFunctionInfo::SetDebugInfo(DebugInfo debug_info) {
  DCHECK(!HasDebugInfo());
  DCHECK_EQ(debug_info.script(), script_or_debug_info(kAcquireLoad));
  set_script_or_debug_info(debug_info, kReleaseStore);
}

bool SharedFunctionInfo::HasInferredName() {
  Object scope_info = name_or_scope_info(kAcquireLoad);
  if (scope_info.IsScopeInfo()) {
    return ScopeInfo::cast(scope_info).HasInferredFunctionName();
  }
  return HasUncompiledData();
}

String SharedFunctionInfo::inferred_name() {
  Object maybe_scope_info = name_or_scope_info(kAcquireLoad);
  if (maybe_scope_info.IsScopeInfo()) {
    ScopeInfo scope_info = ScopeInfo::cast(maybe_scope_info);
    if (scope_info.HasInferredFunctionName()) {
      Object name = scope_info.InferredFunctionName();
      if (name.IsString()) return String::cast(name);
    }
  } else if (HasUncompiledData()) {
    return uncompiled_data().inferred_name();
  }
  return GetReadOnlyRoots().empty_string();
}

bool SharedFunctionInfo::IsUserJavaScript() const {
  Object script_obj = script();
  if (script_obj.IsUndefined()) return false;
  Script script = Script::cast(script_obj);
  return script.IsUserJavaScript();
}

bool SharedFunctionInfo::IsSubjectToDebugging() const {
#if V8_ENABLE_WEBASSEMBLY
  if (HasAsmWasmData()) return false;
#endif  // V8_ENABLE_WEBASSEMBLY
  return IsUserJavaScript();
}

bool SharedFunctionInfo::CanDiscardCompiled() const {
#if V8_ENABLE_WEBASSEMBLY
  if (HasAsmWasmData()) return true;
#endif  // V8_ENABLE_WEBASSEMBLY
  return HasBytecodeArray() || HasUncompiledDataWithPreparseData() ||
         HasBaselineCode();
}

bool SharedFunctionInfo::is_class_constructor() const {
  return IsClassConstructorBit::decode(flags(kRelaxedLoad));
}

void SharedFunctionInfo::set_are_properties_final(bool value) {
  if (is_class_constructor()) {
    set_properties_are_final(value);
  }
}

bool SharedFunctionInfo::are_properties_final() const {
  bool bit = properties_are_final();
  return bit && is_class_constructor();
}

}  // namespace internal
}  // namespace v8

#include "src/base/platform/wrappers.h"
#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_