// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_CODE_INL_H_
#define V8_OBJECTS_CODE_INL_H_

#include "src/objects/code.h"

#include "src/base/memory.h"
#include "src/codegen/code-desc.h"
#include "src/execution/isolate.h"
#include "src/interpreter/bytecode-register.h"
#include "src/objects/dictionary.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/map-inl.h"
#include "src/objects/maybe-object-inl.h"
#include "src/objects/oddball.h"
#include "src/objects/smi-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

OBJECT_CONSTRUCTORS_IMPL(DeoptimizationData, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(BytecodeArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(AbstractCode, HeapObject)
OBJECT_CONSTRUCTORS_IMPL(DependentCode, WeakFixedArray)
OBJECT_CONSTRUCTORS_IMPL(CodeDataContainer, HeapObject)

NEVER_READ_ONLY_SPACE_IMPL(AbstractCode)

CAST_ACCESSOR(AbstractCode)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeDataContainer)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DeoptimizationData)

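// AbstractCode wraps either a Code object or a BytecodeArray; each accessor
// below dispatches on IsCode() and forwards to the matching method of the
// underlying object.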
int AbstractCode::raw_instruction_size() {
  if (IsCode()) {
    return GetCode().raw_instruction_size();
  } else {
    return GetBytecodeArray().length();
  }
}

int AbstractCode::InstructionSize() {
  if (IsCode()) {
    return GetCode().InstructionSize();
  } else {
    return GetBytecodeArray().length();
  }
}

ByteArray AbstractCode::source_position_table() {
  if (IsCode()) {
    return GetCode().SourcePositionTable();
  } else {
    return GetBytecodeArray().SourcePositionTable();
  }
}

int AbstractCode::SizeIncludingMetadata() {
  if (IsCode()) {
    return GetCode().SizeIncludingMetadata();
  } else {
    return GetBytecodeArray().SizeIncludingMetadata();
  }
}

Address AbstractCode::raw_instruction_start() {
  if (IsCode()) {
    return GetCode().raw_instruction_start();
  } else {
    return GetBytecodeArray().GetFirstBytecodeAddress();
  }
}

Address AbstractCode::InstructionStart() {
  if (IsCode()) {
    return GetCode().InstructionStart();
  } else {
    return GetBytecodeArray().GetFirstBytecodeAddress();
  }
}

Address AbstractCode::raw_instruction_end() {
  if (IsCode()) {
    return GetCode().raw_instruction_end();
  } else {
    return GetBytecodeArray().GetFirstBytecodeAddress() +
           GetBytecodeArray().length();
  }
}

Address AbstractCode::InstructionEnd() {
  if (IsCode()) {
    return GetCode().InstructionEnd();
  } else {
    return GetBytecodeArray().GetFirstBytecodeAddress() +
           GetBytecodeArray().length();
  }
}

bool AbstractCode::contains(Address inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

CodeKind AbstractCode::kind() {
  return IsCode() ? GetCode().kind() : CodeKind::INTERPRETED_FUNCTION;
}

Code AbstractCode::GetCode() { return Code::cast(*this); }

BytecodeArray AbstractCode::GetBytecodeArray() {
  return BytecodeArray::cast(*this);
}

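// DependentCode is backed by a WeakFixedArray: a strong next_link at
// kNextLinkIndex, a Smi flags word at kFlagsIndex encoding the dependency
// group and entry count, and the code entries starting at kCodesStartIndex.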
DependentCode DependentCode::next_link() {
  return DependentCode::cast(Get(kNextLinkIndex)->GetHeapObjectAssumeStrong());
}

void DependentCode::set_next_link(DependentCode next) {
  Set(kNextLinkIndex, HeapObjectReference::Strong(next));
}

int DependentCode::flags() { return Smi::ToInt(Get(kFlagsIndex)->ToSmi()); }

void DependentCode::set_flags(int flags) {
  Set(kFlagsIndex, MaybeObject::FromObject(Smi::FromInt(flags)));
}

int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}

DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}

void DependentCode::set_object_at(int i, MaybeObject object) {
  Set(kCodesStartIndex + i, object);
}

MaybeObject DependentCode::object_at(int i) {
  return Get(kCodesStartIndex + i);
}

void DependentCode::clear_at(int i) {
  Set(kCodesStartIndex + i,
      HeapObjectReference::Strong(GetReadOnlyRoots().undefined_value()));
}

void DependentCode::copy(int from, int to) {
  Set(kCodesStartIndex + to, Get(kCodesStartIndex + from));
}

OBJECT_CONSTRUCTORS_IMPL(Code, HeapObject)
NEVER_READ_ONLY_SPACE_IMPL(Code)

INT_ACCESSORS(Code, raw_instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, raw_metadata_size, kMetadataSizeOffset)
INT_ACCESSORS(Code, handler_table_offset, kHandlerTableOffsetOffset)
INT_ACCESSORS(Code, code_comments_offset, kCodeCommentsOffsetOffset)
INT32_ACCESSORS(Code, unwinding_info_offset, kUnwindingInfoOffsetOffset)
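// Pointer fields of Code objects may only reference old-space objects; the
// checked accessors below verify on every store that the value is not in the
// young generation.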
#define CODE_ACCESSORS(name, type, offset)           \
  ACCESSORS_CHECKED2(Code, name, type, offset, true, \
                     !ObjectInYoungGeneration(value))
#define RELEASE_ACQUIRE_CODE_ACCESSORS(name, type, offset)           \
  RELEASE_ACQUIRE_ACCESSORS_CHECKED2(Code, name, type, offset, true, \
                                     !ObjectInYoungGeneration(value))

CODE_ACCESSORS(relocation_info, ByteArray, kRelocationInfoOffset)
CODE_ACCESSORS(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
CODE_ACCESSORS(source_position_table, Object, kSourcePositionTableOffset)
// The concurrent marker needs to access the kind-specific flags in the code
// data container.
RELEASE_ACQUIRE_CODE_ACCESSORS(code_data_container, CodeDataContainer,
                               kCodeDataContainerOffset)
#undef CODE_ACCESSORS
#undef RELEASE_ACQUIRE_CODE_ACCESSORS

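// Overwrites the header's tagged pointer fields with Smi zero so that no
// stale pointers remain in a wiped-out header.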
void Code::WipeOutHeader() {
  WRITE_FIELD(*this, kRelocationInfoOffset, Smi::FromInt(0));
  WRITE_FIELD(*this, kDeoptimizationDataOffset, Smi::FromInt(0));
  WRITE_FIELD(*this, kSourcePositionTableOffset, Smi::FromInt(0));
  WRITE_FIELD(*this, kCodeDataContainerOffset, Smi::FromInt(0));
}

void Code::clear_padding() {
  // Clear the padding between the header and `raw_body_start`.
  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
    memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
           FIELD_SIZE(kOptionalPaddingOffset));
  }

  // Clear the padding after `raw_body_end`.
  size_t trailing_padding_size =
      CodeSize() - Code::kHeaderSize - raw_body_size();
  memset(reinterpret_cast<void*>(raw_body_end()), 0, trailing_padding_size);
}

ByteArray Code::SourcePositionTable() const {
  Object maybe_table = source_position_table();
  if (maybe_table.IsByteArray()) return ByteArray::cast(maybe_table);
  ReadOnlyRoots roots = GetReadOnlyRoots();
  DCHECK(maybe_table.IsUndefined(roots) || maybe_table.IsException(roots));
  return roots.empty_byte_array();
}

Object Code::next_code_link() const {
  return code_data_container(kAcquireLoad).next_code_link();
}

void Code::set_next_code_link(Object value) {
  code_data_container(kAcquireLoad).set_next_code_link(value);
}

Address Code::raw_body_start() const { return raw_instruction_start(); }

Address Code::raw_body_end() const {
  return raw_body_start() + raw_body_size();
}

int Code::raw_body_size() const {
  return raw_instruction_size() + raw_metadata_size();
}

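// The raw_* accessors below always refer to the on-heap code body. For
// off-heap trampolines (builtins whose instructions live in the embedded
// blob), the unprefixed variants redirect to the corresponding OffHeap*
// addresses instead.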
int Code::InstructionSize() const {
  return V8_UNLIKELY(is_off_heap_trampoline()) ? OffHeapInstructionSize()
                                               : raw_instruction_size();
}

Address Code::raw_instruction_start() const {
  return field_address(kHeaderSize);
}

Address Code::InstructionStart() const {
  return V8_UNLIKELY(is_off_heap_trampoline()) ? OffHeapInstructionStart()
                                               : raw_instruction_start();
}

Address Code::raw_instruction_end() const {
  return raw_instruction_start() + raw_instruction_size();
}

Address Code::InstructionEnd() const {
  return V8_UNLIKELY(is_off_heap_trampoline()) ? OffHeapInstructionEnd()
                                               : raw_instruction_end();
}

Address Code::raw_metadata_start() const {
  return raw_instruction_start() + raw_instruction_size();
}

Address Code::MetadataStart() const {
  STATIC_ASSERT(kOnHeapBodyIsContiguous);
  return V8_UNLIKELY(is_off_heap_trampoline()) ? OffHeapMetadataStart()
                                               : raw_metadata_start();
}

Address Code::raw_metadata_end() const {
  return raw_metadata_start() + raw_metadata_size();
}

Address Code::MetadataEnd() const {
  return V8_UNLIKELY(is_off_heap_trampoline()) ? OffHeapMetadataEnd()
                                               : raw_metadata_end();
}

int Code::MetadataSize() const {
  return V8_UNLIKELY(is_off_heap_trampoline()) ? OffHeapMetadataSize()
                                               : raw_metadata_size();
}

int Code::SizeIncludingMetadata() const {
  int size = CodeSize();
  size += relocation_info().Size();
  size += deoptimization_data().Size();
  return size;
}

ByteArray Code::unchecked_relocation_info() const {
  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
  return ByteArray::unchecked_cast(
      TaggedField<HeapObject, kRelocationInfoOffset>::load(isolate, *this));
}

byte* Code::relocation_start() const {
  return unchecked_relocation_info().GetDataStartAddress();
}

byte* Code::relocation_end() const {
  return unchecked_relocation_info().GetDataEndAddress();
}

int Code::relocation_size() const {
  return unchecked_relocation_info().length();
}

Address Code::entry() const { return raw_instruction_start(); }

bool Code::contains(Address inner_pointer) {
  if (is_off_heap_trampoline()) {
    if (OffHeapInstructionStart() <= inner_pointer &&
        inner_pointer < OffHeapInstructionEnd()) {
      return true;
    }
  }
  return (address() <= inner_pointer) && (inner_pointer < address() + Size());
}

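// Relocation info is emitted backwards from the end of the code buffer, so
// the source of the copy is the last desc.reloc_size bytes of desc.buffer.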
// static
void Code::CopyRelocInfoToByteArray(ByteArray dest, const CodeDesc& desc) {
  DCHECK_EQ(dest.length(), desc.reloc_size);
  CopyBytes(dest.GetDataStartAddress(),
            desc.buffer + desc.buffer_size - desc.reloc_size,
            static_cast<size_t>(desc.reloc_size));
}

int Code::CodeSize() const { return SizeFor(raw_body_size()); }

CodeKind Code::kind() const {
  STATIC_ASSERT(FIELD_SIZE(kFlagsOffset) == kInt32Size);
  return KindField::decode(ReadField<uint32_t>(kFlagsOffset));
}

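// Packs the code kind, the TurboFan bit, the stack slot count and the
// off-heap-trampoline bit into the 32-bit flags field with a single store.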
void Code::initialize_flags(CodeKind kind, bool is_turbofanned, int stack_slots,
                            bool is_off_heap_trampoline) {
  CHECK(0 <= stack_slots && stack_slots < StackSlotsField::kMax);
  DCHECK(!CodeKindIsInterpretedJSFunction(kind));
  uint32_t flags = KindField::encode(kind) |
                   IsTurbofannedField::encode(is_turbofanned) |
                   StackSlotsField::encode(stack_slots) |
                   IsOffHeapTrampoline::encode(is_off_heap_trampoline);
  STATIC_ASSERT(FIELD_SIZE(kFlagsOffset) == kInt32Size);
  WriteField<uint32_t>(kFlagsOffset, flags);
  DCHECK_IMPLIES(stack_slots != 0, has_safepoint_info());
}

inline bool Code::is_interpreter_trampoline_builtin() const {
  // Check for kNoBuiltinId first to abort early when the current Code object
  // is not a builtin.
  const int index = builtin_index();
  return index != Builtins::kNoBuiltinId &&
         (index == Builtins::kInterpreterEntryTrampoline ||
          index == Builtins::kInterpreterEnterBytecodeAdvance ||
          index == Builtins::kInterpreterEnterBytecodeDispatch);
}

inline bool Code::checks_optimization_marker() const {
  bool checks_marker =
      (builtin_index() == Builtins::kCompileLazy ||
       builtin_index() == Builtins::kInterpreterEntryTrampoline ||
       CodeKindCanTierUp(kind()));
  return checks_marker ||
         (CodeKindCanDeoptimize(kind()) && marked_for_deoptimization());
}

inline bool Code::has_tagged_params() const {
  return kind() != CodeKind::JS_TO_WASM_FUNCTION &&
         kind() != CodeKind::C_WASM_ENTRY && kind() != CodeKind::WASM_FUNCTION;
}

inline bool Code::is_turbofanned() const {
  return IsTurbofannedField::decode(ReadField<uint32_t>(kFlagsOffset));
}

inline bool Code::can_have_weak_objects() const {
  DCHECK(CodeKindIsOptimizedJSFunction(kind()));
  int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
  return CanHaveWeakObjectsField::decode(flags);
}

inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(CodeKindIsOptimizedJSFunction(kind()));
  CodeDataContainer container = code_data_container(kAcquireLoad);
  int32_t previous = container.kind_specific_flags();
  int32_t updated = CanHaveWeakObjectsField::update(previous, value);
  container.set_kind_specific_flags(updated);
}

inline bool Code::is_promise_rejection() const {
  DCHECK(kind() == CodeKind::BUILTIN);
  int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
  return IsPromiseRejectionField::decode(flags);
}

inline void Code::set_is_promise_rejection(bool value) {
  DCHECK(kind() == CodeKind::BUILTIN);
  CodeDataContainer container = code_data_container(kAcquireLoad);
  int32_t previous = container.kind_specific_flags();
  int32_t updated = IsPromiseRejectionField::update(previous, value);
  container.set_kind_specific_flags(updated);
}

inline bool Code::is_exception_caught() const {
  DCHECK(kind() == CodeKind::BUILTIN);
  int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
  return IsExceptionCaughtField::decode(flags);
}

inline void Code::set_is_exception_caught(bool value) {
  DCHECK(kind() == CodeKind::BUILTIN);
  CodeDataContainer container = code_data_container(kAcquireLoad);
  int32_t previous = container.kind_specific_flags();
  int32_t updated = IsExceptionCaughtField::update(previous, value);
  container.set_kind_specific_flags(updated);
}

inline bool Code::is_off_heap_trampoline() const {
  return IsOffHeapTrampoline::decode(ReadField<uint32_t>(kFlagsOffset));
}

inline HandlerTable::CatchPrediction Code::GetBuiltinCatchPrediction() {
  if (is_promise_rejection()) return HandlerTable::PROMISE;
  if (is_exception_caught()) return HandlerTable::CAUGHT;
  return HandlerTable::UNCAUGHT;
}

int Code::builtin_index() const {
  int index = ReadField<int>(kBuiltinIndexOffset);
  DCHECK(index == Builtins::kNoBuiltinId || Builtins::IsBuiltinId(index));
  return index;
}

void Code::set_builtin_index(int index) {
  DCHECK(index == Builtins::kNoBuiltinId || Builtins::IsBuiltinId(index));
  WriteField<int>(kBuiltinIndexOffset, index);
}

bool Code::is_builtin() const {
  return builtin_index() != Builtins::kNoBuiltinId;
}

unsigned Code::inlined_bytecode_size() const {
  DCHECK(CodeKindIsOptimizedJSFunction(kind()) ||
         ReadField<unsigned>(kInlinedBytecodeSizeOffset) == 0);
  return ReadField<unsigned>(kInlinedBytecodeSizeOffset);
}

void Code::set_inlined_bytecode_size(unsigned size) {
  DCHECK(CodeKindIsOptimizedJSFunction(kind()) || size == 0);
  WriteField<unsigned>(kInlinedBytecodeSizeOffset, size);
}

bool Code::has_safepoint_info() const {
  return is_turbofanned() || is_wasm_code();
}

int Code::stack_slots() const {
  DCHECK(has_safepoint_info());
  return StackSlotsField::decode(ReadField<uint32_t>(kFlagsOffset));
}

bool Code::marked_for_deoptimization() const {
  DCHECK(CodeKindCanDeoptimize(kind()));
  int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
  return MarkedForDeoptimizationField::decode(flags);
}

void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(CodeKindCanDeoptimize(kind()));
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  CodeDataContainer container = code_data_container(kAcquireLoad);
  int32_t previous = container.kind_specific_flags();
  int32_t updated = MarkedForDeoptimizationField::update(previous, flag);
  container.set_kind_specific_flags(updated);
}

int Code::deoptimization_count() const {
  DCHECK(CodeKindCanDeoptimize(kind()));
  int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
  int count = DeoptCountField::decode(flags);
  DCHECK_GE(count, 0);
  return count;
}

void Code::increment_deoptimization_count() {
  DCHECK(CodeKindCanDeoptimize(kind()));
  CodeDataContainer container = code_data_container(kAcquireLoad);
  int32_t flags = container.kind_specific_flags();
  int32_t count = DeoptCountField::decode(flags);
  DCHECK_GE(count, 0);
  CHECK_LE(count + 1, DeoptCountField::kMax);
  int32_t updated = DeoptCountField::update(flags, count + 1);
  container.set_kind_specific_flags(updated);
}

bool Code::embedded_objects_cleared() const {
  DCHECK(CodeKindIsOptimizedJSFunction(kind()));
  int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
  return EmbeddedObjectsClearedField::decode(flags);
}

void Code::set_embedded_objects_cleared(bool flag) {
  DCHECK(CodeKindIsOptimizedJSFunction(kind()));
  DCHECK_IMPLIES(flag, marked_for_deoptimization());
  CodeDataContainer container = code_data_container(kAcquireLoad);
  int32_t previous = container.kind_specific_flags();
  int32_t updated = EmbeddedObjectsClearedField::update(previous, flag);
  container.set_kind_specific_flags(updated);
}

bool Code::deopt_already_counted() const {
  DCHECK(CodeKindCanDeoptimize(kind()));
  int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
  return DeoptAlreadyCountedField::decode(flags);
}

void Code::set_deopt_already_counted(bool flag) {
  DCHECK(CodeKindCanDeoptimize(kind()));
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  CodeDataContainer container = code_data_container(kAcquireLoad);
  int32_t previous = container.kind_specific_flags();
  int32_t updated = DeoptAlreadyCountedField::update(previous, flag);
  container.set_kind_specific_flags(updated);
}

bool Code::is_optimized_code() const {
  return CodeKindIsOptimizedJSFunction(kind());
}
bool Code::is_wasm_code() const { return kind() == CodeKind::WASM_FUNCTION; }

int Code::constant_pool_offset() const {
  if (!FLAG_enable_embedded_constant_pool) {
    // Redirection needed since the field doesn't exist in this case.
    return code_comments_offset();
  }
  return ReadField<int>(kConstantPoolOffsetOffset);
}

void Code::set_constant_pool_offset(int value) {
  if (!FLAG_enable_embedded_constant_pool) {
    // Redirection needed since the field doesn't exist in this case.
    return;
  }
  DCHECK_LE(value, MetadataSize());
  WriteField<int>(kConstantPoolOffsetOffset, value);
}

Address Code::constant_pool() const {
  if (!has_constant_pool()) return kNullAddress;
  return MetadataStart() + constant_pool_offset();
}

Address Code::code_comments() const {
  return MetadataStart() + code_comments_offset();
}

Address Code::unwinding_info_start() const {
  return MetadataStart() + unwinding_info_offset();
}

Address Code::unwinding_info_end() const { return MetadataEnd(); }

int Code::unwinding_info_size() const {
  DCHECK_GE(unwinding_info_end(), unwinding_info_start());
  return static_cast<int>(unwinding_info_end() - unwinding_info_start());
}

bool Code::has_unwinding_info() const { return unwinding_info_size() > 0; }

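// Recovers the Code object from a code entry address by subtracting the
// header size. Addresses inside the embedded blob have no enclosing on-heap
// Code object, hence the CHECK below.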
Code Code::GetCodeFromTargetAddress(Address address) {
  {
    // TODO(jgruber,v8:6666): Support embedded builtins here. We'd need to pass
    // in the current isolate.
    Address start =
        reinterpret_cast<Address>(Isolate::CurrentEmbeddedBlobCode());
    Address end = start + Isolate::CurrentEmbeddedBlobCodeSize();
    CHECK(address < start || address >= end);
  }

  HeapObject code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // Unchecked cast because we can't rely on the map currently
  // not being a forwarding pointer.
  return Code::unchecked_cast(code);
}

Code Code::GetObjectFromEntryAddress(Address location_of_address) {
  Address code_entry = base::Memory<Address>(location_of_address);
  HeapObject code = HeapObject::FromAddress(code_entry - Code::kHeaderSize);
  // Unchecked cast because we can't rely on the map currently
  // not being a forwarding pointer.
  return Code::unchecked_cast(code);
}

bool Code::CanContainWeakObjects() {
  return is_optimized_code() && can_have_weak_objects();
}

bool Code::IsWeakObject(HeapObject object) {
  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}

bool Code::IsWeakObjectInOptimizedCode(HeapObject object) {
  Map map = object.synchronized_map();
  InstanceType instance_type = map.instance_type();
  if (InstanceTypeChecker::IsMap(instance_type)) {
    return Map::cast(object).CanTransition();
  }
  return InstanceTypeChecker::IsPropertyCell(instance_type) ||
         InstanceTypeChecker::IsJSReceiver(instance_type) ||
         InstanceTypeChecker::IsContext(instance_type);
}

bool Code::IsExecutable() {
  return !Builtins::IsBuiltinId(builtin_index()) || !is_off_heap_trampoline() ||
         Builtins::CodeObjectIsExecutable(builtin_index());
}

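// CodeDataContainer holds the mutable state associated with a Code object, so
// that the Code object itself can stay unmodified after initialization.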
// This field has to have relaxed atomic accessors because it is accessed by
// the concurrent marker.
STATIC_ASSERT(FIELD_SIZE(CodeDataContainer::kKindSpecificFlagsOffset) ==
              kInt32Size);
RELAXED_INT32_ACCESSORS(CodeDataContainer, kind_specific_flags,
                        kKindSpecificFlagsOffset)
ACCESSORS(CodeDataContainer, next_code_link, Object, kNextCodeLinkOffset)

void CodeDataContainer::clear_padding() {
  memset(reinterpret_cast<void*>(address() + kUnalignedSize), 0,
         kSize - kUnalignedSize);
}

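// Bytecodes are stored as raw bytes immediately after the BytecodeArray
// header; get/set index into that inline storage.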
byte BytecodeArray::get(int index) const {
  DCHECK(index >= 0 && index < this->length());
  return ReadField<byte>(kHeaderSize + index * kCharSize);
}

void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WriteField<byte>(kHeaderSize + index * kCharSize, value);
}

void BytecodeArray::set_frame_size(int32_t frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, kSystemPointerSize));
  WriteField<int32_t>(kFrameSizeOffset, frame_size);
}

int32_t BytecodeArray::frame_size() const {
  return ReadField<int32_t>(kFrameSizeOffset);
}

int BytecodeArray::register_count() const {
  return static_cast<int>(frame_size()) / kSystemPointerSize;
}

void BytecodeArray::set_parameter_count(int32_t number_of_parameters) {
  DCHECK_GE(number_of_parameters, 0);
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  WriteField<int32_t>(kParameterSizeOffset,
                      (number_of_parameters << kSystemPointerSizeLog2));
}

interpreter::Register BytecodeArray::incoming_new_target_or_generator_register()
    const {
  int32_t register_operand =
      ReadField<int32_t>(kIncomingNewTargetOrGeneratorRegisterOffset);
  if (register_operand == 0) {
    return interpreter::Register::invalid_value();
  } else {
    return interpreter::Register::FromOperand(register_operand);
  }
}

void BytecodeArray::set_incoming_new_target_or_generator_register(
    interpreter::Register incoming_new_target_or_generator_register) {
  if (!incoming_new_target_or_generator_register.is_valid()) {
    WriteField<int32_t>(kIncomingNewTargetOrGeneratorRegisterOffset, 0);
  } else {
    DCHECK(incoming_new_target_or_generator_register.index() <
           register_count());
    DCHECK_NE(0, incoming_new_target_or_generator_register.ToOperand());
    WriteField<int32_t>(kIncomingNewTargetOrGeneratorRegisterOffset,
                        incoming_new_target_or_generator_register.ToOperand());
  }
}

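// The OSR loop nesting level arms on-stack replacement: back edges of loops
// nested no deeper than this level trigger OSR checks in the interpreter.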
int BytecodeArray::osr_loop_nesting_level() const {
  return ReadField<int8_t>(kOsrNestingLevelOffset);
}

void BytecodeArray::set_osr_loop_nesting_level(int depth) {
  DCHECK(0 <= depth && depth <= AbstractCode::kMaxLoopNestingMarker);
  STATIC_ASSERT(AbstractCode::kMaxLoopNestingMarker < kMaxInt8);
  WriteField<int8_t>(kOsrNestingLevelOffset, depth);
}

BytecodeArray::Age BytecodeArray::bytecode_age() const {
  // Bytecode is aged by the concurrent marker.
  return static_cast<Age>(RELAXED_READ_INT8_FIELD(*this, kBytecodeAgeOffset));
}

void BytecodeArray::set_bytecode_age(BytecodeArray::Age age) {
  DCHECK_GE(age, kFirstBytecodeAge);
  DCHECK_LE(age, kLastBytecodeAge);
  STATIC_ASSERT(kLastBytecodeAge <= kMaxInt8);
  // Bytecode is aged by the concurrent marker.
  RELAXED_WRITE_INT8_FIELD(*this, kBytecodeAgeOffset, static_cast<int8_t>(age));
}

int32_t BytecodeArray::parameter_count() const {
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  return ReadField<int32_t>(kParameterSizeOffset) >> kSystemPointerSizeLog2;
}

ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS(BytecodeArray, handler_table, ByteArray, kHandlerTableOffset)
RELEASE_ACQUIRE_ACCESSORS(BytecodeArray, source_position_table, Object,
                          kSourcePositionTableOffset)

void BytecodeArray::clear_padding() {
  int data_size = kHeaderSize + length();
  memset(reinterpret_cast<void*>(address() + data_size), 0,
         SizeFor(length()) - data_size);
}

Address BytecodeArray::GetFirstBytecodeAddress() {
  return ptr() - kHeapObjectTag + kHeaderSize;
}

bool BytecodeArray::HasSourcePositionTable() const {
  Object maybe_table = source_position_table(kAcquireLoad);
  return !(maybe_table.IsUndefined() || DidSourcePositionGenerationFail());
}

bool BytecodeArray::DidSourcePositionGenerationFail() const {
  return source_position_table(kAcquireLoad).IsException();
}

void BytecodeArray::SetSourcePositionsFailedToCollect() {
  set_source_position_table(GetReadOnlyRoots().exception(), kReleaseStore);
}

ByteArray BytecodeArray::SourcePositionTable() const {
  // WARNING: This function may be called from a background thread, hence
  // changes to how it accesses the heap can easily lead to bugs.
  Object maybe_table = source_position_table(kAcquireLoad);
  if (maybe_table.IsByteArray()) return ByteArray::cast(maybe_table);
  ReadOnlyRoots roots = GetReadOnlyRoots();
  DCHECK(maybe_table.IsUndefined(roots) || maybe_table.IsException(roots));
  return roots.empty_byte_array();
}

int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }

int BytecodeArray::SizeIncludingMetadata() {
  int size = BytecodeArraySize();
  size += constant_pool().Size();
  size += handler_table().Size();
  ByteArray table = SourcePositionTable();
  if (table.length() != 0) {
    size += table.Size();
  }
  return size;
}

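// DeoptimizationData layout: a fixed set of named header elements, followed
// by one record of kDeoptEntrySize slots per deoptimization point.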
DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
DEFINE_DEOPT_ELEMENT_ACCESSORS(DeoptExitStart, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(NonLazyDeoptCount, Smi)

DEFINE_DEOPT_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)

BailoutId DeoptimizationData::BytecodeOffset(int i) {
  return BailoutId(BytecodeOffsetRaw(i).value());
}

void DeoptimizationData::SetBytecodeOffset(int i, BailoutId value) {
  SetBytecodeOffsetRaw(i, Smi::FromInt(value.ToInt()));
}

int DeoptimizationData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_CODE_INL_H_