1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_OBJECTS_CODE_INL_H_
6 #define V8_OBJECTS_CODE_INL_H_
7 
8 #include "src/base/memory.h"
9 #include "src/baseline/bytecode-offset-iterator.h"
10 #include "src/codegen/code-desc.h"
11 #include "src/common/assert-scope.h"
12 #include "src/common/globals.h"
13 #include "src/execution/isolate.h"
14 #include "src/heap/heap-inl.h"
15 #include "src/interpreter/bytecode-register.h"
16 #include "src/objects/code.h"
17 #include "src/objects/dictionary.h"
18 #include "src/objects/instance-type-inl.h"
19 #include "src/objects/map-inl.h"
20 #include "src/objects/maybe-object-inl.h"
21 #include "src/objects/oddball.h"
22 #include "src/objects/shared-function-info-inl.h"
23 #include "src/objects/smi-inl.h"
24 #include "src/utils/utils.h"
25 
26 // Has to be the last include (doesn't have include guards):
27 #include "src/objects/object-macros.h"
28 
29 namespace v8 {
30 namespace internal {
31 
32 #include "torque-generated/src/objects/code-tq-inl.inc"
33 
34 OBJECT_CONSTRUCTORS_IMPL(DeoptimizationData, FixedArray)
35 TQ_OBJECT_CONSTRUCTORS_IMPL(BytecodeArray)
36 OBJECT_CONSTRUCTORS_IMPL(AbstractCode, HeapObject)
37 OBJECT_CONSTRUCTORS_IMPL(DependentCode, WeakArrayList)
38 OBJECT_CONSTRUCTORS_IMPL(CodeDataContainer, HeapObject)
39 NEVER_READ_ONLY_SPACE_IMPL(CodeDataContainer)
40 
41 NEVER_READ_ONLY_SPACE_IMPL(AbstractCode)
42 
43 CAST_ACCESSOR(AbstractCode)
44 CAST_ACCESSOR(Code)
45 CAST_ACCESSOR(CodeDataContainer)
46 CAST_ACCESSOR(DependentCode)
47 CAST_ACCESSOR(DeoptimizationData)
48 CAST_ACCESSOR(DeoptimizationLiteralArray)
49 
50 int AbstractCode::raw_instruction_size() {
51   if (IsCode()) {
52     return GetCode().raw_instruction_size();
53   } else {
54     return GetBytecodeArray().length();
55   }
56 }
57 
58 int AbstractCode::InstructionSize() {
59   if (IsCode()) {
60     return GetCode().InstructionSize();
61   } else {
62     return GetBytecodeArray().length();
63   }
64 }
65 
66 ByteArray AbstractCode::SourcePositionTableInternal() {
67   if (IsCode()) {
68     DCHECK_NE(GetCode().kind(), CodeKind::BASELINE);
69     return GetCode().source_position_table();
70   } else {
71     return GetBytecodeArray().SourcePositionTable();
72   }
73 }
74 
75 ByteArray AbstractCode::SourcePositionTable(SharedFunctionInfo sfi) {
76   if (IsCode()) {
77     return GetCode().SourcePositionTable(sfi);
78   } else {
79     return GetBytecodeArray().SourcePositionTable();
80   }
81 }
82 
83 int AbstractCode::SizeIncludingMetadata() {
84   if (IsCode()) {
85     return GetCode().SizeIncludingMetadata();
86   } else {
87     return GetBytecodeArray().SizeIncludingMetadata();
88   }
89 }
90 
91 Address AbstractCode::raw_instruction_start() {
92   if (IsCode()) {
93     return GetCode().raw_instruction_start();
94   } else {
95     return GetBytecodeArray().GetFirstBytecodeAddress();
96   }
97 }
98 
99 Address AbstractCode::InstructionStart() {
100   if (IsCode()) {
101     return GetCode().InstructionStart();
102   } else {
103     return GetBytecodeArray().GetFirstBytecodeAddress();
104   }
105 }
106 
107 Address AbstractCode::raw_instruction_end() {
108   if (IsCode()) {
109     return GetCode().raw_instruction_end();
110   } else {
111     return GetBytecodeArray().GetFirstBytecodeAddress() +
112            GetBytecodeArray().length();
113   }
114 }
115 
116 Address AbstractCode::InstructionEnd() {
117   if (IsCode()) {
118     return GetCode().InstructionEnd();
119   } else {
120     return GetBytecodeArray().GetFirstBytecodeAddress() +
121            GetBytecodeArray().length();
122   }
123 }
124 
125 bool AbstractCode::contains(Isolate* isolate, Address inner_pointer) {
126   PtrComprCageBase cage_base(isolate);
127   if (IsCode(cage_base)) {
128     return GetCode().contains(isolate, inner_pointer);
129   } else {
130     return (address() <= inner_pointer) &&
131            (inner_pointer <= address() + Size(cage_base));
132   }
133 }
134 
135 CodeKind AbstractCode::kind() {
136   return IsCode() ? GetCode().kind() : CodeKind::INTERPRETED_FUNCTION;
137 }
138 
139 Code AbstractCode::GetCode() { return Code::cast(*this); }
140 
141 BytecodeArray AbstractCode::GetBytecodeArray() {
142   return BytecodeArray::cast(*this);
143 }
144 
145 OBJECT_CONSTRUCTORS_IMPL(Code, HeapObject)
146 NEVER_READ_ONLY_SPACE_IMPL(Code)
147 
148 INT_ACCESSORS(Code, raw_instruction_size, kInstructionSizeOffset)
149 INT_ACCESSORS(Code, raw_metadata_size, kMetadataSizeOffset)
150 INT_ACCESSORS(Code, handler_table_offset, kHandlerTableOffsetOffset)
151 INT_ACCESSORS(Code, code_comments_offset, kCodeCommentsOffsetOffset)
152 INT32_ACCESSORS(Code, unwinding_info_offset, kUnwindingInfoOffsetOffset)
153 
154 // Same as ACCESSORS_CHECKED2 macro but with Code as a host and using
155 // main_cage_base() for computing the base.
156 #define CODE_ACCESSORS_CHECKED2(name, type, offset, get_condition,  \
157                                 set_condition)                      \
158   type Code::name() const {                                         \
159     PtrComprCageBase cage_base = main_cage_base();                  \
160     return Code::name(cage_base);                                   \
161   }                                                                 \
162   type Code::name(PtrComprCageBase cage_base) const {               \
163     type value = TaggedField<type, offset>::load(cage_base, *this); \
164     DCHECK(get_condition);                                          \
165     return value;                                                   \
166   }                                                                 \
167   void Code::set_##name(type value, WriteBarrierMode mode) {        \
168     DCHECK(set_condition);                                          \
169     TaggedField<type, offset>::store(*this, value);                 \
170     CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);          \
171   }
172 
173 // Same as RELEASE_ACQUIRE_ACCESSORS_CHECKED2 macro but with Code as a host and
174 // using main_cage_base(kRelaxedLoad) for computing the base.
175 #define RELEASE_ACQUIRE_CODE_ACCESSORS_CHECKED2(name, type, offset,           \
176                                                 get_condition, set_condition) \
177   type Code::name(AcquireLoadTag tag) const {                                 \
178     PtrComprCageBase cage_base = main_cage_base(kRelaxedLoad);                \
179     return Code::name(cage_base, tag);                                        \
180   }                                                                           \
181   type Code::name(PtrComprCageBase cage_base, AcquireLoadTag) const {         \
182     type value = TaggedField<type, offset>::Acquire_Load(cage_base, *this);   \
183     DCHECK(get_condition);                                                    \
184     return value;                                                             \
185   }                                                                           \
186   void Code::set_##name(type value, ReleaseStoreTag, WriteBarrierMode mode) { \
187     DCHECK(set_condition);                                                    \
188     TaggedField<type, offset>::Release_Store(*this, value);                   \
189     CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);                    \
190   }
191 
192 #define CODE_ACCESSORS(name, type, offset) \
193   CODE_ACCESSORS_CHECKED2(name, type, offset, true, true)
194 
195 #define RELEASE_ACQUIRE_CODE_ACCESSORS(name, type, offset)                 \
196   RELEASE_ACQUIRE_CODE_ACCESSORS_CHECKED2(name, type, offset,              \
197                                           !ObjectInYoungGeneration(value), \
198                                           !ObjectInYoungGeneration(value))
199 
200 CODE_ACCESSORS(relocation_info, ByteArray, kRelocationInfoOffset)
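// Illustrative expansion (not part of the original file): given the
// CODE_ACCESSORS_CHECKED2 definition above, the CODE_ACCESSORS line for
// relocation_info generates roughly the following accessors (the get/set
// conditions are simply `true`, so the DCHECKs are omitted here):
//
//   ByteArray Code::relocation_info() const {
//     PtrComprCageBase cage_base = main_cage_base();
//     return Code::relocation_info(cage_base);
//   }
//   ByteArray Code::relocation_info(PtrComprCageBase cage_base) const {
//     return TaggedField<ByteArray, kRelocationInfoOffset>::load(cage_base,
//                                                                *this);
//   }
//   void Code::set_relocation_info(ByteArray value, WriteBarrierMode mode) {
//     TaggedField<ByteArray, kRelocationInfoOffset>::store(*this, value);
//     CONDITIONAL_WRITE_BARRIER(*this, kRelocationInfoOffset, value, mode);
//   }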
201 
202 CODE_ACCESSORS_CHECKED2(deoptimization_data, FixedArray,
203                         kDeoptimizationDataOrInterpreterDataOffset,
204                         kind() != CodeKind::BASELINE,
205                         kind() != CodeKind::BASELINE &&
206                             !ObjectInYoungGeneration(value))
207 CODE_ACCESSORS_CHECKED2(bytecode_or_interpreter_data, HeapObject,
208                         kDeoptimizationDataOrInterpreterDataOffset,
209                         kind() == CodeKind::BASELINE,
210                         kind() == CodeKind::BASELINE &&
211                             !ObjectInYoungGeneration(value))
212 
213 CODE_ACCESSORS_CHECKED2(source_position_table, ByteArray, kPositionTableOffset,
214                         kind() != CodeKind::BASELINE,
215                         kind() != CodeKind::BASELINE &&
216                             !ObjectInYoungGeneration(value))
217 CODE_ACCESSORS_CHECKED2(bytecode_offset_table, ByteArray, kPositionTableOffset,
218                         kind() == CodeKind::BASELINE,
219                         kind() == CodeKind::BASELINE &&
220                             !ObjectInYoungGeneration(value))
221 
222 // Concurrent marker needs to access kind specific flags in code data container.
223 RELEASE_ACQUIRE_CODE_ACCESSORS(code_data_container, CodeDataContainer,
224                                kCodeDataContainerOffset)
225 #undef CODE_ACCESSORS
226 #undef CODE_ACCESSORS_CHECKED2
227 #undef RELEASE_ACQUIRE_CODE_ACCESSORS
228 #undef RELEASE_ACQUIRE_CODE_ACCESSORS_CHECKED2
229 
230 PtrComprCageBase Code::main_cage_base() const {
231 #ifdef V8_EXTERNAL_CODE_SPACE
232   Address cage_base_hi = ReadField<Tagged_t>(kMainCageBaseUpper32BitsOffset);
233   return PtrComprCageBase(cage_base_hi << 32);
234 #else
235   return GetPtrComprCageBase(*this);
236 #endif
237 }
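// Sketch of the arithmetic above, assuming a 64-bit build with
// V8_EXTERNAL_CODE_SPACE and a hypothetical cage base of
// 0x0000'1234'0000'0000: the cage is 4GB-aligned, so its low 32 bits are
// always zero and only the upper half (0x1234) is stored in the 32-bit
// kMainCageBaseUpper32BitsOffset field; `cage_base_hi << 32` then restores
// the full base address on load.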
238 
239 PtrComprCageBase Code::main_cage_base(RelaxedLoadTag) const {
240 #ifdef V8_EXTERNAL_CODE_SPACE
241   Address cage_base_hi =
242       Relaxed_ReadField<Tagged_t>(kMainCageBaseUpper32BitsOffset);
243   return PtrComprCageBase(cage_base_hi << 32);
244 #else
245   return GetPtrComprCageBase(*this);
246 #endif
247 }
248 
249 void Code::set_main_cage_base(Address cage_base, RelaxedStoreTag) {
250 #ifdef V8_EXTERNAL_CODE_SPACE
251   Tagged_t cage_base_hi = static_cast<Tagged_t>(cage_base >> 32);
252   Relaxed_WriteField<Tagged_t>(kMainCageBaseUpper32BitsOffset, cage_base_hi);
253 #else
254   UNREACHABLE();
255 #endif
256 }
257 
258 CodeDataContainer Code::GCSafeCodeDataContainer(AcquireLoadTag) const {
259   PtrComprCageBase cage_base = main_cage_base(kRelaxedLoad);
260   HeapObject object =
261       TaggedField<HeapObject, kCodeDataContainerOffset>::Acquire_Load(cage_base,
262                                                                       *this);
263   DCHECK(!ObjectInYoungGeneration(object));
264   CodeDataContainer code_data_container =
265       ForwardingAddress(CodeDataContainer::unchecked_cast(object));
266   return code_data_container;
267 }
268 
269 // Helper functions for converting Code objects to CodeDataContainer and back
270 // when V8_EXTERNAL_CODE_SPACE is enabled.
271 inline CodeT ToCodeT(Code code) {
272 #ifdef V8_EXTERNAL_CODE_SPACE
273   return code.code_data_container(kAcquireLoad);
274 #else
275   return code;
276 #endif
277 }
278 
279 inline Handle<CodeT> ToCodeT(Handle<Code> code, Isolate* isolate) {
280 #ifdef V8_EXTERNAL_CODE_SPACE
281   return handle(ToCodeT(*code), isolate);
282 #else
283   return code;
284 #endif
285 }
286 
287 inline MaybeHandle<CodeT> ToCodeT(MaybeHandle<Code> maybe_code,
288                                   Isolate* isolate) {
289 #ifdef V8_EXTERNAL_CODE_SPACE
290   Handle<Code> code;
291   if (maybe_code.ToHandle(&code)) return ToCodeT(code, isolate);
292   return {};
293 #else
294   return maybe_code;
295 #endif
296 }
297 
298 inline Code FromCodeT(CodeT code) {
299 #ifdef V8_EXTERNAL_CODE_SPACE
300   return code.code();
301 #else
302   return code;
303 #endif
304 }
305 
306 inline Code FromCodeT(CodeT code, RelaxedLoadTag) {
307 #ifdef V8_EXTERNAL_CODE_SPACE
308   return code.code(kRelaxedLoad);
309 #else
310   return code;
311 #endif
312 }
313 
314 inline Handle<Code> FromCodeT(Handle<CodeT> code, Isolate* isolate) {
315 #ifdef V8_EXTERNAL_CODE_SPACE
316   return handle(FromCodeT(*code), isolate);
317 #else
318   return code;
319 #endif
320 }
321 
322 inline Handle<AbstractCode> ToAbstractCode(Handle<CodeT> code,
323                                            Isolate* isolate) {
324   return Handle<AbstractCode>::cast(FromCodeT(code, isolate));
325 }
326 
327 inline CodeDataContainer CodeDataContainerFromCodeT(CodeT code) {
328 #ifdef V8_EXTERNAL_CODE_SPACE
329   return code;
330 #else
331   return code.code_data_container(kAcquireLoad);
332 #endif
333 }
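// Hypothetical usage sketch (not from the original file) for the conversion
// helpers above; the same call sites compile whether or not
// V8_EXTERNAL_CODE_SPACE is enabled:
//
//   Code code = ...;              // some Code object
//   CodeT t = ToCodeT(code);      // CodeDataContainer if the flag is on,
//                                 // otherwise `code` itself
//   Code back = FromCodeT(t);     // recovers the Code object
//   CodeDataContainer cdc = CodeDataContainerFromCodeT(t);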
334 
335 void Code::WipeOutHeader() {
336   WRITE_FIELD(*this, kRelocationInfoOffset, Smi::FromInt(0));
337   WRITE_FIELD(*this, kDeoptimizationDataOrInterpreterDataOffset,
338               Smi::FromInt(0));
339   WRITE_FIELD(*this, kPositionTableOffset, Smi::FromInt(0));
340   WRITE_FIELD(*this, kCodeDataContainerOffset, Smi::FromInt(0));
341   if (V8_EXTERNAL_CODE_SPACE_BOOL) {
342     set_main_cage_base(kNullAddress, kRelaxedStore);
343   }
344 }
345 
346 void Code::clear_padding() {
347   // Clear the padding between the header and `raw_body_start`.
348   if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
349     memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
350            FIELD_SIZE(kOptionalPaddingOffset));
351   }
352 
353   // Clear the padding after `raw_body_end`.
354   size_t trailing_padding_size =
355       CodeSize() - Code::kHeaderSize - raw_body_size();
356   memset(reinterpret_cast<void*>(raw_body_end()), 0, trailing_padding_size);
357 }
358 
359 ByteArray Code::SourcePositionTable(SharedFunctionInfo sfi) const {
360   DisallowGarbageCollection no_gc;
361   if (kind() == CodeKind::BASELINE) {
362     return sfi.GetBytecodeArray(sfi.GetIsolate()).SourcePositionTable();
363   }
364   return source_position_table();
365 }
366 
367 Object Code::next_code_link() const {
368   return code_data_container(kAcquireLoad).next_code_link();
369 }
370 
371 void Code::set_next_code_link(Object value) {
372   code_data_container(kAcquireLoad).set_next_code_link(value);
373 }
374 
375 Address Code::raw_body_start() const { return raw_instruction_start(); }
376 
377 Address Code::raw_body_end() const {
378   return raw_body_start() + raw_body_size();
379 }
380 
381 int Code::raw_body_size() const {
382   return raw_instruction_size() + raw_metadata_size();
383 }
384 
385 int Code::InstructionSize() const {
386   return V8_UNLIKELY(is_off_heap_trampoline())
387              ? OffHeapInstructionSize(*this, builtin_id())
388              : raw_instruction_size();
389 }
390 
391 Address Code::raw_instruction_start() const {
392   return field_address(kHeaderSize);
393 }
394 
395 Address Code::InstructionStart() const {
396   return V8_UNLIKELY(is_off_heap_trampoline())
397              ? i::OffHeapInstructionStart(*this, builtin_id())
398              : raw_instruction_start();
399 }
400 
401 Address Code::raw_instruction_end() const {
402   return raw_instruction_start() + raw_instruction_size();
403 }
404 
405 Address Code::InstructionEnd() const {
406   return V8_UNLIKELY(is_off_heap_trampoline())
407              ? i::OffHeapInstructionEnd(*this, builtin_id())
408              : raw_instruction_end();
409 }
410 
411 Address Code::raw_metadata_start() const {
412   return raw_instruction_start() + raw_instruction_size();
413 }
414 
415 Address Code::InstructionStart(Isolate* isolate, Address pc) const {
416   return V8_UNLIKELY(is_off_heap_trampoline())
417              ? OffHeapInstructionStart(isolate, pc)
418              : raw_instruction_start();
419 }
420 
421 Address Code::InstructionEnd(Isolate* isolate, Address pc) const {
422   return V8_UNLIKELY(is_off_heap_trampoline())
423              ? OffHeapInstructionEnd(isolate, pc)
424              : raw_instruction_end();
425 }
426 
427 int Code::GetOffsetFromInstructionStart(Isolate* isolate, Address pc) const {
428   Address instruction_start = InstructionStart(isolate, pc);
429   Address offset = pc - instruction_start;
430   DCHECK_LE(offset, InstructionSize());
431   return static_cast<int>(offset);
432 }
433 
434 Address Code::raw_metadata_end() const {
435   return raw_metadata_start() + raw_metadata_size();
436 }
437 
438 int Code::MetadataSize() const {
439   return V8_UNLIKELY(is_off_heap_trampoline())
440              ? OffHeapMetadataSize(*this, builtin_id())
441              : raw_metadata_size();
442 }
443 
444 int Code::SizeIncludingMetadata() const {
445   int size = CodeSize();
446   size += relocation_info().Size();
447   if (kind() != CodeKind::BASELINE) {
448     size += deoptimization_data().Size();
449   }
450   return size;
451 }
452 
453 Address Code::SafepointTableAddress() const {
454   return V8_UNLIKELY(is_off_heap_trampoline())
455              ? OffHeapSafepointTableAddress(*this, builtin_id())
456              : raw_metadata_start() + safepoint_table_offset();
457 }
458 
459 int Code::safepoint_table_size() const {
460   DCHECK_GE(handler_table_offset() - safepoint_table_offset(), 0);
461   return handler_table_offset() - safepoint_table_offset();
462 }
463 
464 bool Code::has_safepoint_table() const { return safepoint_table_size() > 0; }
465 
466 Address Code::HandlerTableAddress() const {
467   return V8_UNLIKELY(is_off_heap_trampoline())
468              ? OffHeapHandlerTableAddress(*this, builtin_id())
469              : raw_metadata_start() + handler_table_offset();
470 }
471 
472 int Code::handler_table_size() const {
473   DCHECK_GE(constant_pool_offset() - handler_table_offset(), 0);
474   return constant_pool_offset() - handler_table_offset();
475 }
476 
477 bool Code::has_handler_table() const { return handler_table_size() > 0; }
478 
479 int Code::constant_pool_size() const {
480   const int size = code_comments_offset() - constant_pool_offset();
481   DCHECK_IMPLIES(!FLAG_enable_embedded_constant_pool, size == 0);
482   DCHECK_GE(size, 0);
483   return size;
484 }
485 
486 bool Code::has_constant_pool() const { return constant_pool_size() > 0; }
487 
488 int Code::code_comments_size() const {
489   DCHECK_GE(unwinding_info_offset() - code_comments_offset(), 0);
490   return unwinding_info_offset() - code_comments_offset();
491 }
492 
493 bool Code::has_code_comments() const { return code_comments_size() > 0; }
494 
495 ByteArray Code::unchecked_relocation_info() const {
496   PtrComprCageBase cage_base = main_cage_base();
497   return ByteArray::unchecked_cast(
498       TaggedField<HeapObject, kRelocationInfoOffset>::load(cage_base, *this));
499 }
500 
501 byte* Code::relocation_start() const {
502   return unchecked_relocation_info().GetDataStartAddress();
503 }
504 
505 byte* Code::relocation_end() const {
506   return unchecked_relocation_info().GetDataEndAddress();
507 }
508 
509 int Code::relocation_size() const {
510   return unchecked_relocation_info().length();
511 }
512 
513 Address Code::entry() const { return raw_instruction_start(); }
514 
515 bool Code::contains(Isolate* isolate, Address inner_pointer) {
516   if (is_off_heap_trampoline()) {
517     if (OffHeapInstructionStart(isolate, inner_pointer) <= inner_pointer &&
518         inner_pointer < OffHeapInstructionEnd(isolate, inner_pointer)) {
519       return true;
520     }
521   }
522   return (address() <= inner_pointer) &&
523          (inner_pointer < address() + CodeSize());
524 }
525 
526 // static
527 void Code::CopyRelocInfoToByteArray(ByteArray dest, const CodeDesc& desc) {
528   DCHECK_EQ(dest.length(), desc.reloc_size);
529   CopyBytes(dest.GetDataStartAddress(),
530             desc.buffer + desc.buffer_size - desc.reloc_size,
531             static_cast<size_t>(desc.reloc_size));
532 }
533 
534 int Code::CodeSize() const { return SizeFor(raw_body_size()); }
535 
536 DEF_GETTER(Code, Size, int) { return CodeSize(); }
537 
538 CodeKind Code::kind() const {
539   STATIC_ASSERT(FIELD_SIZE(kFlagsOffset) == kInt32Size);
540   const uint32_t flags = RELAXED_READ_UINT32_FIELD(*this, kFlagsOffset);
541   return KindField::decode(flags);
542 }
543 
544 int Code::GetBytecodeOffsetForBaselinePC(Address baseline_pc,
545                                          BytecodeArray bytecodes) {
546   DisallowGarbageCollection no_gc;
547   CHECK(!is_baseline_trampoline_builtin());
548   if (is_baseline_leave_frame_builtin()) return kFunctionExitBytecodeOffset;
549   CHECK_EQ(kind(), CodeKind::BASELINE);
550   baseline::BytecodeOffsetIterator offset_iterator(
551       ByteArray::cast(bytecode_offset_table()), bytecodes);
552   Address pc = baseline_pc - InstructionStart();
553   offset_iterator.AdvanceToPCOffset(pc);
554   return offset_iterator.current_bytecode_offset();
555 }
556 
557 uintptr_t Code::GetBaselinePCForBytecodeOffset(int bytecode_offset,
558                                                BytecodeToPCPosition position,
559                                                BytecodeArray bytecodes) {
560   DisallowGarbageCollection no_gc;
561   CHECK_EQ(kind(), CodeKind::BASELINE);
562   baseline::BytecodeOffsetIterator offset_iterator(
563       ByteArray::cast(bytecode_offset_table()), bytecodes);
564   offset_iterator.AdvanceToBytecodeOffset(bytecode_offset);
565   uintptr_t pc = 0;
566   if (position == kPcAtStartOfBytecode) {
567     pc = offset_iterator.current_pc_start_offset();
568   } else {
569     DCHECK_EQ(position, kPcAtEndOfBytecode);
570     pc = offset_iterator.current_pc_end_offset();
571   }
572   return pc;
573 }
574 
575 uintptr_t Code::GetBaselineStartPCForBytecodeOffset(int bytecode_offset,
576                                                     BytecodeArray bytecodes) {
577   return GetBaselinePCForBytecodeOffset(bytecode_offset, kPcAtStartOfBytecode,
578                                         bytecodes);
579 }
580 
581 uintptr_t Code::GetBaselineEndPCForBytecodeOffset(int bytecode_offset,
582                                                   BytecodeArray bytecodes) {
583   return GetBaselinePCForBytecodeOffset(bytecode_offset, kPcAtEndOfBytecode,
584                                         bytecodes);
585 }
586 
587 uintptr_t Code::GetBaselinePCForNextExecutedBytecode(int bytecode_offset,
588                                                      BytecodeArray bytecodes) {
589   DisallowGarbageCollection no_gc;
590   CHECK_EQ(kind(), CodeKind::BASELINE);
591   baseline::BytecodeOffsetIterator offset_iterator(
592       ByteArray::cast(bytecode_offset_table()), bytecodes);
593   Handle<BytecodeArray> bytecodes_handle(
594       reinterpret_cast<Address*>(&bytecodes));
595   interpreter::BytecodeArrayIterator bytecode_iterator(bytecodes_handle,
596                                                        bytecode_offset);
597   interpreter::Bytecode bytecode = bytecode_iterator.current_bytecode();
598   if (bytecode == interpreter::Bytecode::kJumpLoop) {
599     return GetBaselineStartPCForBytecodeOffset(
600         bytecode_iterator.GetJumpTargetOffset(), bytecodes);
601   } else {
602     DCHECK(!interpreter::Bytecodes::IsJump(bytecode));
603     return GetBaselineEndPCForBytecodeOffset(bytecode_offset, bytecodes);
604   }
605 }
606 
607 void Code::initialize_flags(CodeKind kind, bool is_turbofanned, int stack_slots,
608                             bool is_off_heap_trampoline) {
609   CHECK(0 <= stack_slots && stack_slots < StackSlotsField::kMax);
610   DCHECK(!CodeKindIsInterpretedJSFunction(kind));
611   uint32_t flags = KindField::encode(kind) |
612                    IsTurbofannedField::encode(is_turbofanned) |
613                    StackSlotsField::encode(stack_slots) |
614                    IsOffHeapTrampoline::encode(is_off_heap_trampoline);
615   STATIC_ASSERT(FIELD_SIZE(kFlagsOffset) == kInt32Size);
616   RELAXED_WRITE_UINT32_FIELD(*this, kFlagsOffset, flags);
617   DCHECK_IMPLIES(stack_slots != 0, uses_safepoint_table());
618   DCHECK_IMPLIES(!uses_safepoint_table(), stack_slots == 0);
619 }
620 
621 inline bool Code::is_interpreter_trampoline_builtin() const {
622   return IsInterpreterTrampolineBuiltin(builtin_id());
623 }
624 
625 inline bool Code::is_baseline_trampoline_builtin() const {
626   return IsBaselineTrampolineBuiltin(builtin_id());
627 }
628 
629 inline bool Code::is_baseline_leave_frame_builtin() const {
630   return builtin_id() == Builtin::kBaselineLeaveFrame;
631 }
632 
633 #ifdef V8_EXTERNAL_CODE_SPACE
634 // Note, must be in sync with Code::checks_tiering_state().
635 inline bool CodeDataContainer::checks_tiering_state() const {
636   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
637   bool checks_state = (builtin_id() == Builtin::kCompileLazy ||
638                        builtin_id() == Builtin::kInterpreterEntryTrampoline ||
639                        CodeKindCanTierUp(kind()));
640   return checks_state ||
641          (CodeKindCanDeoptimize(kind()) && marked_for_deoptimization());
642 }
643 #endif  // V8_EXTERNAL_CODE_SPACE
644 
645 // Note, must be in sync with CodeDataContainer::checks_tiering_state().
646 inline bool Code::checks_tiering_state() const {
647   bool checks_state = (builtin_id() == Builtin::kCompileLazy ||
648                        builtin_id() == Builtin::kInterpreterEntryTrampoline ||
649                        CodeKindCanTierUp(kind()));
650   return checks_state ||
651          (CodeKindCanDeoptimize(kind()) && marked_for_deoptimization());
652 }
653 
654 inline bool Code::has_tagged_outgoing_params() const {
655   return kind() != CodeKind::JS_TO_WASM_FUNCTION &&
656          kind() != CodeKind::C_WASM_ENTRY && kind() != CodeKind::WASM_FUNCTION;
657 }
658 
659 inline bool Code::is_turbofanned() const {
660   const uint32_t flags = RELAXED_READ_UINT32_FIELD(*this, kFlagsOffset);
661   return IsTurbofannedField::decode(flags);
662 }
663 
664 bool Code::is_maglevved() const { return kind() == CodeKind::MAGLEV; }
665 
666 inline bool Code::can_have_weak_objects() const {
667   DCHECK(CodeKindIsOptimizedJSFunction(kind()));
668   int32_t flags =
669       code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad);
670   return CanHaveWeakObjectsField::decode(flags);
671 }
672 
673 inline void Code::set_can_have_weak_objects(bool value) {
674   DCHECK(CodeKindIsOptimizedJSFunction(kind()));
675   CodeDataContainer container = code_data_container(kAcquireLoad);
676   int32_t previous = container.kind_specific_flags(kRelaxedLoad);
677   int32_t updated = CanHaveWeakObjectsField::update(previous, value);
678   container.set_kind_specific_flags(updated, kRelaxedStore);
679 }
680 
681 inline bool Code::is_promise_rejection() const {
682   DCHECK(kind() == CodeKind::BUILTIN);
683   int32_t flags =
684       code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad);
685   return IsPromiseRejectionField::decode(flags);
686 }
687 
688 inline void Code::set_is_promise_rejection(bool value) {
689   DCHECK(kind() == CodeKind::BUILTIN);
690   CodeDataContainer container = code_data_container(kAcquireLoad);
691   int32_t previous = container.kind_specific_flags(kRelaxedLoad);
692   int32_t updated = IsPromiseRejectionField::update(previous, value);
693   container.set_kind_specific_flags(updated, kRelaxedStore);
694 }
695 
696 inline bool Code::is_off_heap_trampoline() const {
697   const uint32_t flags = RELAXED_READ_UINT32_FIELD(*this, kFlagsOffset);
698   return IsOffHeapTrampoline::decode(flags);
699 }
700 
701 inline HandlerTable::CatchPrediction Code::GetBuiltinCatchPrediction() {
702   if (is_promise_rejection()) return HandlerTable::PROMISE;
703   return HandlerTable::UNCAUGHT;
704 }
705 
706 Builtin Code::builtin_id() const {
707   int index = RELAXED_READ_INT_FIELD(*this, kBuiltinIndexOffset);
708   DCHECK(index == static_cast<int>(Builtin::kNoBuiltinId) ||
709          Builtins::IsBuiltinId(index));
710   return static_cast<Builtin>(index);
711 }
712 
713 void Code::set_builtin_id(Builtin builtin) {
714   DCHECK(builtin == Builtin::kNoBuiltinId || Builtins::IsBuiltinId(builtin));
715   RELAXED_WRITE_INT_FIELD(*this, kBuiltinIndexOffset,
716                           static_cast<int>(builtin));
717 }
718 
719 bool Code::is_builtin() const { return builtin_id() != Builtin::kNoBuiltinId; }
720 
721 unsigned Code::inlined_bytecode_size() const {
722   unsigned size = RELAXED_READ_UINT_FIELD(*this, kInlinedBytecodeSizeOffset);
723   DCHECK(CodeKindIsOptimizedJSFunction(kind()) || size == 0);
724   return size;
725 }
726 
727 void Code::set_inlined_bytecode_size(unsigned size) {
728   DCHECK(CodeKindIsOptimizedJSFunction(kind()) || size == 0);
729   RELAXED_WRITE_UINT_FIELD(*this, kInlinedBytecodeSizeOffset, size);
730 }
731 
732 bool Code::uses_safepoint_table() const {
733   return is_turbofanned() || is_maglevved() || is_wasm_code();
734 }
735 
736 int Code::stack_slots() const {
737   const uint32_t flags = RELAXED_READ_UINT32_FIELD(*this, kFlagsOffset);
738   const int slots = StackSlotsField::decode(flags);
739   DCHECK_IMPLIES(!uses_safepoint_table(), slots == 0);
740   return slots;
741 }
742 
743 bool CodeDataContainer::marked_for_deoptimization() const {
744 #ifdef V8_EXTERNAL_CODE_SPACE
745   // kind field is not available on CodeDataContainer when external code space
746   // is not enabled.
747   DCHECK(CodeKindCanDeoptimize(kind()));
748 #endif  // V8_EXTERNAL_CODE_SPACE
749   int32_t flags = kind_specific_flags(kRelaxedLoad);
750   return Code::MarkedForDeoptimizationField::decode(flags);
751 }
752 
753 bool Code::marked_for_deoptimization() const {
754   DCHECK(CodeKindCanDeoptimize(kind()));
755   return code_data_container(kAcquireLoad).marked_for_deoptimization();
756 }
757 
758 void CodeDataContainer::set_marked_for_deoptimization(bool flag) {
759 #ifdef V8_EXTERNAL_CODE_SPACE
760   // kind field is not available on CodeDataContainer when external code space
761   // is not enabled.
762   DCHECK(CodeKindCanDeoptimize(kind()));
763 #endif  // V8_EXTERNAL_CODE_SPACE
764   DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
765   int32_t previous = kind_specific_flags(kRelaxedLoad);
766   int32_t updated = Code::MarkedForDeoptimizationField::update(previous, flag);
767   set_kind_specific_flags(updated, kRelaxedStore);
768 }
769 
770 void Code::set_marked_for_deoptimization(bool flag) {
771   code_data_container(kAcquireLoad).set_marked_for_deoptimization(flag);
772 }
773 
774 bool Code::embedded_objects_cleared() const {
775   DCHECK(CodeKindIsOptimizedJSFunction(kind()));
776   int32_t flags =
777       code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad);
778   return EmbeddedObjectsClearedField::decode(flags);
779 }
780 
781 void Code::set_embedded_objects_cleared(bool flag) {
782   DCHECK(CodeKindIsOptimizedJSFunction(kind()));
783   DCHECK_IMPLIES(flag, marked_for_deoptimization());
784   CodeDataContainer container = code_data_container(kAcquireLoad);
785   int32_t previous = container.kind_specific_flags(kRelaxedLoad);
786   int32_t updated = EmbeddedObjectsClearedField::update(previous, flag);
787   container.set_kind_specific_flags(updated, kRelaxedStore);
788 }
789 
790 bool Code::is_optimized_code() const {
791   return CodeKindIsOptimizedJSFunction(kind());
792 }
793 
794 bool Code::is_wasm_code() const { return kind() == CodeKind::WASM_FUNCTION; }
795 
796 int Code::constant_pool_offset() const {
797   if (!FLAG_enable_embedded_constant_pool) {
798     // Redirection needed since the field doesn't exist in this case.
799     return code_comments_offset();
800   }
801   return ReadField<int>(kConstantPoolOffsetOffset);
802 }
803 
804 void Code::set_constant_pool_offset(int value) {
805   if (!FLAG_enable_embedded_constant_pool) {
806     // Redirection needed since the field doesn't exist in this case.
807     return;
808   }
809   DCHECK_LE(value, MetadataSize());
810   WriteField<int>(kConstantPoolOffsetOffset, value);
811 }
812 
813 Address Code::constant_pool() const {
814   if (!has_constant_pool()) return kNullAddress;
815   return V8_UNLIKELY(is_off_heap_trampoline())
816              ? OffHeapConstantPoolAddress(*this, builtin_id())
817              : raw_metadata_start() + constant_pool_offset();
818 }
819 
820 Address Code::code_comments() const {
821   return V8_UNLIKELY(is_off_heap_trampoline())
822              ? OffHeapCodeCommentsAddress(*this, builtin_id())
823              : raw_metadata_start() + code_comments_offset();
824 }
825 
826 Address Code::unwinding_info_start() const {
827   return V8_UNLIKELY(is_off_heap_trampoline())
828              ? OffHeapUnwindingInfoAddress(*this, builtin_id())
829              : raw_metadata_start() + unwinding_info_offset();
830 }
831 
832 Address Code::unwinding_info_end() const {
833   return V8_UNLIKELY(is_off_heap_trampoline())
834              ? OffHeapMetadataEnd(*this, builtin_id())
835              : raw_metadata_end();
836 }
837 
838 int Code::unwinding_info_size() const {
839   DCHECK_GE(unwinding_info_end(), unwinding_info_start());
840   return static_cast<int>(unwinding_info_end() - unwinding_info_start());
841 }
842 
843 bool Code::has_unwinding_info() const { return unwinding_info_size() > 0; }
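// Summary of the metadata layout implied by the size helpers above: the
// sections are laid out back to back from raw_metadata_start(), with all
// offsets relative to that start, so each section's size is simply the next
// section's offset minus its own:
//
//   [safepoint_table_offset(), handler_table_offset())   safepoint table
//   [handler_table_offset(),   constant_pool_offset())   handler table
//   [constant_pool_offset(),   code_comments_offset())   constant pool
//   [code_comments_offset(),   unwinding_info_offset())  code comments
//   [unwinding_info_offset(),  raw_metadata_size())      unwinding info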
844 
845 Code Code::GetCodeFromTargetAddress(Address address) {
846   {
847     // TODO(jgruber,v8:6666): Support embedded builtins here. We'd need to pass
848     // in the current isolate.
849     Address start =
850         reinterpret_cast<Address>(Isolate::CurrentEmbeddedBlobCode());
851     Address end = start + Isolate::CurrentEmbeddedBlobCodeSize();
852     CHECK(address < start || address >= end);
853   }
854 
855   HeapObject code = HeapObject::FromAddress(address - Code::kHeaderSize);
856   // Unchecked cast because we can't rely on the map currently
857   // not being a forwarding pointer.
858   return Code::unchecked_cast(code);
859 }
860 
861 Code Code::GetObjectFromEntryAddress(Address location_of_address) {
862   Address code_entry = base::Memory<Address>(location_of_address);
863   HeapObject code = HeapObject::FromAddress(code_entry - Code::kHeaderSize);
864   // Unchecked cast because we can't rely on the map currently
865   // not being a forwarding pointer.
866   return Code::unchecked_cast(code);
867 }
868 
869 bool Code::CanContainWeakObjects() {
870   return is_optimized_code() && can_have_weak_objects();
871 }
872 
873 bool Code::IsWeakObject(HeapObject object) {
874   return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
875 }
876 
877 bool Code::IsWeakObjectInOptimizedCode(HeapObject object) {
878   Map map = object.map(kAcquireLoad);
879   InstanceType instance_type = map.instance_type();
880   if (InstanceTypeChecker::IsMap(instance_type)) {
881     return Map::cast(object).CanTransition();
882   }
883   return InstanceTypeChecker::IsPropertyCell(instance_type) ||
884          InstanceTypeChecker::IsJSReceiver(instance_type) ||
885          InstanceTypeChecker::IsContext(instance_type);
886 }
887 
888 bool Code::IsWeakObjectInDeoptimizationLiteralArray(Object object) {
889   // Maps must be strong because they can be used as part of the description for
890   // how to materialize an object upon deoptimization, in which case it is
891   // possible to reach the code that requires the Map without anything else
892   // holding a strong pointer to that Map.
893   return object.IsHeapObject() && !object.IsMap() &&
894          Code::IsWeakObjectInOptimizedCode(HeapObject::cast(object));
895 }
896 
897 bool Code::IsExecutable() {
898   return !Builtins::IsBuiltinId(builtin_id()) || !is_off_heap_trampoline() ||
899          Builtins::CodeObjectIsExecutable(builtin_id());
900 }
901 
902 // This field has to have relaxed atomic accessors because it is accessed in the
903 // concurrent marker.
904 STATIC_ASSERT(FIELD_SIZE(CodeDataContainer::kKindSpecificFlagsOffset) ==
905               kInt32Size);
906 RELAXED_INT32_ACCESSORS(CodeDataContainer, kind_specific_flags,
907                         kKindSpecificFlagsOffset)
908 
909 #if defined(V8_TARGET_LITTLE_ENDIAN)
910 static_assert(!V8_EXTERNAL_CODE_SPACE_BOOL ||
911                   (CodeDataContainer::kCodeCageBaseUpper32BitsOffset ==
912                    CodeDataContainer::kCodeOffset + kTaggedSize),
913               "CodeDataContainer::code field layout requires updating "
914               "for little endian architectures");
915 #elif defined(V8_TARGET_BIG_ENDIAN)
916 static_assert(!V8_EXTERNAL_CODE_SPACE_BOOL,
917               "CodeDataContainer::code field layout requires updating "
918               "for big endian architectures");
919 #endif
920 
921 Object CodeDataContainer::raw_code() const {
922   PtrComprCageBase cage_base = code_cage_base();
923   return CodeDataContainer::raw_code(cage_base);
924 }
925 
926 Object CodeDataContainer::raw_code(PtrComprCageBase cage_base) const {
927   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
928   Object value = TaggedField<Object, kCodeOffset>::load(cage_base, *this);
929   return value;
930 }
931 
932 void CodeDataContainer::set_raw_code(Object value, WriteBarrierMode mode) {
933   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
934   TaggedField<Object, kCodeOffset>::store(*this, value);
935   CONDITIONAL_WRITE_BARRIER(*this, kCodeOffset, value, mode);
936 }
937 
938 Object CodeDataContainer::raw_code(RelaxedLoadTag tag) const {
939   PtrComprCageBase cage_base = code_cage_base(tag);
940   return CodeDataContainer::raw_code(cage_base, tag);
941 }
942 
943 Object CodeDataContainer::raw_code(PtrComprCageBase cage_base,
944                                    RelaxedLoadTag) const {
945   Object value =
946       TaggedField<Object, kCodeOffset>::Relaxed_Load(cage_base, *this);
947   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
948   return value;
949 }
950 
951 ACCESSORS(CodeDataContainer, next_code_link, Object, kNextCodeLinkOffset)
952 
953 PtrComprCageBase CodeDataContainer::code_cage_base() const {
954 #ifdef V8_EXTERNAL_CODE_SPACE
955   // TODO(v8:10391): consider protecting this value with the sandbox.
956   Address code_cage_base_hi =
957       ReadField<Tagged_t>(kCodeCageBaseUpper32BitsOffset);
958   return PtrComprCageBase(code_cage_base_hi << 32);
959 #else
960   return GetPtrComprCageBase(*this);
961 #endif
962 }
963 
964 void CodeDataContainer::set_code_cage_base(Address code_cage_base) {
965 #ifdef V8_EXTERNAL_CODE_SPACE
966   Tagged_t code_cage_base_hi = static_cast<Tagged_t>(code_cage_base >> 32);
967   WriteField<Tagged_t>(kCodeCageBaseUpper32BitsOffset, code_cage_base_hi);
968 #else
969   UNREACHABLE();
970 #endif
971 }
972 
973 PtrComprCageBase CodeDataContainer::code_cage_base(RelaxedLoadTag) const {
974 #ifdef V8_EXTERNAL_CODE_SPACE
975   // TODO(v8:10391): consider protecting this value with the sandbox.
976   Address code_cage_base_hi =
977       Relaxed_ReadField<Tagged_t>(kCodeCageBaseUpper32BitsOffset);
978   return PtrComprCageBase(code_cage_base_hi << 32);
979 #else
980   return GetPtrComprCageBase(*this);
981 #endif
982 }
983 
984 void CodeDataContainer::set_code_cage_base(Address code_cage_base,
985                                            RelaxedStoreTag) {
986 #ifdef V8_EXTERNAL_CODE_SPACE
987   Tagged_t code_cage_base_hi = static_cast<Tagged_t>(code_cage_base >> 32);
988   Relaxed_WriteField<Tagged_t>(kCodeCageBaseUpper32BitsOffset,
989                                code_cage_base_hi);
990 #else
991   UNREACHABLE();
992 #endif
993 }
994 
995 void CodeDataContainer::AllocateExternalPointerEntries(Isolate* isolate) {
996   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
997   InitExternalPointerField(kCodeEntryPointOffset, isolate, kCodeEntryPointTag);
998 }
999 
1000 Code CodeDataContainer::code() const {
1001   PtrComprCageBase cage_base = code_cage_base();
1002   return CodeDataContainer::code(cage_base);
1003 }
1004 Code CodeDataContainer::code(PtrComprCageBase cage_base) const {
1005   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1006   return Code::cast(raw_code(cage_base));
1007 }
1008 
1009 Code CodeDataContainer::code(RelaxedLoadTag tag) const {
1010   PtrComprCageBase cage_base = code_cage_base(tag);
1011   return CodeDataContainer::code(cage_base, tag);
1012 }
1013 
1014 Code CodeDataContainer::code(PtrComprCageBase cage_base,
1015                              RelaxedLoadTag tag) const {
1016   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1017   return Code::cast(raw_code(cage_base, tag));
1018 }
1019 
1020 DEF_GETTER(CodeDataContainer, code_entry_point, Address) {
1021   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1022   Isolate* isolate = GetIsolateForSandbox(*this);
1023   return ReadExternalPointerField(kCodeEntryPointOffset, isolate,
1024                                   kCodeEntryPointTag);
1025 }
1026 
1027 void CodeDataContainer::set_code_entry_point(Isolate* isolate, Address value) {
1028   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1029   WriteExternalPointerField(kCodeEntryPointOffset, isolate, value,
1030                             kCodeEntryPointTag);
1031 }
1032 
1033 void CodeDataContainer::SetCodeAndEntryPoint(Isolate* isolate_for_sandbox,
1034                                              Code code, WriteBarrierMode mode) {
1035   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1036   set_raw_code(code, mode);
1037   set_code_entry_point(isolate_for_sandbox, code.InstructionStart());
1038 }
1039 
1040 void CodeDataContainer::UpdateCodeEntryPoint(Isolate* isolate_for_sandbox,
1041                                              Code code) {
1042   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1043   DCHECK_EQ(raw_code(), code);
1044   set_code_entry_point(isolate_for_sandbox, code.InstructionStart());
1045 }
1046 
1047 Address CodeDataContainer::InstructionStart() const {
1048   return code_entry_point();
1049 }
1050 
1051 Address CodeDataContainer::raw_instruction_start() {
1052   return code_entry_point();
1053 }
1054 
1055 Address CodeDataContainer::entry() const { return code_entry_point(); }
1056 
1057 void CodeDataContainer::clear_padding() {
1058   memset(reinterpret_cast<void*>(address() + kUnalignedSize), 0,
1059          kSize - kUnalignedSize);
1060 }
1061 
1062 RELAXED_UINT16_ACCESSORS(CodeDataContainer, flags, kFlagsOffset)
1063 
1064 // Ensure builtin_id field fits into int16_t, so that we can rely on sign
1065 // extension to convert int16_t{-1} to kNoBuiltinId.
1066 // If the asserts fail, update the code that uses kBuiltinIdOffset below.
1067 STATIC_ASSERT(static_cast<int>(Builtin::kNoBuiltinId) == -1);
1068 STATIC_ASSERT(Builtins::kBuiltinCount < std::numeric_limits<int16_t>::max());
1069 
1070 void CodeDataContainer::initialize_flags(CodeKind kind, Builtin builtin_id) {
1071   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1072   uint16_t value = KindField::encode(kind);
1073   set_flags(value, kRelaxedStore);
1074 
1075   WriteField<int16_t>(kBuiltinIdOffset, static_cast<int16_t>(builtin_id));
1076 }
1077 
1078 #ifdef V8_EXTERNAL_CODE_SPACE
1079 
1080 CodeKind CodeDataContainer::kind() const {
1081   return KindField::decode(flags(kRelaxedLoad));
1082 }
1083 
1084 Builtin CodeDataContainer::builtin_id() const {
1085   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1086   // Rely on sign-extension when converting int16_t to int to preserve
1087   // kNoBuiltinId value.
1088   STATIC_ASSERT(static_cast<int>(static_cast<int16_t>(Builtin::kNoBuiltinId)) ==
1089                 static_cast<int>(Builtin::kNoBuiltinId));
1090   int value = ReadField<int16_t>(kBuiltinIdOffset);
1091   return static_cast<Builtin>(value);
1092 }
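// Worked example of the sign extension relied on above (illustrative): the
// 16-bit kBuiltinIdOffset field stores Builtin::kNoBuiltinId (-1) as
// int16_t{-1}, i.e. 0xFFFF. Reading it back via ReadField<int16_t> and
// widening to int sign-extends it to -1 again, so the static_cast<Builtin>
// yields kNoBuiltinId with no special-casing, while ordinary builtin ids
// round-trip unchanged because Builtins::kBuiltinCount stays below
// std::numeric_limits<int16_t>::max().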
1093 
1094 bool CodeDataContainer::is_builtin() const {
1095   CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1096   return builtin_id() != Builtin::kNoBuiltinId;
1097 }
1098 
1099 bool CodeDataContainer::is_optimized_code() const {
1100   return CodeKindIsOptimizedJSFunction(kind());
1101 }
1102 
1103 inline bool CodeDataContainer::is_interpreter_trampoline_builtin() const {
1104   return IsInterpreterTrampolineBuiltin(builtin_id());
1105 }
1106 
1107 //
1108 // A collection of getters and predicates that forward queries to the associated
1109 // Code object.
1110 //
1111 
1112 #define DEF_PRIMITIVE_FORWARDING_CDC_GETTER(name, type) \
1113   type CodeDataContainer::name() const { return FromCodeT(*this).name(); }
1114 
1115 #define DEF_FORWARDING_CDC_GETTER(name, type) \
1116   DEF_GETTER(CodeDataContainer, name, type) { \
1117     return FromCodeT(*this).name(cage_base);  \
1118   }
1119 
1120 DEF_PRIMITIVE_FORWARDING_CDC_GETTER(is_maglevved, bool)
1121 DEF_PRIMITIVE_FORWARDING_CDC_GETTER(is_turbofanned, bool)
1122 DEF_PRIMITIVE_FORWARDING_CDC_GETTER(is_off_heap_trampoline, bool)
1123 
1124 DEF_FORWARDING_CDC_GETTER(deoptimization_data, FixedArray)
1125 DEF_FORWARDING_CDC_GETTER(bytecode_or_interpreter_data, HeapObject)
1126 DEF_FORWARDING_CDC_GETTER(source_position_table, ByteArray)
1127 DEF_FORWARDING_CDC_GETTER(bytecode_offset_table, ByteArray)
1128 
1129 #undef DEF_PRIMITIVE_FORWARDING_CDC_GETTER
1130 #undef DEF_FORWARDING_CDC_GETTER
1131 
1132 #endif  // V8_EXTERNAL_CODE_SPACE
1133 
1134 byte BytecodeArray::get(int index) const {
1135   DCHECK(index >= 0 && index < this->length());
1136   return ReadField<byte>(kHeaderSize + index * kCharSize);
1137 }
1138 
1139 void BytecodeArray::set(int index, byte value) {
1140   DCHECK(index >= 0 && index < this->length());
1141   WriteField<byte>(kHeaderSize + index * kCharSize, value);
1142 }
1143 
1144 void BytecodeArray::set_frame_size(int32_t frame_size) {
1145   DCHECK_GE(frame_size, 0);
1146   DCHECK(IsAligned(frame_size, kSystemPointerSize));
1147   WriteField<int32_t>(kFrameSizeOffset, frame_size);
1148 }
1149 
1150 int32_t BytecodeArray::frame_size() const {
1151   return ReadField<int32_t>(kFrameSizeOffset);
1152 }
1153 
1154 int BytecodeArray::register_count() const {
1155   return static_cast<int>(frame_size()) / kSystemPointerSize;
1156 }
1157 
1158 void BytecodeArray::set_parameter_count(int32_t number_of_parameters) {
1159   DCHECK_GE(number_of_parameters, 0);
1160   // Parameter count is stored as the size on stack of the parameters to allow
1161   // it to be used directly by generated code.
1162   WriteField<int32_t>(kParameterSizeOffset,
1163                       (number_of_parameters << kSystemPointerSizeLog2));
1164 }
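// Worked example for the encoding above (illustrative): on a 64-bit target
// kSystemPointerSizeLog2 is 3, so set_parameter_count(4) writes 4 << 3 == 32
// (the parameters' size on the stack, in bytes) into kParameterSizeOffset,
// and the parameter_count() getter further down recovers 32 >> 3 == 4.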
1165 
1166 interpreter::Register BytecodeArray::incoming_new_target_or_generator_register()
1167     const {
1168   int32_t register_operand =
1169       ReadField<int32_t>(kIncomingNewTargetOrGeneratorRegisterOffset);
1170   if (register_operand == 0) {
1171     return interpreter::Register::invalid_value();
1172   } else {
1173     return interpreter::Register::FromOperand(register_operand);
1174   }
1175 }
1176 
1177 void BytecodeArray::set_incoming_new_target_or_generator_register(
1178     interpreter::Register incoming_new_target_or_generator_register) {
1179   if (!incoming_new_target_or_generator_register.is_valid()) {
1180     WriteField<int32_t>(kIncomingNewTargetOrGeneratorRegisterOffset, 0);
1181   } else {
1182     DCHECK(incoming_new_target_or_generator_register.index() <
1183            register_count());
1184     DCHECK_NE(0, incoming_new_target_or_generator_register.ToOperand());
1185     WriteField<int32_t>(kIncomingNewTargetOrGeneratorRegisterOffset,
1186                         incoming_new_target_or_generator_register.ToOperand());
1187   }
1188 }
1189 
1190 int BytecodeArray::osr_urgency() const {
1191   return OsrUrgencyBits::decode(osr_urgency_and_install_target());
1192 }
1193 
1194 void BytecodeArray::set_osr_urgency(int urgency) {
1195   DCHECK(0 <= urgency && urgency <= BytecodeArray::kMaxOsrUrgency);
1196   STATIC_ASSERT(BytecodeArray::kMaxOsrUrgency <= OsrUrgencyBits::kMax);
1197   uint32_t value = osr_urgency_and_install_target();
1198   set_osr_urgency_and_install_target(OsrUrgencyBits::update(value, urgency));
1199 }
1200 
1201 BytecodeArray::Age BytecodeArray::bytecode_age() const {
1202   // Bytecode is aged by the concurrent marker.
1203   static_assert(kBytecodeAgeSize == kUInt16Size);
1204   return static_cast<Age>(RELAXED_READ_INT16_FIELD(*this, kBytecodeAgeOffset));
1205 }
1206 
1207 void BytecodeArray::reset_osr_urgency() { set_osr_urgency(0); }
1208 
1209 void BytecodeArray::RequestOsrAtNextOpportunity() {
1210   set_osr_urgency(kMaxOsrUrgency);
1211 }
1212 
1213 int BytecodeArray::osr_install_target() {
1214   return OsrInstallTargetBits::decode(osr_urgency_and_install_target());
1215 }
1216 
1217 void BytecodeArray::set_osr_install_target(BytecodeOffset jump_loop_offset) {
1218   DCHECK_LE(jump_loop_offset.ToInt(), length());
1219   set_osr_urgency_and_install_target(OsrInstallTargetBits::update(
1220       osr_urgency_and_install_target(), OsrInstallTargetFor(jump_loop_offset)));
1221 }
1222 
1223 void BytecodeArray::reset_osr_install_target() {
1224   uint32_t value = osr_urgency_and_install_target();
1225   set_osr_urgency_and_install_target(
1226       OsrInstallTargetBits::update(value, kNoOsrInstallTarget));
1227 }
1228 
1229 void BytecodeArray::reset_osr_urgency_and_install_target() {
1230   set_osr_urgency_and_install_target(OsrUrgencyBits::encode(0) |
1231                                      OsrInstallTargetBits::encode(0));
1232 }
1233 
1234 void BytecodeArray::set_bytecode_age(BytecodeArray::Age age) {
1235   DCHECK_GE(age, kFirstBytecodeAge);
1236   DCHECK_LE(age, kLastBytecodeAge);
1237   static_assert(kLastBytecodeAge <= kMaxInt16);
1238   static_assert(kBytecodeAgeSize == kUInt16Size);
1239   // Bytecode is aged by the concurrent marker.
1240   RELAXED_WRITE_INT16_FIELD(*this, kBytecodeAgeOffset,
1241                             static_cast<int16_t>(age));
1242 }
1243 
1244 int32_t BytecodeArray::parameter_count() const {
1245   // Parameter count is stored as the size on stack of the parameters to allow
1246   // it to be used directly by generated code.
1247   return ReadField<int32_t>(kParameterSizeOffset) >> kSystemPointerSizeLog2;
1248 }
1249 
1250 void BytecodeArray::clear_padding() {
1251   int data_size = kHeaderSize + length();
1252   memset(reinterpret_cast<void*>(address() + data_size), 0,
1253          SizeFor(length()) - data_size);
1254 }
1255 
1256 Address BytecodeArray::GetFirstBytecodeAddress() {
1257   return ptr() - kHeapObjectTag + kHeaderSize;
1258 }
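// Note on the address computation above: ptr() is the tagged pointer, i.e.
// the object's address plus kHeapObjectTag (1 for heap objects), so
// subtracting the tag yields the untagged object start and adding
// kHeaderSize skips the BytecodeArray header, giving the address of the
// bytecode at index 0.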
1259 
1260 bool BytecodeArray::HasSourcePositionTable() const {
1261   Object maybe_table = source_position_table(kAcquireLoad);
1262   return !(maybe_table.IsUndefined() || DidSourcePositionGenerationFail());
1263 }
1264 
1265 bool BytecodeArray::DidSourcePositionGenerationFail() const {
1266   return source_position_table(kAcquireLoad).IsException();
1267 }
1268 
1269 void BytecodeArray::SetSourcePositionsFailedToCollect() {
1270   set_source_position_table(GetReadOnlyRoots().exception(), kReleaseStore);
1271 }
1272 
1273 ByteArray BytecodeArray::SourcePositionTable() const {
1274   // WARNING: This function may be called from a background thread, hence
1275   // changes to how it accesses the heap can easily lead to bugs.
1276   Object maybe_table = source_position_table(kAcquireLoad);
1277   if (maybe_table.IsByteArray()) return ByteArray::cast(maybe_table);
1278   ReadOnlyRoots roots = GetReadOnlyRoots();
1279   DCHECK(maybe_table.IsUndefined(roots) || maybe_table.IsException(roots));
1280   return roots.empty_byte_array();
1281 }
1282 
1283 int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
1284 
1285 int BytecodeArray::SizeIncludingMetadata() {
1286   int size = BytecodeArraySize();
1287   size += constant_pool().Size();
1288   size += handler_table().Size();
1289   ByteArray table = SourcePositionTable();
1290   if (table.length() != 0) {
1291     size += table.Size();
1292   }
1293   return size;
1294 }
1295 
1296 DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, TranslationArray)
1297 DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
1298 DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, DeoptimizationLiteralArray)
1299 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
1300 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
1301 DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
1302 DEFINE_DEOPT_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
1303 DEFINE_DEOPT_ELEMENT_ACCESSORS(DeoptExitStart, Smi)
1304 DEFINE_DEOPT_ELEMENT_ACCESSORS(EagerDeoptCount, Smi)
1305 DEFINE_DEOPT_ELEMENT_ACCESSORS(LazyDeoptCount, Smi)
1306 
1307 DEFINE_DEOPT_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
1308 DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
1309 DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
1310 #ifdef DEBUG
1311 DEFINE_DEOPT_ENTRY_ACCESSORS(NodeId, Smi)
1312 #endif  // DEBUG
1313 
1314 BytecodeOffset DeoptimizationData::GetBytecodeOffset(int i) {
1315   return BytecodeOffset(BytecodeOffsetRaw(i).value());
1316 }
1317 
1318 void DeoptimizationData::SetBytecodeOffset(int i, BytecodeOffset value) {
1319   SetBytecodeOffsetRaw(i, Smi::FromInt(value.ToInt()));
1320 }
1321 
1322 int DeoptimizationData::DeoptCount() {
1323   return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
1324 }
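// Illustrative arithmetic for the layout implied by DeoptCount(): the first
// kFirstDeoptEntryIndex slots of the array hold the fixed header elements
// (translation array, literal array, OSR info, etc.) and each deopt entry
// occupies kDeoptEntrySize consecutive slots, so an array of length
// kFirstDeoptEntryIndex + 3 * kDeoptEntrySize describes exactly 3 entries.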
1325 
1326 inline DeoptimizationLiteralArray::DeoptimizationLiteralArray(Address ptr)
1327     : WeakFixedArray(ptr) {
1328   // No type check is possible beyond that for WeakFixedArray.
1329 }
1330 
1331 inline Object DeoptimizationLiteralArray::get(int index) const {
1332   return get(GetPtrComprCageBase(*this), index);
1333 }
1334 
1335 inline Object DeoptimizationLiteralArray::get(PtrComprCageBase cage_base,
1336                                               int index) const {
1337   MaybeObject maybe = Get(cage_base, index);
1338 
1339   // Slots in the DeoptimizationLiteralArray should only be cleared when there
1340   // is no possible code path that could need that slot. This works because the
1341   // weakly-held deoptimization literals are basically local variables that
1342   // TurboFan has decided not to keep on the stack. Thus, if the deoptimization
1343   // literal goes away, then whatever code needed it should be unreachable. The
1344   // exception is currently running Code: in that case, the deoptimization
1345   // literals array might be the only thing keeping the target object alive.
1346   // Thus, when a Code is running, we strongly mark all of its deoptimization
1347   // literals.
1348   CHECK(!maybe.IsCleared());
1349 
1350   return maybe.GetHeapObjectOrSmi();
1351 }
1352 
1353 inline void DeoptimizationLiteralArray::set(int index, Object value) {
1354   MaybeObject maybe = MaybeObject::FromObject(value);
1355   if (Code::IsWeakObjectInDeoptimizationLiteralArray(value)) {
1356     maybe = MaybeObject::MakeWeak(maybe);
1357   }
1358   Set(index, maybe);
1359 }
1360 
1361 }  // namespace internal
1362 }  // namespace v8
1363 
1364 #include "src/objects/object-macros-undef.h"
1365 
1366 #endif  // V8_OBJECTS_CODE_INL_H_
1367