• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_OBJECTS_CODE_H_
6 #define V8_OBJECTS_CODE_H_
7 
8 #include "src/base/bit-field.h"
9 #include "src/codegen/handler-table.h"
10 #include "src/objects/code-kind.h"
11 #include "src/objects/contexts.h"
12 #include "src/objects/fixed-array.h"
13 #include "src/objects/heap-object.h"
14 #include "src/objects/objects.h"
15 #include "src/objects/struct.h"
16 
17 // Has to be the last include (doesn't have include guards):
18 #include "src/objects/object-macros.h"
19 
20 namespace v8 {
21 namespace internal {
22 
23 class ByteArray;
24 class BytecodeArray;
25 class CodeDataContainer;
26 class CodeDesc;
27 
28 namespace interpreter {
29 class Register;
30 }  // namespace interpreter
31 
32 // CodeDataContainer is a container for all mutable fields associated with its
33 // referencing {Code} object. Since {Code} objects reside on write-protected
34 // pages within the heap, their header fields need to be immutable. There always
35 // is a 1-to-1 relation between {Code} and {CodeDataContainer}, the referencing
36 // field {Code::code_data_container} itself is immutable.
class CodeDataContainer : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // [next_code_link]: Link used by {Code::next_code_link} to chain code
  // objects in lists of optimized or deoptimized code; it lives here (rather
  // than in {Code}) so that it stays mutable.
  DECL_ACCESSORS(next_code_link, Object)
  // [kind_specific_flags]: Mutable flag bits, laid out by
  // Code::CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS (e.g. marked_for_deoptimization).
  DECL_INT_ACCESSORS(kind_specific_flags)

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  DECL_CAST(CodeDataContainer)

  // Dispatched behavior.
  DECL_PRINTER(CodeDataContainer)
  DECL_VERIFIER(CodeDataContainer)

// Layout description.
#define CODE_DATA_FIELDS(V)                                 \
  /* Weak pointer fields. */                                \
  V(kPointerFieldsStrongEndOffset, 0)                       \
  V(kNextCodeLinkOffset, kTaggedSize)                       \
  V(kPointerFieldsWeakEndOffset, 0)                         \
  /* Raw data fields. */                                    \
  V(kKindSpecificFlagsOffset, kInt32Size)                   \
  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
  /* Total size. */                                         \
  V(kSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
#undef CODE_DATA_FIELDS

  class BodyDescriptor;

  OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
};
72 
73 // Code describes objects with on-the-fly generated machine code.
class Code : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // Opaque data type for encapsulating code flags like kind, inline
  // cache state, and arguments count.
  using Flags = uint32_t;

  // All Code objects have the following layout:
  //
  //  +--------------------------+
  //  |          header          |
  //  | padded to code alignment |
  //  +--------------------------+  <-- raw_body_start()
  //  |       instructions       |   == raw_instruction_start()
  //  |           ...            |
  //  | padded to meta alignment |      see kMetadataAlignment
  //  +--------------------------+  <-- raw_instruction_end()
  //  |         metadata         |   == raw_metadata_start() (MS)
  //  |           ...            |
  //  |                          |  <-- MS + handler_table_offset()
  //  |                          |  <-- MS + constant_pool_offset()
  //  |                          |  <-- MS + code_comments_offset()
  //  |                          |  <-- MS + unwinding_info_offset()
  //  | padded to obj alignment  |
  //  +--------------------------+  <-- raw_metadata_end() == raw_body_end()
  //  | padded to code alignment |
  //  +--------------------------+
  //
  // In other words, the variable-size 'body' consists of 'instructions' and
  // 'metadata'.
  //
  // Note the accessor functions below may be prefixed with 'raw'. In this case,
  // raw accessors (e.g. raw_instruction_start) always refer to the on-heap
  // Code object, while camel-case accessors (e.g. InstructionStart) may refer
  // to an off-heap area in the case of embedded builtins.
  //
  // Embedded builtins are on-heap Code objects, with an out-of-line body
  // section. The on-heap Code object contains an essentially empty body
  // section, while accessors, as mentioned above, redirect to the off-heap
  // area. Metadata table offsets remain relative to MetadataStart(), i.e. they
  // point into the off-heap metadata section. The off-heap layout is described
  // in detail in the EmbeddedData class, but at a high level one can assume a
  // dedicated, out-of-line, instruction and metadata section for each embedded
  // builtin *in addition* to the on-heap Code object:
  //
  //  +--------------------------+  <-- InstructionStart()
  //  |   off-heap instructions  |
  //  |           ...            |
  //  +--------------------------+  <-- InstructionEnd()
  //
  //  +--------------------------+  <-- MetadataStart() (MS)
  //  |    off-heap metadata     |
  //  |           ...            |  <-- MS + handler_table_offset()
  //  |                          |  <-- MS + constant_pool_offset()
  //  |                          |  <-- MS + code_comments_offset()
  //  |                          |  <-- MS + unwinding_info_offset()
  //  +--------------------------+  <-- MetadataEnd()

  // Constants for use in static asserts, stating whether the body is adjacent,
  // i.e. instructions and metadata areas are adjacent.
  static constexpr bool kOnHeapBodyIsContiguous = true;
  static constexpr bool kOffHeapBodyIsContiguous = false;
  static constexpr bool kBodyIsContiguous =
      kOnHeapBodyIsContiguous && kOffHeapBodyIsContiguous;

  // Accessors for the on-heap body (instructions + metadata); see the layout
  // diagram above.
  inline Address raw_body_start() const;
  inline Address raw_body_end() const;
  inline int raw_body_size() const;

  inline Address raw_instruction_start() const;
  inline Address InstructionStart() const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionStart() const;

  inline Address raw_instruction_end() const;
  inline Address InstructionEnd() const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionEnd() const;

  inline int raw_instruction_size() const;
  inline void set_raw_instruction_size(int value);
  inline int InstructionSize() const;
  V8_EXPORT_PRIVATE int OffHeapInstructionSize() const;

  inline Address raw_metadata_start() const;
  inline Address MetadataStart() const;
  V8_EXPORT_PRIVATE Address OffHeapMetadataStart() const;
  inline Address raw_metadata_end() const;
  inline Address MetadataEnd() const;
  V8_EXPORT_PRIVATE Address OffHeapMetadataEnd() const;
  inline int raw_metadata_size() const;
  inline void set_raw_metadata_size(int value);
  inline int MetadataSize() const;
  int OffHeapMetadataSize() const;

  // The metadata section is aligned to this value.
  static constexpr int kMetadataAlignment = kIntSize;

  // [safepoint_table_offset]: The offset where the safepoint table starts.
  // Always 0, i.e. the safepoint table is the first metadata table.
  inline int safepoint_table_offset() const { return 0; }
  Address SafepointTableAddress() const;
  int safepoint_table_size() const;
  bool has_safepoint_table() const;

  // [handler_table_offset]: The offset where the exception handler table
  // starts.
  inline int handler_table_offset() const;
  inline void set_handler_table_offset(int offset);
  Address HandlerTableAddress() const;
  int handler_table_size() const;
  bool has_handler_table() const;

  // [constant_pool offset]: Offset of the constant pool.
  inline int constant_pool_offset() const;
  inline void set_constant_pool_offset(int offset);
  inline Address constant_pool() const;
  int constant_pool_size() const;
  bool has_constant_pool() const;

  // [code_comments_offset]: Offset of the code comment section.
  inline int code_comments_offset() const;
  inline void set_code_comments_offset(int offset);
  inline Address code_comments() const;
  V8_EXPORT_PRIVATE int code_comments_size() const;
  V8_EXPORT_PRIVATE bool has_code_comments() const;

  // [unwinding_info_offset]: Offset of the unwinding info section.
  inline int32_t unwinding_info_offset() const;
  inline void set_unwinding_info_offset(int32_t offset);
  inline Address unwinding_info_start() const;
  inline Address unwinding_info_end() const;
  inline int unwinding_info_size() const;
  inline bool has_unwinding_info() const;

#ifdef ENABLE_DISASSEMBLER
  const char* GetName(Isolate* isolate) const;
  V8_EXPORT_PRIVATE void Disassemble(const char* name, std::ostream& os,
                                     Isolate* isolate,
                                     Address current_pc = kNullAddress);
#endif

  // [relocation_info]: Code relocation information
  DECL_ACCESSORS(relocation_info, ByteArray)

  // This function should be called only from GC.
  void ClearEmbeddedObjects(Heap* heap);

  // [deoptimization_data]: Array containing data for deopt.
  DECL_ACCESSORS(deoptimization_data, FixedArray)

  // [source_position_table]: ByteArray for the source positions table.
  DECL_ACCESSORS(source_position_table, Object)

  // If source positions have not been collected or an exception has been thrown
  // this will return empty_byte_array.
  inline ByteArray SourcePositionTable() const;

  // [code_data_container]: A container indirection for all mutable fields.
  DECL_RELEASE_ACQUIRE_ACCESSORS(code_data_container, CodeDataContainer)

  // [next_code_link]: Link for lists of optimized or deoptimized code.
  // Note that this field is stored in the {CodeDataContainer} to be mutable.
  inline Object next_code_link() const;
  inline void set_next_code_link(Object value);

  // Unchecked accessors to be used during GC.
  inline ByteArray unchecked_relocation_info() const;

  inline int relocation_size() const;

  // [kind]: Access to specific code kind.
  inline CodeKind kind() const;

  // Testers for specific code kinds.
  inline bool is_optimized_code() const;
  inline bool is_wasm_code() const;

  // Testers for interpreter builtins.
  inline bool is_interpreter_trampoline_builtin() const;

  // Tells whether the code checks the optimization marker in the function's
  // feedback vector.
  inline bool checks_optimization_marker() const;

  // Tells whether the outgoing parameters of this code are tagged pointers.
  inline bool has_tagged_params() const;

  // [is_turbofanned]: Tells whether the code object was generated by the
  // TurboFan optimizing compiler.
  inline bool is_turbofanned() const;

  // [can_have_weak_objects]: If CodeKindIsOptimizedJSFunction(kind), tells
  // whether the embedded objects in code should be treated weakly.
  inline bool can_have_weak_objects() const;
  inline void set_can_have_weak_objects(bool value);

  // [builtin_index]: For builtins, tells which builtin index the code object
  // has. The builtin index is a non-negative integer for builtins, and
  // Builtins::kNoBuiltinId (-1) otherwise.
  inline int builtin_index() const;
  inline void set_builtin_index(int id);
  inline bool is_builtin() const;

  // [inlined_bytecode_size]: Raw size counter; stored in the header (see
  // kInlinedBytecodeSizeOffset below).
  inline unsigned inlined_bytecode_size() const;
  inline void set_inlined_bytecode_size(unsigned size);

  inline bool has_safepoint_info() const;

  // [stack_slots]: If {has_safepoint_info()}, the number of stack slots
  // reserved in the code prologue.
  inline int stack_slots() const;

  // [marked_for_deoptimization]: If CodeKindCanDeoptimize(kind), tells whether
  // the code is going to be deoptimized.
  inline bool marked_for_deoptimization() const;
  inline void set_marked_for_deoptimization(bool flag);

  // [deoptimization_count]: If CodeKindCanDeoptimize(kind). In turboprop we
  // retain the deoptimized code on soft deopts for a certain number of soft
  // deopts. This field keeps track of the number of deoptimizations we have
  // seen so far.
  inline int deoptimization_count() const;
  inline void increment_deoptimization_count();

  // [embedded_objects_cleared]: If CodeKindIsOptimizedJSFunction(kind), tells
  // whether the embedded objects in the code marked for deoptimization were
  // cleared. Note that embedded_objects_cleared() implies
  // marked_for_deoptimization().
  inline bool embedded_objects_cleared() const;
  inline void set_embedded_objects_cleared(bool flag);

  // [deopt_already_counted]: If CodeKindCanDeoptimize(kind), tells whether
  // the code was already deoptimized.
  inline bool deopt_already_counted() const;
  inline void set_deopt_already_counted(bool flag);

  // [is_promise_rejection]: For kind BUILTIN tells whether the
  // exception thrown by the code will lead to promise rejection or
  // uncaught if both this and is_exception_caught is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_promise_rejection(bool flag);

  // [is_exception_caught]: For kind BUILTIN tells whether the
  // exception thrown by the code will be caught internally or
  // uncaught if both this and is_promise_rejection is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_exception_caught(bool flag);

  // [is_off_heap_trampoline]: For kind BUILTIN tells whether
  // this is a trampoline to an off-heap builtin.
  inline bool is_off_heap_trampoline() const;

  // Get the safepoint entry for the given pc.
  SafepointEntry GetSafepointEntry(Address pc);

  // The entire code object including its header is copied verbatim to the
  // snapshot so that it can be written in one, fast, memcpy during
  // deserialization. The deserializer will overwrite some pointers, rather
  // like a runtime linker, but the random allocation addresses used in the
  // mksnapshot process would still be present in the unlinked snapshot data,
  // which would make snapshot production non-reproducible. This method wipes
  // out the to-be-overwritten header data for reproducible snapshots.
  inline void WipeOutHeader();

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic. Depending on the V8 build mode there could be no padding.
  inline void clear_padding();
  // Initialize the flags field. Similar to clear_padding above this ensures
  // that the snapshot content is deterministic.
  inline void initialize_flags(CodeKind kind, bool is_turbofanned,
                               int stack_slots, bool is_off_heap_trampoline);

  // Convert a target address into a code object.
  static inline Code GetCodeFromTargetAddress(Address address);

  // Convert an entry address into an object.
  static inline Code GetObjectFromEntryAddress(Address location_of_address);

  // Returns the size of code and its metadata. This includes the size of code
  // relocation information, deoptimization data.
  inline int SizeIncludingMetadata() const;

  // Returns the address of the first relocation info (read backwards!).
  inline byte* relocation_start() const;

  // Returns the address right after the relocation info (read backwards!).
  inline byte* relocation_end() const;

  // Code entry point.
  inline Address entry() const;

  // Returns true if pc is inside this object's instructions.
  inline bool contains(Address pc);

  // Relocate the code by delta bytes. Called to signal that this code
  // object has been moved by delta bytes.
  void Relocate(intptr_t delta);

  // Migrate code from desc without flushing the instruction cache.
  void CopyFromNoFlush(Heap* heap, const CodeDesc& desc);

  // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
  // exactly the same size as the RelocInfo in |desc|.
  static inline void CopyRelocInfoToByteArray(ByteArray dest,
                                              const CodeDesc& desc);

  // Flushes the instruction cache for the executable instructions of this code
  // object. Make sure to call this while the code is still writable.
  void FlushICache() const;

  // Returns the object size for a given body (used for allocation).
  // Header plus body, rounded up to code alignment.
  static int SizeFor(int body_size) {
    return RoundUp(kHeaderSize + body_size, kCodeAlignment);
  }

  DECL_CAST(Code)

  // Dispatched behavior.
  inline int CodeSize() const;

  DECL_PRINTER(Code)
  DECL_VERIFIER(Code)

  bool CanDeoptAt(Address pc);

  void SetMarkedForDeoptimization(const char* reason);

  inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();

  bool IsIsolateIndependent(Isolate* isolate);
  bool IsNativeContextIndependent(Isolate* isolate);

  inline bool CanContainWeakObjects();

  inline bool IsWeakObject(HeapObject object);

  static inline bool IsWeakObjectInOptimizedCode(HeapObject object);

  // Returns false if this is an embedded builtin Code object that's in
  // read_only_space and hence doesn't have execute permissions.
  inline bool IsExecutable();

  // Returns true if the function is inlined in the code.
  bool Inlines(SharedFunctionInfo sfi);

  class OptimizedCodeIterator;

  // Layout description.
#define CODE_FIELDS(V)                                                        \
  V(kRelocationInfoOffset, kTaggedSize)                                       \
  V(kDeoptimizationDataOffset, kTaggedSize)                                   \
  V(kSourcePositionTableOffset, kTaggedSize)                                  \
  V(kCodeDataContainerOffset, kTaggedSize)                                    \
  /* Data or code not directly visited by GC starts here. */                  \
  /* The serializer needs to copy bytes starting from here verbatim. */       \
  /* Objects embedded into code are visited via reloc info. */                \
  V(kDataStart, 0)                                                            \
  V(kInstructionSizeOffset, kIntSize)                                         \
  V(kMetadataSizeOffset, kIntSize)                                            \
  V(kFlagsOffset, kInt32Size)                                                 \
  V(kBuiltinIndexOffset, kIntSize)                                            \
  V(kInlinedBytecodeSizeOffset, kIntSize)                                     \
  /* Offsets describing inline metadata tables, relative to MetadataStart. */ \
  V(kHandlerTableOffsetOffset, kIntSize)                                      \
  V(kConstantPoolOffsetOffset,                                                \
    FLAG_enable_embedded_constant_pool ? kIntSize : 0)                        \
  V(kCodeCommentsOffsetOffset, kIntSize)                                      \
  V(kUnwindingInfoOffsetOffset, kInt32Size)                                   \
  V(kUnalignedHeaderSize, 0)                                                  \
  /* Add padding to align the instruction start following right after */      \
  /* the Code object header. */                                               \
  V(kOptionalPaddingOffset, CODE_POINTER_PADDING(kOptionalPaddingOffset))     \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
#undef CODE_FIELDS

  // This documents the amount of free space we have in each Code object header
  // due to padding for code alignment.
#if V8_TARGET_ARCH_ARM64
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 12 : 24;
#elif V8_TARGET_ARCH_MIPS64
  static constexpr int kHeaderPaddingSize = 24;
#elif V8_TARGET_ARCH_X64
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 12 : 24;
#elif V8_TARGET_ARCH_ARM
  static constexpr int kHeaderPaddingSize = 12;
#elif V8_TARGET_ARCH_IA32
  static constexpr int kHeaderPaddingSize = 12;
#elif V8_TARGET_ARCH_MIPS
  static constexpr int kHeaderPaddingSize = 12;
#elif V8_TARGET_ARCH_PPC64
  static constexpr int kHeaderPaddingSize =
      FLAG_enable_embedded_constant_pool ? (COMPRESS_POINTERS_BOOL ? 8 : 20)
                                         : (COMPRESS_POINTERS_BOOL ? 12 : 24);
#elif V8_TARGET_ARCH_S390X
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 12 : 24;
#else
#error Unknown architecture.
#endif
  STATIC_ASSERT(FIELD_SIZE(kOptionalPaddingOffset) == kHeaderPaddingSize);

  class BodyDescriptor;

  // Flags layout.  base::BitField<type, shift, size>.
#define CODE_FLAGS_BIT_FIELDS(V, _)    \
  V(KindField, CodeKind, 4, _)         \
  V(IsTurbofannedField, bool, 1, _)    \
  V(StackSlotsField, int, 24, _)       \
  V(IsOffHeapTrampoline, bool, 1, _)
  DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
#undef CODE_FLAGS_BIT_FIELDS
  STATIC_ASSERT(kCodeKindCount <= KindField::kNumValues);
  STATIC_ASSERT(CODE_FLAGS_BIT_FIELDS_Ranges::kBitsCount == 30);
  STATIC_ASSERT(CODE_FLAGS_BIT_FIELDS_Ranges::kBitsCount <=
                FIELD_SIZE(kFlagsOffset) * kBitsPerByte);

  // KindSpecificFlags layout. These bits live in the associated
  // CodeDataContainer's kind_specific_flags field (see static assert below).
#define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
  V(MarkedForDeoptimizationField, bool, 1, _)     \
  V(EmbeddedObjectsClearedField, bool, 1, _)      \
  V(DeoptAlreadyCountedField, bool, 1, _)         \
  V(CanHaveWeakObjectsField, bool, 1, _)          \
  V(IsPromiseRejectionField, bool, 1, _)          \
  V(IsExceptionCaughtField, bool, 1, _)           \
  V(DeoptCountField, int, 4, _)
  DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
#undef CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS
  STATIC_ASSERT(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS_Ranges::kBitsCount == 10);
  STATIC_ASSERT(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS_Ranges::kBitsCount <=
                FIELD_SIZE(CodeDataContainer::kKindSpecificFlagsOffset) *
                    kBitsPerByte);

  // The {marked_for_deoptimization} field is accessed from generated code.
  static const int kMarkedForDeoptimizationBit =
      MarkedForDeoptimizationField::kShift;

  static const int kArgumentsBits = 16;
  // Reserve one argument count value as the "don't adapt arguments" sentinel.
  static const int kMaxArguments = (1 << kArgumentsBits) - 2;

 private:
  friend class RelocIterator;

  bool is_promise_rejection() const;
  bool is_exception_caught() const;

  OBJECT_CONSTRUCTORS(Code, HeapObject);
};
520 
// Iterates over the optimized Code objects of an isolate. Next() returns
// successive code objects; the iteration state consists of the native context
// currently being walked and the current code object (presumably following
// the Code::next_code_link chains -- confirm against the .cc definition).
class Code::OptimizedCodeIterator {
 public:
  explicit OptimizedCodeIterator(Isolate* isolate);
  // Not copyable: the iterator carries raw iteration state.
  OptimizedCodeIterator(const OptimizedCodeIterator&) = delete;
  OptimizedCodeIterator& operator=(const OptimizedCodeIterator&) = delete;
  // Advances the iterator and returns the next Code object.
  Code Next();

 private:
  NativeContext next_context_;  // Next native context to visit.
  Code current_code_;           // Code object at the current position.
  Isolate* isolate_;

  // Heap allocation (and hence GC) is disallowed while the iterator is live.
  DISALLOW_HEAP_ALLOCATION(no_gc)
};
535 
// AbstractCode is a dispatching view over the two kinds of executable code in
// V8: machine-code {Code} objects and interpreter {BytecodeArray}s (see
// GetCode / GetBytecodeArray below).
class AbstractCode : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE

  // Translate an offset within this code into a source position
  // (SourceStatementPosition returns the position of the enclosing statement).
  int SourcePosition(int offset);
  int SourceStatementPosition(int offset);

  // Returns the address of the first instruction.
  inline Address raw_instruction_start();

  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart();

  // Returns the address right after the last instruction.
  inline Address raw_instruction_end();

  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd();

  // Returns the size of the code instructions.
  inline int raw_instruction_size();

  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
  // at instruction_start.
  inline int InstructionSize();

  // Return the source position table.
  inline ByteArray source_position_table();

  void DropStackFrameCache();

  // Returns the size of instructions and the metadata.
  inline int SizeIncludingMetadata();

  // Returns true if pc is inside this object's instructions.
  inline bool contains(Address pc);

  // Returns the kind of the code.
  inline CodeKind kind();

  DECL_CAST(AbstractCode)
  // Accessors for the underlying representation; which one is valid depends
  // on what this AbstractCode actually wraps.
  inline Code GetCode();
  inline BytecodeArray GetBytecodeArray();

  // Max loop nesting marker used to postpone OSR. We don't take loop
  // nesting that is deeper than 5 levels into account.
  static const int kMaxLoopNestingMarker = 6;

  OBJECT_CONSTRUCTORS(AbstractCode, HeapObject);
};
593 
594 // Dependent code is a singly linked list of weak fixed arrays. Each array
595 // contains weak pointers to code objects for one dependent group. The suffix of
596 // the array can be filled with the undefined value if the number of codes is
597 // less than the length of the array.
598 //
599 // +------+-----------------+--------+--------+-----+--------+-----------+-----+
600 // | next | count & group 1 | code 1 | code 2 | ... | code n | undefined | ... |
601 // +------+-----------------+--------+--------+-----+--------+-----------+-----+
602 //    |
603 //    V
604 // +------+-----------------+--------+--------+-----+--------+-----------+-----+
605 // | next | count & group 2 | code 1 | code 2 | ... | code m | undefined | ... |
606 // +------+-----------------+--------+--------+-----+--------+-----------+-----+
607 //    |
608 //    V
609 // empty_weak_fixed_array()
610 //
611 // The list of weak fixed arrays is ordered by dependency groups.
612 
class DependentCode : public WeakFixedArray {
 public:
  DECL_CAST(DependentCode)

  // The kinds of dependencies code can register on an object; each weak fixed
  // array in the list holds the codes of exactly one group.
  enum DependencyGroup {
    // Group of code that embed a transition to this map, and depend on being
    // deoptimized when the transition is replaced by a new version.
    kTransitionGroup,
    // Group of code that omit run-time prototype checks for prototypes
    // described by this map. The group is deoptimized whenever an object
    // described by this map changes shape (and transitions to a new map),
    // possibly invalidating the assumptions embedded in the code.
    kPrototypeCheckGroup,
    // Group of code that depends on global property values in property cells
    // not being changed.
    kPropertyCellChangedGroup,
    // Group of code that omit run-time checks for field(s) introduced by
    // this map, i.e. for the field type.
    kFieldTypeGroup,
    kFieldConstGroup,
    kFieldRepresentationGroup,
    // Group of code that omit run-time type checks for initial maps of
    // constructors.
    kInitialMapChangedGroup,
    // Group of code that depends on tenuring information in AllocationSites
    // not being changed.
    kAllocationSiteTenuringChangedGroup,
    // Group of code that depends on element transition information in
    // AllocationSites not being changed.
    kAllocationSiteTransitionChangedGroup
  };

  // Register a dependency of {code} on {object}, of the kind given by {group}.
  V8_EXPORT_PRIVATE static void InstallDependency(Isolate* isolate,
                                                  const MaybeObjectHandle& code,
                                                  Handle<HeapObject> object,
                                                  DependencyGroup group);

  // Deoptimizes the code registered under {group} -- presumably marks it and
  // triggers deoptimization; confirm against the .cc definition.
  void DeoptimizeDependentCodeGroup(DependencyGroup group);

  bool MarkCodeForDeoptimization(DependencyGroup group);

  // The following low-level accessors are exposed only for tests.
  inline DependencyGroup group();
  inline MaybeObject object_at(int i);
  inline int count();
  inline DependentCode next_link();

 private:
  static const char* DependencyGroupName(DependencyGroup group);

  // Get/Set {object}'s {DependentCode}.
  static DependentCode GetDependentCode(Handle<HeapObject> object);
  static void SetDependentCode(Handle<HeapObject> object,
                               Handle<DependentCode> dep);

  static Handle<DependentCode> New(Isolate* isolate, DependencyGroup group,
                                   const MaybeObjectHandle& object,
                                   Handle<DependentCode> next);
  static Handle<DependentCode> EnsureSpace(Isolate* isolate,
                                           Handle<DependentCode> entries);
  static Handle<DependentCode> InsertWeakCode(Isolate* isolate,
                                              Handle<DependentCode> entries,
                                              DependencyGroup group,
                                              const MaybeObjectHandle& code);

  // Compact by removing cleared weak cells and return true if there was
  // any cleared weak cell.
  bool Compact();

  // Growth policy for the backing array: grow by one entry while small
  // (< 5 entries), then by 25%.
  static int Grow(int number_of_entries) {
    if (number_of_entries < 5) return number_of_entries + 1;
    return number_of_entries * 5 / 4;
  }

  // Number of distinct DependencyGroup values.
  static const int kGroupCount = kAllocationSiteTransitionChangedGroup + 1;
  // Array slot layout: [0] link to next DependentCode, [1] flags
  // (count & group), [2..] weak code entries.
  static const int kNextLinkIndex = 0;
  static const int kFlagsIndex = 1;
  static const int kCodesStartIndex = 2;

  inline void set_next_link(DependentCode next);
  inline void set_count(int value);
  inline void set_object_at(int i, MaybeObject object);
  inline void clear_at(int i);
  inline void copy(int from, int to);

  // The flags slot packs the dependency group and the entry count.
  inline int flags();
  inline void set_flags(int flags);
  using GroupField = base::BitField<int, 0, 5>;
  using CountField = base::BitField<int, 5, 27>;
  STATIC_ASSERT(kGroupCount <= GroupField::kMax + 1);

  OBJECT_CONSTRUCTORS(DependentCode, WeakFixedArray);
};
707 
708 // BytecodeArray represents a sequence of interpreter bytecodes.
709 class BytecodeArray : public FixedArrayBase {
710  public:
711   enum Age {
712     kNoAgeBytecodeAge = 0,
713     kQuadragenarianBytecodeAge,
714     kQuinquagenarianBytecodeAge,
715     kSexagenarianBytecodeAge,
716     kSeptuagenarianBytecodeAge,
717     kOctogenarianBytecodeAge,
718     kAfterLastBytecodeAge,
719     kFirstBytecodeAge = kNoAgeBytecodeAge,
720     kLastBytecodeAge = kAfterLastBytecodeAge - 1,
721     kBytecodeAgeCount = kAfterLastBytecodeAge - kFirstBytecodeAge - 1,
722     kIsOldBytecodeAge = kSexagenarianBytecodeAge
723   };
724 
SizeFor(int length)725   static constexpr int SizeFor(int length) {
726     return OBJECT_POINTER_ALIGN(kHeaderSize + length);
727   }
728 
729   // Setter and getter
730   inline byte get(int index) const;
731   inline void set(int index, byte value);
732 
733   // Returns data start address.
734   inline Address GetFirstBytecodeAddress();
735 
736   // Accessors for frame size.
737   inline int32_t frame_size() const;
738   inline void set_frame_size(int32_t frame_size);
739 
740   // Accessor for register count (derived from frame_size).
741   inline int register_count() const;
742 
743   // Accessors for parameter count (including implicit 'this' receiver).
744   inline int32_t parameter_count() const;
745   inline void set_parameter_count(int32_t number_of_parameters);
746 
747   // Register used to pass the incoming new.target or generator object from the
748   // fucntion call.
749   inline interpreter::Register incoming_new_target_or_generator_register()
750       const;
751   inline void set_incoming_new_target_or_generator_register(
752       interpreter::Register incoming_new_target_or_generator_register);
753 
754   // Accessors for OSR loop nesting level.
755   inline int osr_loop_nesting_level() const;
756   inline void set_osr_loop_nesting_level(int depth);
757 
758   // Accessors for bytecode's code age.
759   inline Age bytecode_age() const;
760   inline void set_bytecode_age(Age age);
761 
762   // Accessors for the constant pool.
763   DECL_ACCESSORS(constant_pool, FixedArray)
764 
765   // Accessors for handler table containing offsets of exception handlers.
766   DECL_ACCESSORS(handler_table, ByteArray)
767 
768   // Accessors for source position table. Can contain:
769   // * undefined (initial value)
770   // * empty_byte_array (for bytecode generated for functions that will never
771   // have source positions, e.g. native functions).
772   // * ByteArray (when source positions have been collected for the bytecode)
773   // * exception (when an error occurred while explicitly collecting source
774   // positions for pre-existing bytecode).
775   DECL_RELEASE_ACQUIRE_ACCESSORS(source_position_table, Object)
776 
777   inline bool HasSourcePositionTable() const;
778   inline bool DidSourcePositionGenerationFail() const;
779 
780   // If source positions have not been collected or an exception has been thrown
781   // this will return empty_byte_array.
782   inline ByteArray SourcePositionTable() const;
783 
784   // Indicates that an attempt was made to collect source positions, but that it
785   // failed most likely due to stack exhaustion. When in this state
786   // |SourcePositionTable| will return an empty byte array rather than crashing
787   // as it would if no attempt was ever made to collect source positions.
788   inline void SetSourcePositionsFailedToCollect();
789 
790   DECL_CAST(BytecodeArray)
791 
792   // Dispatched behavior.
793   inline int BytecodeArraySize();
794 
795   inline int raw_instruction_size();
796 
797   // Returns the size of bytecode and its metadata. This includes the size of
798   // bytecode, constant pool, source position table, and handler table.
799   inline int SizeIncludingMetadata();
800 
801   DECL_PRINTER(BytecodeArray)
802   DECL_VERIFIER(BytecodeArray)
803 
804   V8_EXPORT_PRIVATE void Disassemble(std::ostream& os);
805 
806   void CopyBytecodesTo(BytecodeArray to);
807 
808   // Bytecode aging
809   V8_EXPORT_PRIVATE bool IsOld() const;
810   V8_EXPORT_PRIVATE void MakeOlder();
811 
812   // Clear uninitialized padding space. This ensures that the snapshot content
813   // is deterministic.
814   inline void clear_padding();
815 
816   // Layout description.
817   DEFINE_FIELD_OFFSET_CONSTANTS(FixedArrayBase::kHeaderSize,
818                                 TORQUE_GENERATED_BYTECODE_ARRAY_FIELDS)
819 
820   // InterpreterEntryTrampoline expects these fields to be next to each other
821   // and writes a 16-bit value to reset them.
822   STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
823                 kOsrNestingLevelOffset + kCharSize);
824 
825   // Maximal memory consumption for a single BytecodeArray.
826   static const int kMaxSize = 512 * MB;
827   // Maximal length of a single BytecodeArray.
828   static const int kMaxLength = kMaxSize - kHeaderSize;
829 
830   class BodyDescriptor;
831 
832   OBJECT_CONSTRUCTORS(BytecodeArray, FixedArrayBase);
833 };
834 
835 // DeoptimizationData is a fixed array used to hold the deoptimization data for
836 // optimized code.  It also contains information about functions that were
837 // inlined.  If N different functions were inlined then the first N elements of
838 // the literal array will contain these functions.
839 //
840 // It can be empty.
841 class DeoptimizationData : public FixedArray {
842  public:
843   // Layout description.  Indices in the array.
844   static const int kTranslationByteArrayIndex = 0;
845   static const int kInlinedFunctionCountIndex = 1;
846   static const int kLiteralArrayIndex = 2;
847   static const int kOsrBytecodeOffsetIndex = 3;
848   static const int kOsrPcOffsetIndex = 4;
849   static const int kOptimizationIdIndex = 5;
850   static const int kSharedFunctionInfoIndex = 6;
851   static const int kInliningPositionsIndex = 7;
852   static const int kDeoptExitStartIndex = 8;
853   static const int kNonLazyDeoptCountIndex = 9;
854   static const int kFirstDeoptEntryIndex = 10;
855 
856   // Offsets of deopt entry elements relative to the start of the entry.
857   static const int kBytecodeOffsetRawOffset = 0;
858   static const int kTranslationIndexOffset = 1;
859   static const int kPcOffset = 2;
860   static const int kDeoptEntrySize = 3;
861 
862 // Simple element accessors.
863 #define DECL_ELEMENT_ACCESSORS(name, type) \
864   inline type name() const;                \
865   inline void Set##name(type value);
866 
867   DECL_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
868   DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
869   DECL_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
870   DECL_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
871   DECL_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
872   DECL_ELEMENT_ACCESSORS(OptimizationId, Smi)
873   DECL_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
874   DECL_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
875   DECL_ELEMENT_ACCESSORS(DeoptExitStart, Smi)
876   DECL_ELEMENT_ACCESSORS(NonLazyDeoptCount, Smi)
877 
878 #undef DECL_ELEMENT_ACCESSORS
879 
880 // Accessors for elements of the ith deoptimization entry.
881 #define DECL_ENTRY_ACCESSORS(name, type) \
882   inline type name(int i) const;         \
883   inline void Set##name(int i, type value);
884 
885   DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
886   DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
887   DECL_ENTRY_ACCESSORS(Pc, Smi)
888 
889 #undef DECL_ENTRY_ACCESSORS
890 
891   inline BailoutId BytecodeOffset(int i);
892 
893   inline void SetBytecodeOffset(int i, BailoutId value);
894 
895   inline int DeoptCount();
896 
897   static const int kNotInlinedIndex = -1;
898 
899   // Returns the inlined function at the given position in LiteralArray, or the
900   // outer function if index == kNotInlinedIndex.
901   class SharedFunctionInfo GetInlinedFunction(int index);
902 
903   // Allocates a DeoptimizationData.
904   static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
905                                         AllocationType allocation);
906 
907   // Return an empty DeoptimizationData.
908   V8_EXPORT_PRIVATE static Handle<DeoptimizationData> Empty(Isolate* isolate);
909 
910   DECL_CAST(DeoptimizationData)
911 
912 #ifdef ENABLE_DISASSEMBLER
913   void DeoptimizationDataPrint(std::ostream& os);  // NOLINT
914 #endif
915 
916  private:
IndexForEntry(int i)917   static int IndexForEntry(int i) {
918     return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
919   }
920 
LengthFor(int entry_count)921   static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }
922 
923   OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray);
924 };
925 
926 }  // namespace internal
927 }  // namespace v8
928 
929 #include "src/objects/object-macros-undef.h"
930 
931 #endif  // V8_OBJECTS_CODE_H_
932