1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_COMMON_GLOBALS_H_
6 #define V8_COMMON_GLOBALS_H_
7 
8 #include <stddef.h>
9 #include <stdint.h>
10 
11 #include <limits>
12 #include <ostream>
13 
14 #include "include/v8-internal.h"
15 #include "src/base/atomic-utils.h"
16 #include "src/base/build_config.h"
17 #include "src/base/flags.h"
18 #include "src/base/logging.h"
19 #include "src/base/macros.h"
20 
21 #define V8_INFINITY std::numeric_limits<double>::infinity()
22 
23 namespace v8 {
24 
25 namespace base {
26 class Mutex;
27 class RecursiveMutex;
28 }  // namespace base
29 
30 namespace internal {
31 
32 constexpr int KB = 1024;
33 constexpr int MB = KB * 1024;
34 constexpr int GB = MB * 1024;
35 
36 // Determine whether we are running in a simulated environment.
37 // Setting USE_SIMULATOR explicitly from the build script will force
38 // the use of a simulated environment.
39 #if !defined(USE_SIMULATOR)
40 #if (V8_TARGET_ARCH_ARM64 && !V8_HOST_ARCH_ARM64)
41 #define USE_SIMULATOR 1
42 #endif
43 #if (V8_TARGET_ARCH_ARM && !V8_HOST_ARCH_ARM)
44 #define USE_SIMULATOR 1
45 #endif
46 #if (V8_TARGET_ARCH_PPC && !V8_HOST_ARCH_PPC)
47 #define USE_SIMULATOR 1
48 #endif
49 #if (V8_TARGET_ARCH_PPC64 && !V8_HOST_ARCH_PPC64)
50 #define USE_SIMULATOR 1
51 #endif
52 #if (V8_TARGET_ARCH_MIPS && !V8_HOST_ARCH_MIPS)
53 #define USE_SIMULATOR 1
54 #endif
55 #if (V8_TARGET_ARCH_MIPS64 && !V8_HOST_ARCH_MIPS64)
56 #define USE_SIMULATOR 1
57 #endif
58 #if (V8_TARGET_ARCH_S390 && !V8_HOST_ARCH_S390)
59 #define USE_SIMULATOR 1
60 #endif
61 #endif
62 
63 // Determine whether the architecture uses an embedded constant pool
64 // (contiguous constant pool embedded in code object).
65 #if V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_PPC64
66 #define V8_EMBEDDED_CONSTANT_POOL true
67 #else
68 #define V8_EMBEDDED_CONSTANT_POOL false
69 #endif
70 
71 #if V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64
72 // Set stack limit lower for ARM and ARM64 than for other architectures because:
73 //  - on Arm stack allocating MacroAssembler takes 120K bytes.
74 //    See issue crbug.com/405338
75 //  - on Arm64 when running in single-process mode for Android WebView, when
76 //    initializing V8 we already have a large stack and so have to set the
77 //    limit lower. See issue crbug.com/v8/10575
78 #define V8_DEFAULT_STACK_SIZE_KB 864
79 #else
80 // Slightly less than 1MB, since Windows' default stack size for
81 // the main execution thread is 1MB for both 32 and 64-bit.
82 #define V8_DEFAULT_STACK_SIZE_KB 984
83 #endif
84 
85 // Minimum stack size in KB required by compilers.
86 constexpr int kStackSpaceRequiredForCompilation = 40;
87 
88 // In order to emit more efficient stack checks in optimized code,
89 // deoptimization may implicitly exceed the V8 stack limit by this many bytes.
90 // Stack checks in functions where the difference between the optimized and
91 // unoptimized stack frame sizes is <= this slack can emit the simple check.
92 constexpr int kStackLimitSlackForDeoptimizationInBytes = 256;
93 
94 // Sanity-check, assuming that we aim for a real OS stack size of at least 1MB.
95 STATIC_ASSERT(V8_DEFAULT_STACK_SIZE_KB * KB +
96                   kStackLimitSlackForDeoptimizationInBytes <=
97               MB);
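// Added worked example (not part of the original header): with the default of
// 984 KB, the assertion above checks 984 * 1024 + 256 = 1007872 <= 1048576
// bytes (1 MB); the ARM/ARM64 default of 864 KB leaves even more headroom.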
98 
99 // Determine whether double field unboxing feature is enabled.
100 #if V8_TARGET_ARCH_64_BIT && !defined(V8_COMPRESS_POINTERS)
101 #define V8_DOUBLE_FIELDS_UNBOXING false
102 #else
103 #define V8_DOUBLE_FIELDS_UNBOXING false
104 #endif
105 
106 // Determine whether dict mode prototypes feature is enabled.
107 #ifdef V8_DICT_MODE_PROTOTYPES
108 #define V8_DICT_MODE_PROTOTYPES_BOOL true
109 #else
110 #define V8_DICT_MODE_PROTOTYPES_BOOL false
111 #endif
112 
113 // Determine whether tagged pointers are 8 bytes (used in Torque layouts for
114 // choosing where to insert padding).
115 #if V8_TARGET_ARCH_64_BIT && !defined(V8_COMPRESS_POINTERS)
116 #define TAGGED_SIZE_8_BYTES true
117 #else
118 #define TAGGED_SIZE_8_BYTES false
119 #endif
120 
121 // Some types of tracing require the SFI to store a unique ID.
122 #if defined(V8_TRACE_MAPS) || defined(V8_TRACE_IGNITION)
123 #define V8_SFI_HAS_UNIQUE_ID true
124 #else
125 #define V8_SFI_HAS_UNIQUE_ID false
126 #endif
127 
128 #if defined(V8_OS_WIN) && defined(V8_TARGET_ARCH_X64)
129 #define V8_OS_WIN_X64 true
130 #endif
131 
132 #if defined(V8_OS_WIN) && defined(V8_TARGET_ARCH_ARM64)
133 #define V8_OS_WIN_ARM64 true
134 #endif
135 
136 #if defined(V8_OS_WIN_X64) || defined(V8_OS_WIN_ARM64)
137 #define V8_OS_WIN64 true
138 #endif
139 
140 // Superclass for classes that only use static member functions.
141 // Subclasses of AllStatic cannot be instantiated at all.
142 class AllStatic {
143 #ifdef DEBUG
144  public:
145   AllStatic() = delete;
146 #endif
147 };
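// Usage sketch (added example; `OffsetUtils` is a hypothetical name): classes
// that only expose static helpers derive from AllStatic so that accidental
// instantiation fails to compile in DEBUG builds:
//
//   class OffsetUtils : public AllStatic {
//    public:
//     static constexpr int RoundUp8(int x) { return (x + 7) & ~7; }
//   };
//   // OffsetUtils utils;  // error in DEBUG builds: deleted constructor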
148 
149 using byte = uint8_t;
150 
151 // -----------------------------------------------------------------------------
152 // Constants
153 
154 constexpr int kMaxInt = 0x7FFFFFFF;
155 constexpr int kMinInt = -kMaxInt - 1;
156 constexpr int kMaxInt8 = (1 << 7) - 1;
157 constexpr int kMinInt8 = -(1 << 7);
158 constexpr int kMaxUInt8 = (1 << 8) - 1;
159 constexpr int kMinUInt8 = 0;
160 constexpr int kMaxInt16 = (1 << 15) - 1;
161 constexpr int kMinInt16 = -(1 << 15);
162 constexpr int kMaxUInt16 = (1 << 16) - 1;
163 constexpr int kMinUInt16 = 0;
164 constexpr int kMaxInt31 = kMaxInt / 2;
165 constexpr int kMinInt31 = kMinInt / 2;
166 
167 constexpr uint32_t kMaxUInt32 = 0xFFFFFFFFu;
168 constexpr int kMinUInt32 = 0;
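// Added illustrative checks (not in the original header): the limits above are
// the usual two's-complement ranges.
static_assert(kMaxInt16 == 32767 && kMinInt16 == -32768,
              "added example: int16 range");
static_assert(kMaxUInt8 == 255 && kMaxUInt16 == 65535,
              "added example: unsigned ranges");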
169 
170 constexpr int kUInt8Size = sizeof(uint8_t);
171 constexpr int kByteSize = sizeof(byte);
172 constexpr int kCharSize = sizeof(char);
173 constexpr int kShortSize = sizeof(short);  // NOLINT
174 constexpr int kUInt16Size = sizeof(uint16_t);
175 constexpr int kIntSize = sizeof(int);
176 constexpr int kInt32Size = sizeof(int32_t);
177 constexpr int kInt64Size = sizeof(int64_t);
178 constexpr int kUInt32Size = sizeof(uint32_t);
179 constexpr int kSizetSize = sizeof(size_t);
180 constexpr int kFloatSize = sizeof(float);
181 constexpr int kDoubleSize = sizeof(double);
182 constexpr int kIntptrSize = sizeof(intptr_t);
183 constexpr int kUIntptrSize = sizeof(uintptr_t);
184 constexpr int kSystemPointerSize = sizeof(void*);
185 constexpr int kSystemPointerHexDigits = kSystemPointerSize == 4 ? 8 : 12;
186 constexpr int kPCOnStackSize = kSystemPointerSize;
187 constexpr int kFPOnStackSize = kSystemPointerSize;
188 
189 #if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_IA32
190 constexpr int kElidedFrameSlots = kPCOnStackSize / kSystemPointerSize;
191 #else
192 constexpr int kElidedFrameSlots = 0;
193 #endif
194 
195 constexpr int kDoubleSizeLog2 = 3;
196 
197 // Total wasm code space per engine (i.e. per process) is limited to make
198 // certain attacks that rely on heap spraying harder.
199 // Just below 4GB, such that {kMaxWasmCodeMemory} fits in a 32-bit size_t.
200 constexpr size_t kMaxWasmCodeMB = 4095;
201 constexpr size_t kMaxWasmCodeMemory = kMaxWasmCodeMB * MB;
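// Added worked example (not part of the original header): 4095 * MB =
// 4293918720 bytes, which is below 2^32 - 1 = 4294967295 and therefore
// representable in a 32-bit size_t, as the comment above requires.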
202 
203 #if V8_HOST_ARCH_64_BIT
204 constexpr int kSystemPointerSizeLog2 = 3;
205 constexpr intptr_t kIntptrSignBit =
206     static_cast<intptr_t>(uintptr_t{0x8000000000000000});
207 constexpr bool kPlatformRequiresCodeRange = true;
208 #if (V8_HOST_ARCH_PPC || V8_HOST_ARCH_PPC64) && \
209     (V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_PPC64) && V8_OS_LINUX
210 constexpr size_t kMaximalCodeRangeSize = 512 * MB;
211 constexpr size_t kMinExpectedOSPageSize = 64 * KB;  // OS page on PPC Linux
212 #elif V8_TARGET_ARCH_ARM64
213 constexpr size_t kMaximalCodeRangeSize = 128 * MB;
214 constexpr size_t kMinExpectedOSPageSize = 4 * KB;  // OS page.
215 #else
216 constexpr size_t kMaximalCodeRangeSize = 128 * MB;
217 constexpr size_t kMinExpectedOSPageSize = 4 * KB;  // OS page.
218 #endif
219 #if V8_OS_WIN
220 constexpr size_t kMinimumCodeRangeSize = 4 * MB;
221 constexpr size_t kReservedCodeRangePages = 1;
222 #else
223 constexpr size_t kMinimumCodeRangeSize = 3 * MB;
224 constexpr size_t kReservedCodeRangePages = 0;
225 #endif
226 #else
227 constexpr int kSystemPointerSizeLog2 = 2;
228 constexpr intptr_t kIntptrSignBit = 0x80000000;
229 #if (V8_HOST_ARCH_PPC || V8_HOST_ARCH_PPC64) && \
230     (V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_PPC64) && V8_OS_LINUX
231 constexpr bool kPlatformRequiresCodeRange = false;
232 constexpr size_t kMaximalCodeRangeSize = 0 * MB;
233 constexpr size_t kMinimumCodeRangeSize = 0 * MB;
234 constexpr size_t kMinExpectedOSPageSize = 64 * KB;  // OS page on PPC Linux
235 #elif V8_TARGET_ARCH_MIPS
236 constexpr bool kPlatformRequiresCodeRange = false;
237 constexpr size_t kMaximalCodeRangeSize = 2048LL * MB;
238 constexpr size_t kMinimumCodeRangeSize = 0 * MB;
239 constexpr size_t kMinExpectedOSPageSize = 4 * KB;  // OS page.
240 #else
241 constexpr bool kPlatformRequiresCodeRange = false;
242 constexpr size_t kMaximalCodeRangeSize = 0 * MB;
243 constexpr size_t kMinimumCodeRangeSize = 0 * MB;
244 constexpr size_t kMinExpectedOSPageSize = 4 * KB;  // OS page.
245 #endif
246 constexpr size_t kReservedCodeRangePages = 0;
247 #endif
248 
249 STATIC_ASSERT(kSystemPointerSize == (1 << kSystemPointerSizeLog2));
250 
251 #ifdef V8_COMPRESS_ZONES
252 #define COMPRESS_ZONES_BOOL true
253 #else
254 #define COMPRESS_ZONES_BOOL false
255 #endif  // V8_COMPRESS_ZONES
256 
257 // The flag controls whether zones pointer compression should be enabled for
258 // TurboFan graphs or not.
259 static constexpr bool kCompressGraphZone = COMPRESS_ZONES_BOOL;
260 
261 #ifdef V8_COMPRESS_POINTERS
262 static_assert(
263     kSystemPointerSize == kInt64Size,
264     "Pointer compression can be enabled only for 64-bit architectures");
265 
266 constexpr int kTaggedSize = kInt32Size;
267 constexpr int kTaggedSizeLog2 = 2;
268 
269 // These types define raw and atomic storage types for tagged values stored
270 // on V8 heap.
271 using Tagged_t = uint32_t;
272 using AtomicTagged_t = base::Atomic32;
273 
274 #else
275 
276 constexpr int kTaggedSize = kSystemPointerSize;
277 constexpr int kTaggedSizeLog2 = kSystemPointerSizeLog2;
278 
279 // These types define raw and atomic storage types for tagged values stored
280 // on V8 heap.
281 using Tagged_t = Address;
282 using AtomicTagged_t = base::AtomicWord;
283 
284 #endif  // V8_COMPRESS_POINTERS
285 
286 STATIC_ASSERT(kTaggedSize == (1 << kTaggedSizeLog2));
287 STATIC_ASSERT((kTaggedSize == 8) == TAGGED_SIZE_8_BYTES);
288 
289 using AsAtomicTagged = base::AsAtomicPointerImpl<AtomicTagged_t>;
290 STATIC_ASSERT(sizeof(Tagged_t) == kTaggedSize);
291 STATIC_ASSERT(sizeof(AtomicTagged_t) == kTaggedSize);
292 
293 STATIC_ASSERT(kTaggedSize == kApiTaggedSize);
294 
295 // TODO(ishell): use kTaggedSize or kSystemPointerSize instead.
296 #ifndef V8_COMPRESS_POINTERS
297 constexpr int kPointerSize = kSystemPointerSize;
298 constexpr int kPointerSizeLog2 = kSystemPointerSizeLog2;
299 STATIC_ASSERT(kPointerSize == (1 << kPointerSizeLog2));
300 #endif
301 
302 // This type defines raw storage type for external (or off-V8 heap) pointers
303 // stored on V8 heap.
304 constexpr int kExternalPointerSize = sizeof(ExternalPointer_t);
305 
306 constexpr int kEmbedderDataSlotSize = kSystemPointerSize;
307 
308 constexpr int kEmbedderDataSlotSizeInTaggedSlots =
309     kEmbedderDataSlotSize / kTaggedSize;
310 STATIC_ASSERT(kEmbedderDataSlotSize >= kSystemPointerSize);
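// Added worked example (assuming a 64-bit build with pointer compression):
// kEmbedderDataSlotSize is 8 and kTaggedSize is 4, so an embedder data slot
// spans 2 tagged slots; without compression both sizes match and it spans 1.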
311 
312 constexpr int kExternalAllocationSoftLimit =
313     internal::Internals::kExternalAllocationSoftLimit;
314 
315 // Maximum object size that gets allocated into regular pages. Objects larger
316 // than that size are allocated in large object space and are never moved in
317 // memory. This also applies to new space allocation, since objects are never
318 // migrated from new space to large object space. Takes double alignment into
319 // account.
320 //
321 // Current value: half of the page size.
322 constexpr int kMaxRegularHeapObjectSize = (1 << (kPageSizeBits - 1));
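// Added worked example (assumption: kPageSizeBits is 18, i.e. 256 KB pages,
// which is typical for builds of this era): kMaxRegularHeapObjectSize would
// then be 1 << 17 = 131072 bytes (128 KB); larger objects go directly to a
// large object space.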
323 
324 constexpr int kBitsPerByte = 8;
325 constexpr int kBitsPerByteLog2 = 3;
326 constexpr int kBitsPerSystemPointer = kSystemPointerSize * kBitsPerByte;
327 constexpr int kBitsPerSystemPointerLog2 =
328     kSystemPointerSizeLog2 + kBitsPerByteLog2;
329 constexpr int kBitsPerInt = kIntSize * kBitsPerByte;
330 
331 // IEEE 754 single precision floating point number bit layout.
332 constexpr uint32_t kBinary32SignMask = 0x80000000u;
333 constexpr uint32_t kBinary32ExponentMask = 0x7f800000u;
334 constexpr uint32_t kBinary32MantissaMask = 0x007fffffu;
335 constexpr int kBinary32ExponentBias = 127;
336 constexpr int kBinary32MaxExponent = 0xFE;
337 constexpr int kBinary32MinExponent = 0x01;
338 constexpr int kBinary32MantissaBits = 23;
339 constexpr int kBinary32ExponentShift = 23;
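// Added worked example (not part of the original header): 1.0f is encoded as
// 0x3F800000: sign 0, exponent bits 0x7F (127, i.e. unbiased exponent 0 after
// subtracting kBinary32ExponentBias), mantissa 0.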
340 
341 // Quiet NaNs have bits 51 to 62 set, possibly the sign bit, and no
342 // other bits set.
343 constexpr uint64_t kQuietNaNMask = static_cast<uint64_t>(0xfff) << 51;
344 
345 // Latin1/UTF-16 constants
346 // Code-point values in Unicode 4.0 are 21 bits wide.
347 // Code units in UTF-16 are 16 bits wide.
348 using uc16 = uint16_t;
349 using uc32 = uint32_t;
350 constexpr int kOneByteSize = kCharSize;
351 constexpr int kUC16Size = sizeof(uc16);  // NOLINT
352 
353 // 128 bit SIMD value size.
354 constexpr int kSimd128Size = 16;
355 
356 // FUNCTION_ADDR(f) gets the address of a C function f.
357 #define FUNCTION_ADDR(f) (reinterpret_cast<v8::internal::Address>(f))
358 
359 // FUNCTION_CAST<F>(addr) casts an address into a function
360 // of type F. Used to invoke generated code from within C.
361 template <typename F>
362 F FUNCTION_CAST(byte* addr) {
363   return reinterpret_cast<F>(reinterpret_cast<Address>(addr));
364 }
365 
366 template <typename F>
367 F FUNCTION_CAST(Address addr) {
368   return reinterpret_cast<F>(addr);
369 }
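// Usage sketch (added example; `SimpleFn` and `entry` are hypothetical):
//
//   using SimpleFn = int (*)(int);
//   Address entry = ...;  // start address of some generated code
//   SimpleFn fn = FUNCTION_CAST<SimpleFn>(entry);
//   int result = fn(42);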
370 
371 // Determine whether the architecture uses function descriptors
372 // which provide a level of indirection between the function pointer
373 // and the function entrypoint.
374 #if (V8_HOST_ARCH_PPC || V8_HOST_ARCH_PPC64) &&                    \
375     (V8_OS_AIX || (V8_TARGET_ARCH_PPC64 && V8_TARGET_BIG_ENDIAN && \
376                    (!defined(_CALL_ELF) || _CALL_ELF == 1)))
377 #define USES_FUNCTION_DESCRIPTORS 1
378 #define FUNCTION_ENTRYPOINT_ADDRESS(f)       \
379   (reinterpret_cast<v8::internal::Address*>( \
380       &(reinterpret_cast<intptr_t*>(f)[0])))
381 #else
382 #define USES_FUNCTION_DESCRIPTORS 0
383 #endif
384 
385 // -----------------------------------------------------------------------------
386 // Declarations for use in both the preparser and the rest of V8.
387 
388 // The Strict Mode (ECMA-262 5th edition, 4.2.2).
389 
390 enum class LanguageMode : bool { kSloppy, kStrict };
391 static const size_t LanguageModeSize = 2;
392 
393 inline size_t hash_value(LanguageMode mode) {
394   return static_cast<size_t>(mode);
395 }
396 
397 inline const char* LanguageMode2String(LanguageMode mode) {
398   switch (mode) {
399     case LanguageMode::kSloppy:
400       return "sloppy";
401     case LanguageMode::kStrict:
402       return "strict";
403   }
404   UNREACHABLE();
405 }
406 
407 inline std::ostream& operator<<(std::ostream& os, LanguageMode mode) {
408   return os << LanguageMode2String(mode);
409 }
410 
411 inline bool is_sloppy(LanguageMode language_mode) {
412   return language_mode == LanguageMode::kSloppy;
413 }
414 
415 inline bool is_strict(LanguageMode language_mode) {
416   return language_mode != LanguageMode::kSloppy;
417 }
418 
419 inline bool is_valid_language_mode(int language_mode) {
420   return language_mode == static_cast<int>(LanguageMode::kSloppy) ||
421          language_mode == static_cast<int>(LanguageMode::kStrict);
422 }
423 
424 inline LanguageMode construct_language_mode(bool strict_bit) {
425   return static_cast<LanguageMode>(strict_bit);
426 }
427 
428 // Return kStrict if either of the language modes is kStrict, or kSloppy
429 // otherwise.
430 inline LanguageMode stricter_language_mode(LanguageMode mode1,
431                                            LanguageMode mode2) {
432   STATIC_ASSERT(LanguageModeSize == 2);
433   return static_cast<LanguageMode>(static_cast<int>(mode1) |
434                                    static_cast<int>(mode2));
435 }
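// Added worked example (not part of the original header): because kSloppy is 0
// and kStrict is 1, OR-ing the modes yields kStrict exactly when at least one
// operand is strict, e.g.
//   stricter_language_mode(LanguageMode::kSloppy, LanguageMode::kStrict)
//       == LanguageMode::kStrict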
436 
437 // A non-keyed store is of the form a.x = foo or a["x"] = foo whereas
438 // a keyed store is of the form a[expression] = foo.
439 enum class StoreOrigin { kMaybeKeyed, kNamed };
440 
441 enum TypeofMode : int { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
442 
443 // Enums used by CEntry.
444 enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs };
445 enum ArgvMode { kArgvOnStack, kArgvInRegister };
446 
447 // This constant is used as an undefined value when passing source positions.
448 constexpr int kNoSourcePosition = -1;
449 
450 // This constant is used to signal the function entry implicit stack check
451 // bytecode offset.
452 constexpr int kFunctionEntryBytecodeOffset = -1;
453 
454 // This constant is used to indicate missing deoptimization information.
455 constexpr int kNoDeoptimizationId = -1;
456 
457 // Deoptimize bailout kind:
458 // - Eager: a check failed in the optimized code and deoptimization happens
459 //   immediately.
460 // - Lazy: the code has been marked as dependent on some assumption which
461 //   is checked elsewhere and can trigger deoptimization the next time the
462 //   code is executed.
463 // - Soft: similar to lazy deoptimization, but does not contribute to the
464 //   total deopt count which can lead to disabling optimization for a function.
465 // - Bailout: a check failed in the optimized code but we don't
466 //   deoptimize the code, but try to heal the feedback and try to rerun
467 //   the optimized code again.
468 enum class DeoptimizeKind : uint8_t {
469   kEager,
470   kSoft,
471   kBailout,
472   kLazy,
473 };
474 constexpr DeoptimizeKind kFirstDeoptimizeKind = DeoptimizeKind::kEager;
475 constexpr DeoptimizeKind kLastDeoptimizeKind = DeoptimizeKind::kLazy;
476 STATIC_ASSERT(static_cast<int>(kFirstDeoptimizeKind) == 0);
477 constexpr int kDeoptimizeKindCount = static_cast<int>(kLastDeoptimizeKind) + 1;
478 inline size_t hash_value(DeoptimizeKind kind) {
479   return static_cast<size_t>(kind);
480 }
481 inline std::ostream& operator<<(std::ostream& os, DeoptimizeKind kind) {
482   switch (kind) {
483     case DeoptimizeKind::kEager:
484       return os << "Eager";
485     case DeoptimizeKind::kSoft:
486       return os << "Soft";
487     case DeoptimizeKind::kLazy:
488       return os << "Lazy";
489     case DeoptimizeKind::kBailout:
490       return os << "Bailout";
491   }
492 }
493 
494 // Indicates whether the lookup is related to sloppy-mode block-scoped
495 // function hoisting, and is a synthetic assignment for that.
496 enum class LookupHoistingMode { kNormal, kLegacySloppy };
497 
498 inline std::ostream& operator<<(std::ostream& os,
499                                 const LookupHoistingMode& mode) {
500   switch (mode) {
501     case LookupHoistingMode::kNormal:
502       return os << "normal hoisting";
503     case LookupHoistingMode::kLegacySloppy:
504       return os << "legacy sloppy hoisting";
505   }
506   UNREACHABLE();
507 }
508 
509 static_assert(kSmiValueSize <= 32, "Unsupported Smi tagging scheme");
510 // Smi sign bit position must be 32-bit aligned so we can use sign extension
511 // instructions on 64-bit architectures without additional shifts.
512 static_assert((kSmiValueSize + kSmiShiftSize + kSmiTagSize) % 32 == 0,
513               "Unsupported Smi tagging scheme");
514 
515 constexpr bool kIsSmiValueInUpper32Bits =
516     (kSmiValueSize + kSmiShiftSize + kSmiTagSize) == 64;
517 constexpr bool kIsSmiValueInLower32Bits =
518     (kSmiValueSize + kSmiShiftSize + kSmiTagSize) == 32;
519 static_assert(!SmiValuesAre32Bits() == SmiValuesAre31Bits(),
520               "Unsupported Smi tagging scheme");
521 static_assert(SmiValuesAre32Bits() == kIsSmiValueInUpper32Bits,
522               "Unsupported Smi tagging scheme");
523 static_assert(SmiValuesAre31Bits() == kIsSmiValueInLower32Bits,
524               "Unsupported Smi tagging scheme");
525 
526 // Mask for the sign bit in a smi.
527 constexpr intptr_t kSmiSignMask = static_cast<intptr_t>(
528     uintptr_t{1} << (kSmiValueSize + kSmiShiftSize + kSmiTagSize - 1));
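// Added worked example (assuming a 64-bit build with 32-bit Smi values, i.e.
// kSmiValueSize 32, kSmiShiftSize 31, kSmiTagSize 1): the sign bit sits at
// position 32 + 31 + 1 - 1 = 63, so kSmiSignMask is uintptr_t{1} << 63.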
529 
530 // Desired alignment for tagged pointers.
531 constexpr int kObjectAlignmentBits = kTaggedSizeLog2;
532 constexpr intptr_t kObjectAlignment = 1 << kObjectAlignmentBits;
533 constexpr intptr_t kObjectAlignmentMask = kObjectAlignment - 1;
534 
535 // Desired alignment for system pointers.
536 constexpr intptr_t kPointerAlignment = (1 << kSystemPointerSizeLog2);
537 constexpr intptr_t kPointerAlignmentMask = kPointerAlignment - 1;
538 
539 // Desired alignment for double values.
540 constexpr intptr_t kDoubleAlignment = 8;
541 constexpr intptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;
542 
543 // Desired alignment for generated code is 32 bytes (to improve cache line
544 // utilization).
545 constexpr int kCodeAlignmentBits = 5;
546 constexpr intptr_t kCodeAlignment = 1 << kCodeAlignmentBits;
547 constexpr intptr_t kCodeAlignmentMask = kCodeAlignment - 1;
548 
549 const Address kWeakHeapObjectMask = 1 << 1;
550 
551 // The lower 32 bits of the cleared weak reference value is always equal to
552 // the |kClearedWeakHeapObjectLower32| constant but on 64-bit architectures
553 // the value of the upper 32 bits part may be
554 // 1) zero when pointer compression is disabled,
555 // 2) upper 32 bits of the isolate root value when pointer compression is
556 //    enabled.
557 // This is necessary to make pointer decompression computation also suitable
558 // for cleared weak reference.
559 // Note that real heap objects can't have lower 32 bits equal to 3 because
560 // that offset belongs to the page header. So, in either case it's enough to
561 // compare only the lower 32 bits of a MaybeObject value in order to figure
562 // out if it's a cleared reference or not.
563 const uint32_t kClearedWeakHeapObjectLower32 = 3;
564 
565 // Zap-value: The value used for zapping dead objects.
566 // Should be a recognizable hex value tagged as a failure.
567 #ifdef V8_HOST_ARCH_64_BIT
568 constexpr uint64_t kClearedFreeMemoryValue = 0;
569 constexpr uint64_t kZapValue = uint64_t{0xdeadbeedbeadbeef};
570 constexpr uint64_t kHandleZapValue = uint64_t{0x1baddead0baddeaf};
571 constexpr uint64_t kGlobalHandleZapValue = uint64_t{0x1baffed00baffedf};
572 constexpr uint64_t kFromSpaceZapValue = uint64_t{0x1beefdad0beefdaf};
573 constexpr uint64_t kDebugZapValue = uint64_t{0xbadbaddbbadbaddb};
574 constexpr uint64_t kSlotsZapValue = uint64_t{0xbeefdeadbeefdeef};
575 constexpr uint64_t kFreeListZapValue = 0xfeed1eaffeed1eaf;
576 #else
577 constexpr uint32_t kClearedFreeMemoryValue = 0;
578 constexpr uint32_t kZapValue = 0xdeadbeef;
579 constexpr uint32_t kHandleZapValue = 0xbaddeaf;
580 constexpr uint32_t kGlobalHandleZapValue = 0xbaffedf;
581 constexpr uint32_t kFromSpaceZapValue = 0xbeefdaf;
582 constexpr uint32_t kSlotsZapValue = 0xbeefdeef;
583 constexpr uint32_t kDebugZapValue = 0xbadbaddb;
584 constexpr uint32_t kFreeListZapValue = 0xfeed1eaf;
585 #endif
586 
587 constexpr int kCodeZapValue = 0xbadc0de;
588 constexpr uint32_t kPhantomReferenceZap = 0xca11bac;
589 
590 // Page constants.
591 static const intptr_t kPageAlignmentMask = (intptr_t{1} << kPageSizeBits) - 1;
592 
593 // On Intel architectures, the cache line size is 64 bytes.
594 // On ARM it may be smaller (32 bytes), but since this constant is only
595 // used for aligning data, it doesn't hurt to align on the larger value.
596 #define PROCESSOR_CACHE_LINE_SIZE 64
597 
598 // Constants relevant to double precision floating point numbers.
599 // If looking only at the top 32 bits, the QNaN mask is bits 19 to 30.
600 constexpr uint32_t kQuietNaNHighBitsMask = 0xfff << (51 - 32);
601 
602 enum class HeapObjectReferenceType {
603   WEAK,
604   STRONG,
605 };
606 
607 enum class ArgumentsType {
608   kRuntime,
609   kJS,
610 };
611 
612 // -----------------------------------------------------------------------------
613 // Forward declarations for frequently used classes
614 
615 class AccessorInfo;
616 template <ArgumentsType>
617 class Arguments;
618 using RuntimeArguments = Arguments<ArgumentsType::kRuntime>;
619 using JavaScriptArguments = Arguments<ArgumentsType::kJS>;
620 class Assembler;
621 class ClassScope;
622 class Code;
623 class CodeSpace;
624 class Context;
625 class DeclarationScope;
626 class Debug;
627 class DebugInfo;
628 class Descriptor;
629 class DescriptorArray;
630 class TransitionArray;
631 class ExternalReference;
632 class FeedbackVector;
633 class FixedArray;
634 class Foreign;
635 class FreeStoreAllocationPolicy;
636 class FunctionTemplateInfo;
637 class GlobalDictionary;
638 template <typename T>
639 class Handle;
640 class Heap;
641 class HeapObject;
642 class HeapObjectReference;
643 class IC;
644 class InterceptorInfo;
645 class Isolate;
646 class JSReceiver;
647 class JSArray;
648 class JSFunction;
649 class JSObject;
650 class LocalIsolate;
651 class MacroAssembler;
652 class Map;
653 class MapSpace;
654 class MarkCompactCollector;
655 template <typename T>
656 class MaybeHandle;
657 class MaybeObject;
658 class MemoryChunk;
659 class MessageLocation;
660 class ModuleScope;
661 class Name;
662 class NameDictionary;
663 class NativeContext;
664 class NewSpace;
665 class NewLargeObjectSpace;
666 class NumberDictionary;
667 class Object;
668 class OldLargeObjectSpace;
669 template <HeapObjectReferenceType kRefType, typename StorageType>
670 class TaggedImpl;
671 class StrongTaggedValue;
672 class TaggedValue;
673 class CompressedObjectSlot;
674 class CompressedMaybeObjectSlot;
675 class CompressedMapWordSlot;
676 class CompressedHeapObjectSlot;
677 class OffHeapCompressedObjectSlot;
678 class FullObjectSlot;
679 class FullMaybeObjectSlot;
680 class FullHeapObjectSlot;
681 class OffHeapFullObjectSlot;
682 class OldSpace;
683 class ReadOnlySpace;
684 class RelocInfo;
685 class Scope;
686 class ScopeInfo;
687 class Script;
688 class SimpleNumberDictionary;
689 class Smi;
690 template <typename Config, class Allocator = FreeStoreAllocationPolicy>
691 class SplayTree;
692 class String;
693 class StringStream;
694 class Struct;
695 class Symbol;
696 class Variable;
697 
698 // Slots are either full-pointer slots or compressed slots depending on whether
699 // pointer compression is enabled or not.
700 struct SlotTraits {
701 #ifdef V8_COMPRESS_POINTERS
702   using TObjectSlot = CompressedObjectSlot;
703   using TMaybeObjectSlot = CompressedMaybeObjectSlot;
704   using THeapObjectSlot = CompressedHeapObjectSlot;
705   using TOffHeapObjectSlot = OffHeapCompressedObjectSlot;
706 #else
707   using TObjectSlot = FullObjectSlot;
708   using TMaybeObjectSlot = FullMaybeObjectSlot;
709   using THeapObjectSlot = FullHeapObjectSlot;
710   using TOffHeapObjectSlot = OffHeapFullObjectSlot;
711 #endif
712 };
713 
714 // An ObjectSlot instance describes a kTaggedSize-sized on-heap field ("slot")
715 // holding an Object value (smi or strong heap object).
716 using ObjectSlot = SlotTraits::TObjectSlot;
717 
718 // A MaybeObjectSlot instance describes a kTaggedSize-sized on-heap field
719 // ("slot") holding MaybeObject (smi or weak heap object or strong heap object).
720 using MaybeObjectSlot = SlotTraits::TMaybeObjectSlot;
721 
722 // A HeapObjectSlot instance describes a kTaggedSize-sized field ("slot")
723 // holding a weak or strong pointer to a heap object (think:
724 // HeapObjectReference).
725 using HeapObjectSlot = SlotTraits::THeapObjectSlot;
726 
727 // An OffHeapObjectSlot instance describes a kTaggedSize-sized field ("slot")
728 // holding an Object value (smi or strong heap object), whose slot location is
729 // off-heap.
730 using OffHeapObjectSlot = SlotTraits::TOffHeapObjectSlot;
731 
732 using WeakSlotCallback = bool (*)(FullObjectSlot pointer);
733 
734 using WeakSlotCallbackWithHeap = bool (*)(Heap* heap, FullObjectSlot pointer);
735 
736 // -----------------------------------------------------------------------------
737 // Miscellaneous
738 
739 // NOTE: SpaceIterator depends on AllocationSpace enumeration values being
740 // consecutive.
741 enum AllocationSpace {
742   RO_SPACE,       // Immortal, immovable and immutable objects,
743   OLD_SPACE,      // Old generation regular object space.
744   CODE_SPACE,     // Old generation code object space, marked executable.
745   MAP_SPACE,      // Old generation map object space, non-movable.
746   LO_SPACE,       // Old generation large object space.
747   CODE_LO_SPACE,  // Old generation large code object space.
748   NEW_LO_SPACE,   // Young generation large object space.
749   NEW_SPACE,  // Young generation semispaces for regular objects collected with
750               // Scavenger.
751 
752   FIRST_SPACE = RO_SPACE,
753   LAST_SPACE = NEW_SPACE,
754   FIRST_MUTABLE_SPACE = OLD_SPACE,
755   LAST_MUTABLE_SPACE = NEW_SPACE,
756   FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
757   LAST_GROWABLE_PAGED_SPACE = MAP_SPACE
758 };
759 constexpr int kSpaceTagSize = 4;
760 STATIC_ASSERT(FIRST_SPACE == 0);
761 
762 enum class AllocationType : uint8_t {
763   kYoung,    // Regular object allocated in NEW_SPACE or NEW_LO_SPACE
764   kOld,      // Regular object allocated in OLD_SPACE or LO_SPACE
765   kCode,     // Code object allocated in CODE_SPACE or CODE_LO_SPACE
766   kMap,      // Map object allocated in MAP_SPACE
767   kReadOnly  // Object allocated in RO_SPACE
768 };
769 
770 inline size_t hash_value(AllocationType kind) {
771   return static_cast<uint8_t>(kind);
772 }
773 
774 inline std::ostream& operator<<(std::ostream& os, AllocationType kind) {
775   switch (kind) {
776     case AllocationType::kYoung:
777       return os << "Young";
778     case AllocationType::kOld:
779       return os << "Old";
780     case AllocationType::kCode:
781       return os << "Code";
782     case AllocationType::kMap:
783       return os << "Map";
784     case AllocationType::kReadOnly:
785       return os << "ReadOnly";
786   }
787   UNREACHABLE();
788 }
789 
790 // TODO(ishell): review and rename kWordAligned to kTaggedAligned.
791 enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };
792 
793 enum class AccessMode { ATOMIC, NON_ATOMIC };
794 
795 enum class AllowLargeObjects { kFalse, kTrue };
796 
797 enum MinimumCapacity {
798   USE_DEFAULT_MINIMUM_CAPACITY,
799   USE_CUSTOM_MINIMUM_CAPACITY
800 };
801 
802 enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };
803 
804 enum class LocalSpaceKind {
805   kNone,
806   kCompactionSpaceForScavenge,
807   kCompactionSpaceForMarkCompact,
808   kCompactionSpaceForMinorMarkCompact,
809 
810   kFirstCompactionSpace = kCompactionSpaceForScavenge,
811   kLastCompactionSpace = kCompactionSpaceForMinorMarkCompact,
812 };
813 
814 enum Executability { NOT_EXECUTABLE, EXECUTABLE };
815 
816 enum class BytecodeFlushMode {
817   kDoNotFlushBytecode,
818   kFlushBytecode,
819   kStressFlushBytecode,
820 };
821 
822 // Indicates whether a script should be parsed and compiled in REPL mode.
823 enum class REPLMode {
824   kYes,
825   kNo,
826 };
827 
828 inline REPLMode construct_repl_mode(bool is_repl_mode) {
829   return is_repl_mode ? REPLMode::kYes : REPLMode::kNo;
830 }
831 
832 // Flag indicating whether code is built into the VM (one of the natives files).
833 enum NativesFlag { NOT_NATIVES_CODE, EXTENSION_CODE, INSPECTOR_CODE };
834 
835 // ParseRestriction is used to restrict the set of valid statements in a
836 // unit of compilation.  Restriction violations cause a syntax error.
837 enum ParseRestriction : bool {
838   NO_PARSE_RESTRICTION,         // All expressions are allowed.
839   ONLY_SINGLE_FUNCTION_LITERAL  // Only a single FunctionLiteral expression.
840 };
841 
842 // State for inline cache call sites. Aliased as IC::State.
843 enum InlineCacheState {
844   // No feedback will be collected.
845   NO_FEEDBACK,
846   // Has never been executed.
847   UNINITIALIZED,
848   // Has been executed and only one receiver type has been seen.
849   MONOMORPHIC,
850   // Check failed due to prototype (or map deprecation).
851   RECOMPUTE_HANDLER,
852   // Multiple receiver types have been seen.
853   POLYMORPHIC,
854   // Many receiver types have been seen.
855   MEGAMORPHIC,
856   // A generic handler is installed and no extra typefeedback is recorded.
857   GENERIC,
858 };
859 
860 // Printing support.
861 inline const char* InlineCacheState2String(InlineCacheState state) {
862   switch (state) {
863     case NO_FEEDBACK:
864       return "NOFEEDBACK";
865     case UNINITIALIZED:
866       return "UNINITIALIZED";
867     case MONOMORPHIC:
868       return "MONOMORPHIC";
869     case RECOMPUTE_HANDLER:
870       return "RECOMPUTE_HANDLER";
871     case POLYMORPHIC:
872       return "POLYMORPHIC";
873     case MEGAMORPHIC:
874       return "MEGAMORPHIC";
875     case GENERIC:
876       return "GENERIC";
877   }
878   UNREACHABLE();
879 }
880 
881 enum WhereToStart { kStartAtReceiver, kStartAtPrototype };
882 
883 enum ResultSentinel { kNotFound = -1, kUnsupported = -2 };
884 
885 enum ShouldThrow {
886   kThrowOnError = Internals::kThrowOnError,
887   kDontThrow = Internals::kDontThrow
888 };
889 
890 enum class ThreadKind { kMain, kBackground };
891 
892 // Union used for customized checking of the IEEE double types
893 // inlined within the v8 runtime, rather than going to the underlying
894 // platform headers and libraries.
895 union IeeeDoubleLittleEndianArchType {
896   double d;
897   struct {
898     unsigned int man_low : 32;
899     unsigned int man_high : 20;
900     unsigned int exp : 11;
901     unsigned int sign : 1;
902   } bits;
903 };
904 
905 union IeeeDoubleBigEndianArchType {
906   double d;
907   struct {
908     unsigned int sign : 1;
909     unsigned int exp : 11;
910     unsigned int man_high : 20;
911     unsigned int man_low : 32;
912   } bits;
913 };
914 
915 #if V8_TARGET_LITTLE_ENDIAN
916 using IeeeDoubleArchType = IeeeDoubleLittleEndianArchType;
917 constexpr int kIeeeDoubleMantissaWordOffset = 0;
918 constexpr int kIeeeDoubleExponentWordOffset = 4;
919 #else
920 using IeeeDoubleArchType = IeeeDoubleBigEndianArchType;
921 constexpr int kIeeeDoubleMantissaWordOffset = 4;
922 constexpr int kIeeeDoubleExponentWordOffset = 0;
923 #endif
924 
925 // -----------------------------------------------------------------------------
926 // Macros
927 
928 // Testers for object tags.
929 
930 #define HAS_SMI_TAG(value) \
931   ((static_cast<i::Tagged_t>(value) & ::i::kSmiTagMask) == ::i::kSmiTag)
932 
933 #define HAS_STRONG_HEAP_OBJECT_TAG(value)                          \
934   (((static_cast<i::Tagged_t>(value) & ::i::kHeapObjectTagMask) == \
935     ::i::kHeapObjectTag))
936 
937 #define HAS_WEAK_HEAP_OBJECT_TAG(value)                            \
938   (((static_cast<i::Tagged_t>(value) & ::i::kHeapObjectTagMask) == \
939     ::i::kWeakHeapObjectTag))
940 
941 // OBJECT_POINTER_ALIGN returns the value aligned as a HeapObject pointer
942 #define OBJECT_POINTER_ALIGN(value) \
943   (((value) + ::i::kObjectAlignmentMask) & ~::i::kObjectAlignmentMask)
944 
945 // OBJECT_POINTER_PADDING returns the padding size required to align value
946 // as a HeapObject pointer
947 #define OBJECT_POINTER_PADDING(value) (OBJECT_POINTER_ALIGN(value) - (value))
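// Added worked example (assuming kTaggedSize == 8, so kObjectAlignmentMask is
// 7): OBJECT_POINTER_ALIGN(13) == (13 + 7) & ~7 == 16, and
// OBJECT_POINTER_PADDING(13) == 16 - 13 == 3.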
948 
949 // POINTER_SIZE_ALIGN returns the value aligned as a system pointer.
950 #define POINTER_SIZE_ALIGN(value) \
951   (((value) + ::i::kPointerAlignmentMask) & ~::i::kPointerAlignmentMask)
952 
953 // POINTER_SIZE_PADDING returns the padding size required to align value
954 // as a system pointer.
955 #define POINTER_SIZE_PADDING(value) (POINTER_SIZE_ALIGN(value) - (value))
956 
957 // CODE_POINTER_ALIGN returns the value aligned as a generated code segment.
958 #define CODE_POINTER_ALIGN(value) \
959   (((value) + ::i::kCodeAlignmentMask) & ~::i::kCodeAlignmentMask)
960 
961 // CODE_POINTER_PADDING returns the padding size required to align value
962 // as a generated code segment.
963 #define CODE_POINTER_PADDING(value) (CODE_POINTER_ALIGN(value) - (value))
964 
965 // DOUBLE_POINTER_ALIGN returns the value aligned for double pointers.
966 #define DOUBLE_POINTER_ALIGN(value) \
967   (((value) + ::i::kDoubleAlignmentMask) & ~::i::kDoubleAlignmentMask)
968 
969 // Defines hints about receiver values based on structural knowledge.
970 enum class ConvertReceiverMode : unsigned {
971   kNullOrUndefined,     // Guaranteed to be null or undefined.
972   kNotNullOrUndefined,  // Guaranteed to never be null or undefined.
973   kAny                  // No specific knowledge about receiver.
974 };
975 
976 inline size_t hash_value(ConvertReceiverMode mode) {
977   return bit_cast<unsigned>(mode);
978 }
979 
980 inline std::ostream& operator<<(std::ostream& os, ConvertReceiverMode mode) {
981   switch (mode) {
982     case ConvertReceiverMode::kNullOrUndefined:
983       return os << "NULL_OR_UNDEFINED";
984     case ConvertReceiverMode::kNotNullOrUndefined:
985       return os << "NOT_NULL_OR_UNDEFINED";
986     case ConvertReceiverMode::kAny:
987       return os << "ANY";
988   }
989   UNREACHABLE();
990 }
991 
992 // Valid hints for the abstract operation OrdinaryToPrimitive,
993 // implemented according to ES6, section 7.1.1.
994 enum class OrdinaryToPrimitiveHint { kNumber, kString };
995 
996 // Valid hints for the abstract operation ToPrimitive,
997 // implemented according to ES6, section 7.1.1.
998 enum class ToPrimitiveHint { kDefault, kNumber, kString };
999 
1000 // Defines specifics about arguments object or rest parameter creation.
1001 enum class CreateArgumentsType : uint8_t {
1002   kMappedArguments,
1003   kUnmappedArguments,
1004   kRestParameter
1005 };
1006 
1007 inline size_t hash_value(CreateArgumentsType type) {
1008   return bit_cast<uint8_t>(type);
1009 }
1010 
1011 inline std::ostream& operator<<(std::ostream& os, CreateArgumentsType type) {
1012   switch (type) {
1013     case CreateArgumentsType::kMappedArguments:
1014       return os << "MAPPED_ARGUMENTS";
1015     case CreateArgumentsType::kUnmappedArguments:
1016       return os << "UNMAPPED_ARGUMENTS";
1017     case CreateArgumentsType::kRestParameter:
1018       return os << "REST_PARAMETER";
1019   }
1020   UNREACHABLE();
1021 }
1022 
1023 enum ScopeType : uint8_t {
1024   CLASS_SCOPE,     // The scope introduced by a class.
1025   EVAL_SCOPE,      // The top-level scope for an eval source.
1026   FUNCTION_SCOPE,  // The top-level scope for a function.
1027   MODULE_SCOPE,    // The scope introduced by a module literal
1028   SCRIPT_SCOPE,    // The top-level scope for a script or a top-level eval.
1029   CATCH_SCOPE,     // The scope introduced by catch.
1030   BLOCK_SCOPE,     // The scope introduced by a new block.
1031   WITH_SCOPE       // The scope introduced by with.
1032 };
1033 
1034 inline std::ostream& operator<<(std::ostream& os, ScopeType type) {
1035   switch (type) {
1036     case ScopeType::EVAL_SCOPE:
1037       return os << "EVAL_SCOPE";
1038     case ScopeType::FUNCTION_SCOPE:
1039       return os << "FUNCTION_SCOPE";
1040     case ScopeType::MODULE_SCOPE:
1041       return os << "MODULE_SCOPE";
1042     case ScopeType::SCRIPT_SCOPE:
1043       return os << "SCRIPT_SCOPE";
1044     case ScopeType::CATCH_SCOPE:
1045       return os << "CATCH_SCOPE";
1046     case ScopeType::BLOCK_SCOPE:
1047       return os << "BLOCK_SCOPE";
1048     case ScopeType::CLASS_SCOPE:
1049       return os << "CLASS_SCOPE";
1050     case ScopeType::WITH_SCOPE:
1051       return os << "WITH_SCOPE";
1052   }
1053   UNREACHABLE();
1054 }
1055 
1056 // AllocationSiteMode controls whether allocations are tracked by an allocation
1057 // site.
1058 enum AllocationSiteMode {
1059   DONT_TRACK_ALLOCATION_SITE,
1060   TRACK_ALLOCATION_SITE,
1061   LAST_ALLOCATION_SITE_MODE = TRACK_ALLOCATION_SITE
1062 };
1063 
1064 enum class AllocationSiteUpdateMode { kUpdate, kCheckOnly };
1065 
1066 // The MIPS architecture prior to revision 5 has an inverted encoding for sNaN.
1067 #if (V8_TARGET_ARCH_MIPS && !defined(_MIPS_ARCH_MIPS32R6) &&           \
1068      (!defined(USE_SIMULATOR) || !defined(_MIPS_TARGET_SIMULATOR))) || \
1069     (V8_TARGET_ARCH_MIPS64 && !defined(_MIPS_ARCH_MIPS64R6) &&         \
1070      (!defined(USE_SIMULATOR) || !defined(_MIPS_TARGET_SIMULATOR)))
1071 constexpr uint32_t kHoleNanUpper32 = 0xFFFF7FFF;
1072 constexpr uint32_t kHoleNanLower32 = 0xFFFF7FFF;
1073 #else
1074 constexpr uint32_t kHoleNanUpper32 = 0xFFF7FFFF;
1075 constexpr uint32_t kHoleNanLower32 = 0xFFF7FFFF;
1076 #endif
1077 
1078 constexpr uint64_t kHoleNanInt64 =
1079     (static_cast<uint64_t>(kHoleNanUpper32) << 32) | kHoleNanLower32;
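// Added worked example (not part of the original header): on non-MIPS targets
// both halves are 0xFFF7FFFF, so kHoleNanInt64 == 0xFFF7FFFFFFF7FFFF.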
1080 
1081 // ES6 section 20.1.2.6 Number.MAX_SAFE_INTEGER
1082 constexpr uint64_t kMaxSafeIntegerUint64 = 9007199254740991;  // 2^53-1
1083 constexpr double kMaxSafeInteger = static_cast<double>(kMaxSafeIntegerUint64);
1084 
1085 constexpr double kMaxUInt32Double = double{kMaxUInt32};
1086 
1087 // The order of this enum has to be kept in sync with the predicates below.
1088 enum class VariableMode : uint8_t {
1089   // User declared variables:
1090   kLet,  // declared via 'let' declarations (first lexical)
1091 
1092   kConst,  // declared via 'const' declarations (last lexical)
1093 
1094   kVar,  // declared via 'var', and 'function' declarations
1095 
1096   // Variables introduced by the compiler:
1097   kTemporary,  // temporary variables (not user-visible), stack-allocated
1098                // unless the scope as a whole has forced context allocation
1099 
1100   kDynamic,  // always require dynamic lookup (we don't know
1101              // the declaration)
1102 
1103   kDynamicGlobal,  // requires dynamic lookup, but we know that the
1104                    // variable is global unless it has been shadowed
1105                    // by an eval-introduced variable
1106 
1107   kDynamicLocal,  // requires dynamic lookup, but we know that the
1108                   // variable is local and where it is unless it
1109                   // has been shadowed by an eval-introduced
1110                   // variable
1111 
1112   // Variables for private methods or accessors whose access require
1113   // brand check. Declared only in class scopes by the compiler
1114   // and allocated only in class contexts:
1115   kPrivateMethod,  // Does not coexist with any other variable with the same
1116                    // name in the same scope.
1117 
1118   kPrivateSetterOnly,  // Incompatible with variables with the same name but
1119                        // any mode other than kPrivateGetterOnly. Transition to
1120                        // kPrivateGetterAndSetter if a later declaration for the
1121                        // same name with kPrivateGetterOnly is made.
1122 
1123   kPrivateGetterOnly,  // Incompatible with variables with the same name but
1124                        // any mode other than kPrivateSetterOnly. Transition to
1125                        // kPrivateGetterAndSetter if a later declaration for the
1126                        // same name with kPrivateSetterOnly is made.
1127 
1128   kPrivateGetterAndSetter,  // Does not coexist with any other variable with the
1129                             // same name in the same scope.
1130 
1131   kLastLexicalVariableMode = kConst,
1132 };
1133 
1134 // Printing support
1135 #ifdef DEBUG
1136 inline const char* VariableMode2String(VariableMode mode) {
1137   switch (mode) {
1138     case VariableMode::kVar:
1139       return "VAR";
1140     case VariableMode::kLet:
1141       return "LET";
1142     case VariableMode::kPrivateGetterOnly:
1143       return "PRIVATE_GETTER_ONLY";
1144     case VariableMode::kPrivateSetterOnly:
1145       return "PRIVATE_SETTER_ONLY";
1146     case VariableMode::kPrivateMethod:
1147       return "PRIVATE_METHOD";
1148     case VariableMode::kPrivateGetterAndSetter:
1149       return "PRIVATE_GETTER_AND_SETTER";
1150     case VariableMode::kConst:
1151       return "CONST";
1152     case VariableMode::kDynamic:
1153       return "DYNAMIC";
1154     case VariableMode::kDynamicGlobal:
1155       return "DYNAMIC_GLOBAL";
1156     case VariableMode::kDynamicLocal:
1157       return "DYNAMIC_LOCAL";
1158     case VariableMode::kTemporary:
1159       return "TEMPORARY";
1160   }
1161   UNREACHABLE();
1162 }
1163 #endif
1164 
1165 enum VariableKind : uint8_t {
1166   NORMAL_VARIABLE,
1167   PARAMETER_VARIABLE,
1168   THIS_VARIABLE,
1169   SLOPPY_BLOCK_FUNCTION_VARIABLE,
1170   SLOPPY_FUNCTION_NAME_VARIABLE
1171 };
1172 
1173 inline bool IsDynamicVariableMode(VariableMode mode) {
1174   return mode >= VariableMode::kDynamic && mode <= VariableMode::kDynamicLocal;
1175 }
1176 
1177 inline bool IsDeclaredVariableMode(VariableMode mode) {
1178   STATIC_ASSERT(static_cast<uint8_t>(VariableMode::kLet) ==
1179                 0);  // Implies that mode >= VariableMode::kLet.
1180   return mode <= VariableMode::kVar;
1181 }
1182 
1183 inline bool IsPrivateMethodOrAccessorVariableMode(VariableMode mode) {
1184   return mode >= VariableMode::kPrivateMethod &&
1185          mode <= VariableMode::kPrivateGetterAndSetter;
1186 }
1187 
1188 inline bool IsSerializableVariableMode(VariableMode mode) {
1189   return IsDeclaredVariableMode(mode) ||
1190          IsPrivateMethodOrAccessorVariableMode(mode);
1191 }
1192 
1193 inline bool IsConstVariableMode(VariableMode mode) {
1194   return mode == VariableMode::kConst ||
1195          IsPrivateMethodOrAccessorVariableMode(mode);
1196 }
1197 
1198 inline bool IsLexicalVariableMode(VariableMode mode) {
1199   STATIC_ASSERT(static_cast<uint8_t>(VariableMode::kLet) ==
1200                 0);  // Implies that mode >= VariableMode::kLet.
1201   return mode <= VariableMode::kLastLexicalVariableMode;
1202 }
1203 
1204 enum VariableLocation : uint8_t {
1205   // Before and during variable allocation, a variable whose location is
1206   // not yet determined.  After allocation, a variable looked up as a
1207   // property on the global object (and possibly absent).  name() is the
1208   // variable name, index() is invalid.
1209   UNALLOCATED,
1210 
1211   // A slot in the parameter section on the stack.  index() is the
1212   // parameter index, counting left-to-right.  The receiver is index -1;
1213   // the first parameter is index 0.
1214   PARAMETER,
1215 
1216   // A slot in the local section on the stack.  index() is the variable
1217   // index in the stack frame, starting at 0.
1218   LOCAL,
1219 
1220   // An indexed slot in a heap context.  index() is the variable index in
1221   // the context object on the heap, starting at 0.  scope() is the
1222   // corresponding scope.
1223   CONTEXT,
1224 
1225   // A named slot in a heap context.  name() is the variable name in the
1226   // context object on the heap, with lookup starting at the current
1227   // context.  index() is invalid.
1228   LOOKUP,
1229 
1230   // A named slot in a module's export table.
1231   MODULE,
1232 
1233   // An indexed slot in a script context. index() is the variable
1234   // index in the context object on the heap, starting at 0.
1235   // Important: REPL_GLOBAL variables from different scripts with the
1236   //            same name share a single script context slot. Every
1237   //            script context will reserve a slot, but only one will be used.
1238   // REPL_GLOBAL variables are stored in script contexts, but accessed like
1239   // globals, i.e. they always require a lookup at runtime to find the right
1240   // script context.
1241   REPL_GLOBAL,
1242 
1243   kLastVariableLocation = REPL_GLOBAL
1244 };
1245 
1246 // ES6 specifies declarative environment records with mutable and immutable
1247 // bindings that can be in two states: initialized and uninitialized.
1248 // When accessing a binding, it needs to be checked for initialization.
1249 // However in the following cases the binding is initialized immediately
1250 // after creation so the initialization check can always be skipped:
1251 //
1252 // 1. Var declared local variables.
1253 //      var foo;
1254 // 2. A local variable introduced by a function declaration.
1255 //      function foo() {}
1256 // 3. Parameters
1257 //      function x(foo) {}
1258 // 4. Catch bound variables.
1259 //      try {} catch (foo) {}
1260 // 5. Function name variables of named function expressions.
1261 //      var x = function foo() {}
1262 // 6. Implicit binding of 'this'.
1263 // 7. Implicit binding of 'arguments' in functions.
1264 //
1265 // The following enum specifies a flag that indicates if the binding needs a
1266 // distinct initialization step (kNeedsInitialization) or if the binding is
1267 // immediately initialized upon creation (kCreatedInitialized).
1268 enum InitializationFlag : uint8_t { kNeedsInitialization, kCreatedInitialized };
1269 
1270 // Static variables can only be used with the class in the closest
1271 // class scope as receivers.
1272 enum class IsStaticFlag : uint8_t { kNotStatic, kStatic };
1273 
1274 enum MaybeAssignedFlag : uint8_t { kNotAssigned, kMaybeAssigned };
1275 
1276 enum class InterpreterPushArgsMode : unsigned {
1277   kArrayFunction,
1278   kWithFinalSpread,
1279   kOther
1280 };
1281 
1282 inline size_t hash_value(InterpreterPushArgsMode mode) {
1283   return bit_cast<unsigned>(mode);
1284 }
1285 
1286 inline std::ostream& operator<<(std::ostream& os,
1287                                 InterpreterPushArgsMode mode) {
1288   switch (mode) {
1289     case InterpreterPushArgsMode::kArrayFunction:
1290       return os << "ArrayFunction";
1291     case InterpreterPushArgsMode::kWithFinalSpread:
1292       return os << "WithFinalSpread";
1293     case InterpreterPushArgsMode::kOther:
1294       return os << "Other";
1295   }
1296   UNREACHABLE();
1297 }
1298 
1299 inline uint32_t ObjectHash(Address address) {
1300   // All objects are at least pointer aligned, so we can remove the trailing
1301   // zeros.
1302   return static_cast<uint32_t>(address >> kTaggedSizeLog2);
1303 }
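// Added worked example (assuming pointer compression, so kTaggedSizeLog2 is
// 2): ObjectHash(0x1000) == 0x1000 >> 2 == 0x400.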
1304 
1305 // Type feedback is encoded in such a way that, we can combine the feedback
1306 // at different points by performing an 'OR' operation. Type feedback moves
1307 // to a more generic type when we combine feedback.
1308 //
1309 //   kSignedSmall -> kSignedSmallInputs -> kNumber  -> kNumberOrOddball -> kAny
1310 //                                                     kString          -> kAny
1311 //                                                     kBigInt          -> kAny
1312 //
1313 // Technically we wouldn't need the separation between the kNumber and the
1314 // kNumberOrOddball values here, since for binary operations, we always
1315 // truncate oddballs to numbers. In practice, though, always handling both
1316 // numbers and oddballs everywhere causes TurboFan to generate quite a lot
1317 // of unused code, since in 99% of the use sites only numbers are actually
1318 // used.
1319 class BinaryOperationFeedback {
1320  public:
1321   enum {
1322     kNone = 0x0,
1323     kSignedSmall = 0x1,
1324     kSignedSmallInputs = 0x3,
1325     kNumber = 0x7,
1326     kNumberOrOddball = 0xF,
1327     kString = 0x10,
1328     kBigInt = 0x20,
1329     kAny = 0x7F
1330   };
1331 };
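// Added worked example (not part of the original header): combining feedback
// is a bitwise OR that only widens, e.g.
//   kSignedSmall | kSignedSmallInputs == kSignedSmallInputs (0x3)
//   kSignedSmall | kNumber == kNumber (0x7)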
1332 
1333 // Type feedback is encoded in such a way that, we can combine the feedback
1334 // at different points by performing an 'OR' operation.
1335 // This is distinct from BinaryOperationFeedback on purpose, because the
1336 // feedback that matters differs greatly as well as the way it is consumed.
1337 class CompareOperationFeedback {
1338   enum {
1339     kSignedSmallFlag = 1 << 0,
1340     kOtherNumberFlag = 1 << 1,
1341     kBooleanFlag = 1 << 2,
1342     kNullOrUndefinedFlag = 1 << 3,
1343     kInternalizedStringFlag = 1 << 4,
1344     kOtherStringFlag = 1 << 5,
1345     kSymbolFlag = 1 << 6,
1346     kBigIntFlag = 1 << 7,
1347     kReceiverFlag = 1 << 8,
1348     kAnyMask = 0x1FF,
1349   };
1350 
1351  public:
1352   enum Type {
1353     kNone = 0,
1354 
1355     kBoolean = kBooleanFlag,
1356     kNullOrUndefined = kNullOrUndefinedFlag,
1357     kOddball = kBoolean | kNullOrUndefined,
1358 
1359     kSignedSmall = kSignedSmallFlag,
1360     kNumber = kSignedSmall | kOtherNumberFlag,
1361     kNumberOrBoolean = kNumber | kBoolean,
1362     kNumberOrOddball = kNumber | kOddball,
1363 
1364     kInternalizedString = kInternalizedStringFlag,
1365     kString = kInternalizedString | kOtherStringFlag,
1366 
1367     kReceiver = kReceiverFlag,
1368     kReceiverOrNullOrUndefined = kReceiver | kNullOrUndefined,
1369 
1370     kBigInt = kBigIntFlag,
1371     kSymbol = kSymbolFlag,
1372 
1373     kAny = kAnyMask,
1374   };
1375 };
1376 
1377 enum class Operation {
1378   // Binary operations.
1379   kAdd,
1380   kSubtract,
1381   kMultiply,
1382   kDivide,
1383   kModulus,
1384   kExponentiate,
1385   kBitwiseAnd,
1386   kBitwiseOr,
1387   kBitwiseXor,
1388   kShiftLeft,
1389   kShiftRight,
1390   kShiftRightLogical,
1391   // Unary operations.
1392   kBitwiseNot,
1393   kNegate,
1394   kIncrement,
1395   kDecrement,
1396   // Compare operations.
1397   kEqual,
1398   kStrictEqual,
1399   kLessThan,
1400   kLessThanOrEqual,
1401   kGreaterThan,
1402   kGreaterThanOrEqual,
1403 };
1404 
1405 // Type feedback is encoded in such a way that, we can combine the feedback
1406 // at different points by performing an 'OR' operation. Type feedback moves
1407 // to a more generic type when we combine feedback.
1408 // kNone -> kEnumCacheKeysAndIndices -> kEnumCacheKeys -> kAny
1409 enum class ForInFeedback : uint8_t {
1410   kNone = 0x0,
1411   kEnumCacheKeysAndIndices = 0x1,
1412   kEnumCacheKeys = 0x3,
1413   kAny = 0x7
1414 };
1415 STATIC_ASSERT((static_cast<int>(ForInFeedback::kNone) |
1416                static_cast<int>(ForInFeedback::kEnumCacheKeysAndIndices)) ==
1417               static_cast<int>(ForInFeedback::kEnumCacheKeysAndIndices));
1418 STATIC_ASSERT((static_cast<int>(ForInFeedback::kEnumCacheKeysAndIndices) |
1419                static_cast<int>(ForInFeedback::kEnumCacheKeys)) ==
1420               static_cast<int>(ForInFeedback::kEnumCacheKeys));
1421 STATIC_ASSERT((static_cast<int>(ForInFeedback::kEnumCacheKeys) |
1422                static_cast<int>(ForInFeedback::kAny)) ==
1423               static_cast<int>(ForInFeedback::kAny));
1424 
1425 enum class UnicodeEncoding : uint8_t {
1426   // Different unicode encodings in a |word32|:
1427   UTF16,  // hi 16bits -> trailing surrogate or 0, low 16bits -> lead surrogate
1428   UTF32,  // full UTF32 code unit / Unicode codepoint
1429 };
1430 
1431 inline size_t hash_value(UnicodeEncoding encoding) {
1432   return static_cast<uint8_t>(encoding);
1433 }
1434 
1435 inline std::ostream& operator<<(std::ostream& os, UnicodeEncoding encoding) {
1436   switch (encoding) {
1437     case UnicodeEncoding::UTF16:
1438       return os << "UTF16";
1439     case UnicodeEncoding::UTF32:
1440       return os << "UTF32";
1441   }
1442   UNREACHABLE();
1443 }
1444 
1445 enum class IterationKind { kKeys, kValues, kEntries };
1446 
1447 inline std::ostream& operator<<(std::ostream& os, IterationKind kind) {
1448   switch (kind) {
1449     case IterationKind::kKeys:
1450       return os << "IterationKind::kKeys";
1451     case IterationKind::kValues:
1452       return os << "IterationKind::kValues";
1453     case IterationKind::kEntries:
1454       return os << "IterationKind::kEntries";
1455   }
1456   UNREACHABLE();
1457 }
1458 
1459 enum class CollectionKind { kMap, kSet };
1460 
1461 inline std::ostream& operator<<(std::ostream& os, CollectionKind kind) {
1462   switch (kind) {
1463     case CollectionKind::kMap:
1464       return os << "CollectionKind::kMap";
1465     case CollectionKind::kSet:
1466       return os << "CollectionKind::kSet";
1467   }
1468   UNREACHABLE();
1469 }

// Flags for the runtime function kDefineDataPropertyInLiteral. A property can
// be enumerable or not, and, in the case of functions, the function name
// can be set or not.
enum class DataPropertyInLiteralFlag {
  kNoFlags = 0,
  kDontEnum = 1 << 0,
  kSetFunctionName = 1 << 1
};
using DataPropertyInLiteralFlags = base::Flags<DataPropertyInLiteralFlag>;
DEFINE_OPERATORS_FOR_FLAGS(DataPropertyInLiteralFlags)
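
// A minimal usage sketch (assumed, not taken from this header): the
// DEFINE_OPERATORS_FOR_FLAGS macro makes the usual bitwise operators
// available on the flags type, e.g.:
//
//   DataPropertyInLiteralFlags flags =
//       DataPropertyInLiteralFlag::kDontEnum |
//       DataPropertyInLiteralFlag::kSetFunctionName;
//   if (flags & DataPropertyInLiteralFlag::kDontEnum) { /* ... */ }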

enum ExternalArrayType {
  kExternalInt8Array = 1,
  kExternalUint8Array,
  kExternalInt16Array,
  kExternalUint16Array,
  kExternalInt32Array,
  kExternalUint32Array,
  kExternalFloat32Array,
  kExternalFloat64Array,
  kExternalUint8ClampedArray,
  kExternalBigInt64Array,
  kExternalBigUint64Array,
};

struct AssemblerDebugInfo {
  AssemblerDebugInfo(const char* name, const char* file, int line)
      : name(name), file(file), line(line) {}
  const char* name;
  const char* file;
  int line;
};

inline std::ostream& operator<<(std::ostream& os,
                                const AssemblerDebugInfo& info) {
  os << "(" << info.name << ":" << info.file << ":" << info.line << ")";
  return os;
}
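
// For illustration (the name and file below are made up): streaming
//
//   os << AssemblerDebugInfo("LoadRoot", "src/codegen/example.cc", 42);
//
// would print "(LoadRoot:src/codegen/example.cc:42)" with the operator<<
// defined above.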

using FileAndLine = std::pair<const char*, int>;

enum OptimizationMarker : int32_t {
  // These values are set so that it is easy to check if there is a marker
  // where some processing needs to be done.
  kNone = 0b000,
  kInOptimizationQueue = 0b001,
  kCompileOptimized = 0b010,
  kCompileOptimizedConcurrent = 0b011,
  kLogFirstExecution = 0b100,
  kLastOptimizationMarker = kLogFirstExecution
};
// For kNone or kInOptimizationQueue we don't need any special processing.
// To check both cases using a single mask, we expect kNone to be 0 and
// kInOptimizationQueue to be 1, so that we can mask off the LSB for the check.
STATIC_ASSERT(kNone == 0b000 && kInOptimizationQueue == 0b001);
STATIC_ASSERT(kLastOptimizationMarker <= 0b111);
static constexpr uint32_t kNoneOrInOptimizationQueueMask = 0b110;
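
// Illustration of the mask above (the helper name is hypothetical): because
// kNone is 0b000 and kInOptimizationQueue is 0b001, both -- and only both --
// have all bits of kNoneOrInOptimizationQueueMask (0b110) cleared:
//
//   bool RequiresNoSpecialProcessing(OptimizationMarker marker) {
//     return (static_cast<uint32_t>(marker) &
//             kNoneOrInOptimizationQueueMask) == 0;
//   }
//   // true for kNone (0b000) and kInOptimizationQueue (0b001); false for
//   // kCompileOptimized (0b010), kCompileOptimizedConcurrent (0b011) and
//   // kLogFirstExecution (0b100).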

inline bool IsInOptimizationQueueMarker(OptimizationMarker marker) {
  return marker == OptimizationMarker::kInOptimizationQueue;
}

inline bool IsCompileOptimizedMarker(OptimizationMarker marker) {
  return marker == OptimizationMarker::kCompileOptimized ||
         marker == OptimizationMarker::kCompileOptimizedConcurrent;
}

inline std::ostream& operator<<(std::ostream& os,
                                const OptimizationMarker& marker) {
  switch (marker) {
    case OptimizationMarker::kLogFirstExecution:
      return os << "OptimizationMarker::kLogFirstExecution";
    case OptimizationMarker::kNone:
      return os << "OptimizationMarker::kNone";
    case OptimizationMarker::kCompileOptimized:
      return os << "OptimizationMarker::kCompileOptimized";
    case OptimizationMarker::kCompileOptimizedConcurrent:
      return os << "OptimizationMarker::kCompileOptimizedConcurrent";
    case OptimizationMarker::kInOptimizationQueue:
      return os << "OptimizationMarker::kInOptimizationQueue";
  }
}

enum class OptimizationTier {
  kNone = 0b00,
  kMidTier = 0b01,
  kTopTier = 0b10,
  kLastOptimizationTier = kTopTier
};
static constexpr uint32_t kNoneOrMidTierMask = 0b10;
static constexpr uint32_t kNoneMask = 0b11;
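
// By analogy with the marker masks above (sketch only; the helper names are
// hypothetical and not part of this header):
//
//   // kNone (0b00) and kMidTier (0b01) are the only tiers with bit 1 clear.
//   bool IsNoneOrMidTier(OptimizationTier tier) {
//     return (static_cast<uint32_t>(tier) & kNoneOrMidTierMask) == 0;
//   }
//   // kNone is the only tier with both bits clear.
//   bool IsNoTier(OptimizationTier tier) {
//     return (static_cast<uint32_t>(tier) & kNoneMask) == 0;
//   }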

inline std::ostream& operator<<(std::ostream& os,
                                const OptimizationTier& tier) {
  switch (tier) {
    case OptimizationTier::kNone:
      return os << "OptimizationTier::kNone";
    case OptimizationTier::kMidTier:
      return os << "OptimizationTier::kMidTier";
    case OptimizationTier::kTopTier:
      return os << "OptimizationTier::kTopTier";
  }
}

enum class SpeculationMode { kAllowSpeculation, kDisallowSpeculation };

inline std::ostream& operator<<(std::ostream& os,
                                SpeculationMode speculation_mode) {
  switch (speculation_mode) {
    case SpeculationMode::kAllowSpeculation:
      return os << "SpeculationMode::kAllowSpeculation";
    case SpeculationMode::kDisallowSpeculation:
      return os << "SpeculationMode::kDisallowSpeculation";
  }
  UNREACHABLE();
  return os;
}

enum class BlockingBehavior { kBlock, kDontBlock };

enum class ConcurrencyMode { kNotConcurrent, kConcurrent };

#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                       \
  C(Handler, handler)                                          \
  C(CEntryFP, c_entry_fp)                                      \
  C(CFunction, c_function)                                     \
  C(Context, context)                                          \
  C(PendingException, pending_exception)                       \
  C(PendingHandlerContext, pending_handler_context)            \
  C(PendingHandlerEntrypoint, pending_handler_entrypoint)      \
  C(PendingHandlerConstantPool, pending_handler_constant_pool) \
  C(PendingHandlerFP, pending_handler_fp)                      \
  C(PendingHandlerSP, pending_handler_sp)                      \
  C(ExternalCaughtException, external_caught_exception)        \
  C(JSEntrySP, js_entry_sp)

enum IsolateAddressId {
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
  FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
#undef DECLARE_ENUM
      kIsolateAddressCount
};
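
// For reference, the DECLARE_ENUM expansion above yields
//
//   kHandlerAddress, kCEntryFPAddress, kCFunctionAddress, kContextAddress,
//   kPendingExceptionAddress, kPendingHandlerContextAddress,
//   kPendingHandlerEntrypointAddress, kPendingHandlerConstantPoolAddress,
//   kPendingHandlerFPAddress, kPendingHandlerSPAddress,
//   kExternalCaughtExceptionAddress, kJSEntrySPAddress,
//   kIsolateAddressCount
//
// so kIsolateAddressCount equals the number of C(...) entries in
// FOR_EACH_ISOLATE_ADDRESS_NAME (currently 12).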

enum class PoisoningMitigationLevel {
  kPoisonAll,
  kDontPoison,
  kPoisonCriticalOnly
};

enum class LoadSensitivity {
  kCritical,  // Critical loads are poisoned whenever we can run untrusted
              // code (i.e., when --untrusted-code-mitigations is on).
  kUnsafe,    // Unsafe loads are poisoned when full poisoning is on
              // (--branch-load-poisoning).
  kSafe       // Safe loads are never poisoned.
};

// The reason for a WebAssembly trap.
#define FOREACH_WASM_TRAPREASON(V) \
  V(TrapUnreachable)               \
  V(TrapMemOutOfBounds)            \
  V(TrapUnalignedAccess)           \
  V(TrapDivByZero)                 \
  V(TrapDivUnrepresentable)        \
  V(TrapRemByZero)                 \
  V(TrapFloatUnrepresentable)      \
  V(TrapFuncSigMismatch)           \
  V(TrapDataSegmentDropped)        \
  V(TrapElemSegmentDropped)        \
  V(TrapTableOutOfBounds)          \
  V(TrapBrOnExnNull)               \
  V(TrapRethrowNull)               \
  V(TrapNullDereference)           \
  V(TrapIllegalCast)               \
  V(TrapArrayOutOfBounds)

enum KeyedAccessLoadMode {
  STANDARD_LOAD,
  LOAD_IGNORE_OUT_OF_BOUNDS,
};

enum KeyedAccessStoreMode {
  STANDARD_STORE,
  STORE_AND_GROW_HANDLE_COW,
  STORE_IGNORE_OUT_OF_BOUNDS,
  STORE_HANDLE_COW
};

enum MutableMode { MUTABLE, IMMUTABLE };

inline bool IsCOWHandlingStoreMode(KeyedAccessStoreMode store_mode) {
  return store_mode == STORE_HANDLE_COW ||
         store_mode == STORE_AND_GROW_HANDLE_COW;
}

inline bool IsGrowStoreMode(KeyedAccessStoreMode store_mode) {
  return store_mode == STORE_AND_GROW_HANDLE_COW;
}

enum IcCheckType { ELEMENT, PROPERTY };

// Helper stubs can be called in different ways depending on where the target
// code is located and what the call sequence is expected to look like:
//  - CodeObject: Call on-heap {Code} object via {RelocInfo::CODE_TARGET}.
//  - WasmRuntimeStub: Call native {WasmCode} stub via
//    {RelocInfo::WASM_STUB_CALL}.
//  - BuiltinPointer: Call a builtin based on a builtin pointer with dynamic
//    contents. If builtins are embedded, we call directly into off-heap code
//    without going through the on-heap Code trampoline.
enum class StubCallMode {
  kCallCodeObject,
  kCallWasmRuntimeStub,
  kCallBuiltinPointer,
};
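
// A rough dispatch sketch, only to illustrate the three modes; the Emit*
// helpers named here are hypothetical and do not exist in this code base:
//
//   switch (mode) {
//     case StubCallMode::kCallCodeObject:
//       EmitCallToCodeObject(target);      // RelocInfo::CODE_TARGET
//       break;
//     case StubCallMode::kCallWasmRuntimeStub:
//       EmitCallToWasmStub(target);        // RelocInfo::WASM_STUB_CALL
//       break;
//     case StubCallMode::kCallBuiltinPointer:
//       EmitCallToBuiltinPointer(target);  // may go straight to off-heap code
//       break;
//   }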

constexpr int kFunctionLiteralIdInvalid = -1;
constexpr int kFunctionLiteralIdTopLevel = 0;

constexpr int kSmallOrderedHashSetMinCapacity = 4;
constexpr int kSmallOrderedHashMapMinCapacity = 4;

static const uint16_t kDontAdaptArgumentsSentinel = static_cast<uint16_t>(-1);

// Opaque data type for identifying stack frames. Used extensively
// by the debugger.
// ID_MIN_VALUE and ID_MAX_VALUE are specified to ensure that the enumeration
// type has the correct value range (see Issue 830 for more details).
enum StackFrameId { ID_MIN_VALUE = kMinInt, ID_MAX_VALUE = kMaxInt, NO_ID = 0 };

enum class ExceptionStatus : bool { kException = false, kSuccess = true };
V8_INLINE bool operator!(ExceptionStatus status) {
  return !static_cast<bool>(status);
}
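
// Usage sketch (the function in this example is made up): operator! above
// lets callers test for the failure case directly:
//
//   ExceptionStatus status = SomeOperationThatMayThrow();
//   if (!status) return;  // propagate the exception case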

enum class TraceRetainingPathMode { kEnabled, kDisabled };

// Used in the ScopeInfo flags fields for the function name variable for named
// function expressions, and for the receiver. Must be declared here so that it
// can be used in Torque.
enum class VariableAllocationInfo { NONE, STACK, CONTEXT, UNUSED };

enum class DynamicMapChecksStatus : uint8_t {
  kSuccess = 0,
  kBailout = 1,
  kDeopt = 2
};

#ifdef V8_COMPRESS_POINTERS
class IsolateRoot {
 public:
  explicit constexpr IsolateRoot(Address address) : address_(address) {}
  // NOLINTNEXTLINE
  inline IsolateRoot(const Isolate* isolate);
  // NOLINTNEXTLINE
  inline IsolateRoot(const LocalIsolate* isolate);

  inline Address address() const;

 private:
  Address address_;
};
#else
class IsolateRoot {
 public:
  IsolateRoot() = default;
  // NOLINTNEXTLINE
  IsolateRoot(const Isolate* isolate) {}
  // NOLINTNEXTLINE
  IsolateRoot(const LocalIsolate* isolate) {}
};
#endif

class int31_t {
 public:
  constexpr int31_t() : value_(0) {}
  constexpr int31_t(int value) : value_(value) {  // NOLINT(runtime/explicit)
    DCHECK_EQ((value & 0x80000000) != 0, (value & 0x40000000) != 0);
  }
  int31_t& operator=(int value) {
    DCHECK_EQ((value & 0x80000000) != 0, (value & 0x40000000) != 0);
    value_ = value;
    return *this;
  }
  int32_t value() const { return value_; }
  operator int32_t() const { return value_; }

 private:
  int32_t value_;
};
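
// The DCHECKs above require bit 31 and bit 30 to agree, i.e. the stored value
// must already be a sign-extended 31-bit integer. For illustration:
//
//   int31_t ok_max(0x3FFFFFFF);   // 2^30 - 1: bits 31 and 30 both 0
//   int31_t ok_min(-0x40000000);  // -2^30:    bits 31 and 30 both 1
//   // int31_t bad(0x40000000);   // 2^30: bit 30 set, bit 31 clear ->
//   //                            // DCHECK failure in debug builds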

}  // namespace internal

// Tag dispatching support for acquire loads and release stores.
struct AcquireLoadTag {};
struct RelaxedLoadTag {};
struct ReleaseStoreTag {};
struct RelaxedStoreTag {};
static constexpr AcquireLoadTag kAcquireLoad;
static constexpr RelaxedLoadTag kRelaxedLoad;
static constexpr ReleaseStoreTag kReleaseStore;
static constexpr RelaxedStoreTag kRelaxedStore;
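
// Tag dispatch sketch (the accessors below are hypothetical): overloads take
// one of the tag types so that call sites spell out the intended memory
// ordering, e.g.
//
//   Object value = holder.field(kAcquireLoad);   // acquire-load overload
//   holder.set_field(value, kReleaseStore);      // release-store overload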

}  // namespace v8

namespace i = v8::internal;

#endif  // V8_COMMON_GLOBALS_H_