1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_HEAP_HEAP_H_
6 #define V8_HEAP_HEAP_H_
7 
8 #include <atomic>
9 #include <cmath>
10 #include <memory>
11 #include <unordered_map>
12 #include <unordered_set>
13 #include <vector>
14 
15 // Clients of this interface shouldn't depend on lots of heap internals.
16 // Do not include anything from src/heap here!
17 #include "include/v8-callbacks.h"
18 #include "include/v8-embedder-heap.h"
19 #include "include/v8-internal.h"
20 #include "include/v8-isolate.h"
21 #include "src/base/atomic-utils.h"
22 #include "src/base/enum-set.h"
23 #include "src/base/platform/condition-variable.h"
24 #include "src/base/platform/mutex.h"
25 #include "src/builtins/accessors.h"
26 #include "src/common/assert-scope.h"
27 #include "src/common/globals.h"
28 #include "src/heap/allocation-observer.h"
29 #include "src/heap/allocation-result.h"
30 #include "src/heap/heap-allocator.h"
31 #include "src/init/heap-symbols.h"
32 #include "src/objects/allocation-site.h"
33 #include "src/objects/fixed-array.h"
34 #include "src/objects/hash-table.h"
35 #include "src/objects/heap-object.h"
36 #include "src/objects/js-array-buffer.h"
37 #include "src/objects/objects.h"
38 #include "src/objects/smi.h"
39 #include "src/objects/visitors.h"
40 #include "src/roots/roots.h"
41 #include "src/utils/allocation.h"
42 #include "testing/gtest/include/gtest/gtest_prod.h"  // nogncheck
43 
44 namespace v8 {
45 
46 namespace debug {
47 using OutOfMemoryCallback = void (*)(void* data);
48 }  // namespace debug
49 
50 namespace internal {
51 
52 namespace heap {
53 class HeapTester;
54 class TestMemoryAllocatorScope;
55 }  // namespace heap
56 
57 namespace third_party_heap {
58 class Heap;
59 class Impl;
60 }  // namespace third_party_heap
61 
62 class IncrementalMarking;
63 class BackingStore;
64 class JSArrayBuffer;
65 class JSPromise;
66 class NativeContext;
67 
68 using v8::MemoryPressureLevel;
69 
70 class ArrayBufferCollector;
71 class ArrayBufferSweeper;
72 class BasicMemoryChunk;
73 class CodeLargeObjectSpace;
74 class CodeRange;
75 class CollectionBarrier;
76 class ConcurrentAllocator;
77 class ConcurrentMarking;
78 class CppHeap;
79 class GCIdleTimeHandler;
80 class GCIdleTimeHeapState;
81 class GCTracer;
82 template <typename T>
83 class GlobalHandleVector;
84 class IsolateSafepoint;
85 class HeapObjectAllocationTracker;
86 class HeapObjectsFilter;
87 class HeapStats;
88 class Isolate;
89 class JSFinalizationRegistry;
90 class LinearAllocationArea;
91 class LocalEmbedderHeapTracer;
92 class LocalHeap;
93 class MarkingBarrier;
94 class MemoryAllocator;
95 class MemoryChunk;
96 class MemoryMeasurement;
97 class MemoryReducer;
98 class MinorMarkCompactCollector;
99 class ObjectIterator;
100 class ObjectStats;
101 class Page;
102 class PagedSpace;
103 class ReadOnlyHeap;
104 class RootVisitor;
105 class SafepointScope;
106 class ScavengeJob;
107 class Scavenger;
108 class ScavengerCollector;
109 class SharedReadOnlySpace;
110 class Space;
111 class StressScavengeObserver;
112 class TimedHistogram;
113 class WeakObjectRetainer;
114 
115 enum ArrayStorageAllocationMode {
116   DONT_INITIALIZE_ARRAY_ELEMENTS,
117   INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
118 };
119 
120 enum class ClearRecordedSlots { kYes, kNo };
121 
122 enum class InvalidateRecordedSlots { kYes, kNo };
123 
124 enum class ClearFreedMemoryMode { kClearFreedMemory, kDontClearFreedMemory };
125 
126 enum ExternalBackingStoreType { kArrayBuffer, kExternalString, kNumTypes };
127 
128 enum class RetainingPathOption { kDefault, kTrackEphemeronPath };
129 
130 // These values are persisted to logs. Entries should not be renumbered and
131 // numeric values should never be reused. If you add new items here, update
132 // src/tools/metrics/histograms/enums.xml in chromium.
133 enum class GarbageCollectionReason : int {
134   kUnknown = 0,
135   kAllocationFailure = 1,
136   kAllocationLimit = 2,
137   kContextDisposal = 3,
138   kCountersExtension = 4,
139   kDebugger = 5,
140   kDeserializer = 6,
141   kExternalMemoryPressure = 7,
142   kFinalizeMarkingViaStackGuard = 8,
143   kFinalizeMarkingViaTask = 9,
144   kFullHashtable = 10,
145   kHeapProfiler = 11,
146   kTask = 12,
147   kLastResort = 13,
148   kLowMemoryNotification = 14,
149   kMakeHeapIterable = 15,
150   kMemoryPressure = 16,
151   kMemoryReducer = 17,
152   kRuntime = 18,
153   kSamplingProfiler = 19,
154   kSnapshotCreator = 20,
155   kTesting = 21,
156   kExternalFinalize = 22,
157   kGlobalAllocationLimit = 23,
158   kMeasureMemory = 24,
159   kBackgroundAllocationFailure = 25,
160 
161   kLastReason = kBackgroundAllocationFailure,
162 };
163 
164 static_assert(kGarbageCollectionReasonMaxValue ==
165                   static_cast<int>(GarbageCollectionReason::kLastReason),
166               "The value of kGarbageCollectionReasonMaxValue is inconsistent.");
167 
168 enum class YoungGenerationHandling {
169   kRegularScavenge = 0,
170   kFastPromotionDuringScavenge = 1,
171   // Histogram::InspectConstructionArguments in chromium requires us to have at
172   // least three buckets.
173   kUnusedBucket = 2,
174   // If you add new items here, then update the young_generation_handling in
175   // counters.h.
176   // Also update src/tools/metrics/histograms/histograms.xml in chromium.
177 };
178 
179 enum class GCIdleTimeAction : uint8_t;
180 
181 enum class SkipRoot {
182   kExternalStringTable,
183   kGlobalHandles,
184   kOldGeneration,
185   kStack,
186   kMainThreadHandles,
187   kUnserializable,
188   kWeak
189 };
190 
191 enum UnprotectMemoryOrigin {
192   kMainThread,
193   kMaybeOffMainThread,
194 };
195 
196 class StrongRootsEntry final {
197   explicit StrongRootsEntry(const char* label) : label(label) {}
198 
199   // Label that identifies the roots in tooling.
200   const char* label;
201   FullObjectSlot start;
202   FullObjectSlot end;
203   StrongRootsEntry* prev;
204   StrongRootsEntry* next;
205 
206   friend class Heap;
207 };
208 
209 #ifdef DEBUG
210 struct CommentStatistic {
211   const char* comment;
212   int size;
213   int count;
214   void Clear() {
215     comment = nullptr;
216     size = 0;
217     count = 0;
218   }
219   // Must be small, since an iteration is used for lookup.
220   static const int kMaxComments = 64;
221 };
222 #endif
223 
224 using EphemeronRememberedSet =
225     std::unordered_map<EphemeronHashTable, std::unordered_set<int>,
226                        Object::Hasher>;
227 
228 class Heap {
229  public:
230   // Stores ephemeron entries where the EphemeronHashTable is in old-space,
231   // and the key of the entry is in new-space. Such keys do not appear in the
232   // usual OLD_TO_NEW remembered set.
233   EphemeronRememberedSet ephemeron_remembered_set_;
234   enum FindMementoMode { kForRuntime, kForGC };
235 
236   enum class HeapGrowingMode { kSlow, kConservative, kMinimal, kDefault };
237 
238   enum HeapState {
239     NOT_IN_GC,
240     SCAVENGE,
241     MARK_COMPACT,
242     MINOR_MARK_COMPACT,
243     TEAR_DOWN
244   };
245 
246   // Emits GC events for DevTools timeline.
247   class V8_NODISCARD DevToolsTraceEventScope {
248    public:
249     DevToolsTraceEventScope(Heap* heap, const char* event_name,
250                             const char* event_type);
251     ~DevToolsTraceEventScope();
252 
253    private:
254     Heap* heap_;
255     const char* event_name_;
256   };
257 
258   class ExternalMemoryAccounting {
259    public:
260     int64_t total() { return total_.load(std::memory_order_relaxed); }
261     int64_t limit() { return limit_.load(std::memory_order_relaxed); }
262     int64_t low_since_mark_compact() {
263       return low_since_mark_compact_.load(std::memory_order_relaxed);
264     }
265 
266     void ResetAfterGC() {
267       set_low_since_mark_compact(total());
268       set_limit(total() + kExternalAllocationSoftLimit);
269     }
270 
271     int64_t Update(int64_t delta) {
272       const int64_t amount =
273           total_.fetch_add(delta, std::memory_order_relaxed) + delta;
274       if (amount < low_since_mark_compact()) {
275         set_low_since_mark_compact(amount);
276         set_limit(amount + kExternalAllocationSoftLimit);
277       }
278       return amount;
279     }
280 
281     int64_t AllocatedSinceMarkCompact() {
282       int64_t total_bytes = total();
283       int64_t low_since_mark_compact_bytes = low_since_mark_compact();
284 
285       if (total_bytes <= low_since_mark_compact_bytes) {
286         return 0;
287       }
288       return static_cast<uint64_t>(total_bytes - low_since_mark_compact_bytes);
289     }
290 
291    private:
292     void set_total(int64_t value) {
293       total_.store(value, std::memory_order_relaxed);
294     }
295 
296     void set_limit(int64_t value) {
297       limit_.store(value, std::memory_order_relaxed);
298     }
299 
300     void set_low_since_mark_compact(int64_t value) {
301       low_since_mark_compact_.store(value, std::memory_order_relaxed);
302     }
303 
304     // The amount of external memory registered through the API.
305     std::atomic<int64_t> total_{0};
306 
307     // The limit at which to trigger memory pressure from the API.
308     std::atomic<int64_t> limit_{kExternalAllocationSoftLimit};
309 
310     // Caches the amount of external memory registered at the last MC.
311     std::atomic<int64_t> low_since_mark_compact_{0};
312   };
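  // Editorial note (illustrative trace, not part of the original header):
  // starting from total == 0, low_since_mark_compact == 0 and
  // limit == kExternalAllocationSoftLimit, the accounting above behaves as:
  //
  //   Update(+8 * MB)  // total = 8 MB; not below the low watermark, so the
  //                    // watermark and the soft limit are unchanged.
  //   Update(-3 * MB)  // total = 5 MB; AllocatedSinceMarkCompact() == 5 MB.
  //   ResetAfterGC()   // watermark = 5 MB,
  //                    // limit = 5 MB + kExternalAllocationSoftLimit.
  //   Update(-2 * MB)  // total = 3 MB, below the watermark, so the watermark
  //                    // and the soft limit are lowered to follow it.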
313 
314   using PretenuringFeedbackMap =
315       std::unordered_map<AllocationSite, size_t, Object::Hasher>;
316 
317   // Taking this mutex prevents the GC from entering a phase that relocates
318   // object references.
319   base::Mutex* relocation_mutex() { return &relocation_mutex_; }
320 
321   // Support for context snapshots.  After calling this we have a linear
322   // space to write objects in each space.
323   struct Chunk {
324     uint32_t size;
325     Address start;
326     Address end;
327   };
328   using Reservation = std::vector<Chunk>;
329 
330 #if V8_OS_ANDROID
331   // Don't apply pointer multiplier on Android since it has no swap space and
332   // should instead adapt its heap size based on available physical memory.
333   static const int kPointerMultiplier = 1;
334   static const int kHeapLimitMultiplier = 1;
335 #else
336   static const int kPointerMultiplier = kTaggedSize / 4;
337   // The heap limit needs to be computed based on the system pointer size
338   // because we want a pointer-compressed heap to have a larger limit than an
339   // ordinary 32-bit heap, which is constrained by the 2GB virtual address space.
340   static const int kHeapLimitMultiplier = kSystemPointerSize / 4;
341 #endif
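  // Editorial note (worked values, assuming the usual configurations where
  // kTaggedSize is 4 with pointer compression and 8 without, and
  // kSystemPointerSize is 8 on 64-bit targets): a 64-bit build with pointer
  // compression gets kPointerMultiplier == 1 and kHeapLimitMultiplier == 2;
  // without pointer compression both are 2; 32-bit and Android builds use 1
  // for both. The size constants below scale with these multipliers.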
342 
343   static const size_t kMaxInitialOldGenerationSize =
344       256 * MB * kHeapLimitMultiplier;
345 
346   // These constants control heap configuration based on the physical memory.
347   static constexpr size_t kPhysicalMemoryToOldGenerationRatio = 4;
348   // Young generation size is the same for compressed heaps and 32-bit heaps.
349   static constexpr size_t kOldGenerationToSemiSpaceRatio =
350       128 * kHeapLimitMultiplier / kPointerMultiplier;
351   static constexpr size_t kOldGenerationToSemiSpaceRatioLowMemory =
352       256 * kHeapLimitMultiplier / kPointerMultiplier;
353   static constexpr size_t kOldGenerationLowMemory =
354       128 * MB * kHeapLimitMultiplier;
355   static constexpr size_t kNewLargeObjectSpaceToSemiSpaceRatio = 1;
356 #if ENABLE_HUGEPAGE
357   static constexpr size_t kMinSemiSpaceSize =
358       kHugePageSize * kPointerMultiplier;
359   static constexpr size_t kMaxSemiSpaceSize =
360       kHugePageSize * 16 * kPointerMultiplier;
361 #else
362   static constexpr size_t kMinSemiSpaceSize = 512 * KB * kPointerMultiplier;
363   static constexpr size_t kMaxSemiSpaceSize = 8192 * KB * kPointerMultiplier;
364 #endif
365 
366   STATIC_ASSERT(kMinSemiSpaceSize % (1 << kPageSizeBits) == 0);
367   STATIC_ASSERT(kMaxSemiSpaceSize % (1 << kPageSizeBits) == 0);
368 
369   static const int kTraceRingBufferSize = 512;
370   static const int kStacktraceBufferSize = 512;
371 
372   static const int kNoGCFlags = 0;
373   static const int kReduceMemoryFootprintMask = 1;
374   // GCs that are forced, either through testing configurations (requiring
375   // --expose-gc) or through DevTools (using LowMemoryNotification).
376   static const int kForcedGC = 2;
377 
378   // The minimum size of a HeapObject on the heap.
379   static const int kMinObjectSizeInTaggedWords = 2;
380 
381   static const int kMinPromotedPercentForFastPromotionMode = 90;
382 
383   STATIC_ASSERT(static_cast<int>(RootIndex::kUndefinedValue) ==
384                 Internals::kUndefinedValueRootIndex);
385   STATIC_ASSERT(static_cast<int>(RootIndex::kTheHoleValue) ==
386                 Internals::kTheHoleValueRootIndex);
387   STATIC_ASSERT(static_cast<int>(RootIndex::kNullValue) ==
388                 Internals::kNullValueRootIndex);
389   STATIC_ASSERT(static_cast<int>(RootIndex::kTrueValue) ==
390                 Internals::kTrueValueRootIndex);
391   STATIC_ASSERT(static_cast<int>(RootIndex::kFalseValue) ==
392                 Internals::kFalseValueRootIndex);
393   STATIC_ASSERT(static_cast<int>(RootIndex::kempty_string) ==
394                 Internals::kEmptyStringRootIndex);
395 
396   // Calculates the maximum amount of filler that could be required by the
397   // given alignment.
398   V8_EXPORT_PRIVATE static int GetMaximumFillToAlign(
399       AllocationAlignment alignment);
400   // Calculates the actual amount of filler required for a given address at the
401   // given alignment.
402   V8_EXPORT_PRIVATE static int GetFillToAlign(Address address,
403                                               AllocationAlignment alignment);
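  // Editorial sketch (assumes a 4-byte kTaggedSize and kDoubleAligned
  // alignment): GetMaximumFillToAlign(kDoubleAligned) is one tagged word, and
  // GetFillToAlign(addr, kDoubleAligned) is 4 when addr is not 8-byte aligned
  // and 0 otherwise. Aligned allocation paths reserve the maximum up front and
  // insert a filler only when the actual fill turns out to be non-zero.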
404 
405   // Returns the size of the initial area of a code-range, which is marked
406   // writable and reserved to contain unwind information.
407   static size_t GetCodeRangeReservedAreaSize();
408 
409   [[noreturn]] void FatalProcessOutOfMemory(const char* location);
410 
411   // Checks whether the space is valid.
412   static bool IsValidAllocationSpace(AllocationSpace space);
413 
414   // Zapping is needed for heap verification and is always done in debug builds.
415   static inline bool ShouldZapGarbage() {
416 #ifdef DEBUG
417     return true;
418 #else
419 #ifdef VERIFY_HEAP
420     return FLAG_verify_heap;
421 #else
422     return false;
423 #endif
424 #endif
425   }
426 
427   // Helper function to get the bytecode flushing mode based on the flags. This
428   // is required because it is not safe to access flags in the concurrent marker.
429   static inline base::EnumSet<CodeFlushMode> GetCodeFlushMode(Isolate* isolate);
430 
431   static uintptr_t ZapValue() {
432     return FLAG_clear_free_memory ? kClearedFreeMemoryValue : kZapValue;
433   }
434 
435   static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
436     return collector == GarbageCollector::SCAVENGER ||
437            collector == GarbageCollector::MINOR_MARK_COMPACTOR;
438   }
439 
440   static inline GarbageCollector YoungGenerationCollector() {
441     return (FLAG_minor_mc) ? GarbageCollector::MINOR_MARK_COMPACTOR
442                            : GarbageCollector::SCAVENGER;
443   }
444 
445   static inline const char* CollectorName(GarbageCollector collector) {
446     switch (collector) {
447       case GarbageCollector::SCAVENGER:
448         return "Scavenger";
449       case GarbageCollector::MARK_COMPACTOR:
450         return "Mark-Compact";
451       case GarbageCollector::MINOR_MARK_COMPACTOR:
452         return "Minor Mark-Compact";
453     }
454     return "Unknown collector";
455   }
456 
457   static inline const char* CollectorName(v8::GCType gc_type) {
458     switch (gc_type) {
459       case kGCTypeScavenge:
460         return "Scavenger";
461       case kGCTypeMarkSweepCompact:
462         return "Mark-Compact";
463       case kGCTypeMinorMarkCompact:
464         return "Minor Mark-Compact";
465       default:
466         break;
467     }
468     return "Unknown collector";
469   }
470 
471   // Copy block of memory from src to dst. Size of block should be aligned
472   // by pointer size.
473   static inline void CopyBlock(Address dst, Address src, int byte_size);
474 
475   // Executes generational and/or marking write barrier for a [start, end) range
476   // of non-weak slots inside |object|.
477   template <typename TSlot>
478   V8_EXPORT_PRIVATE void WriteBarrierForRange(HeapObject object, TSlot start,
479                                               TSlot end);
480 
481   V8_EXPORT_PRIVATE static void WriteBarrierForCodeSlow(Code host);
482 
483   V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject object,
484                                                         Address slot,
485                                                         HeapObject value);
486   V8_EXPORT_PRIVATE inline void RecordEphemeronKeyWrite(
487       EphemeronHashTable table, Address key_slot);
488   V8_EXPORT_PRIVATE static void EphemeronKeyWriteBarrierFromCode(
489       Address raw_object, Address address, Isolate* isolate);
490   V8_EXPORT_PRIVATE static void GenerationalBarrierForCodeSlow(
491       Code host, RelocInfo* rinfo, HeapObject value);
492   V8_EXPORT_PRIVATE static bool PageFlagsAreConsistent(HeapObject object);
493 
494   // Notifies the heap that it is ok to start marking or other activities that
495   // should not happen during deserialization.
496   void NotifyDeserializationComplete();
497 
498   void NotifyBootstrapComplete();
499 
500   void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk);
501 
502   inline Address* NewSpaceAllocationTopAddress();
503   inline Address* NewSpaceAllocationLimitAddress();
504   inline Address* OldSpaceAllocationTopAddress();
505   inline Address* OldSpaceAllocationLimitAddress();
506 
507   size_t NewSpaceSize();
508   size_t NewSpaceCapacity();
509 
510   // Move len non-weak tagged elements from src_slot to dst_slot of dst_object.
511   // The source and destination memory ranges can overlap.
512   V8_EXPORT_PRIVATE void MoveRange(HeapObject dst_object, ObjectSlot dst_slot,
513                                    ObjectSlot src_slot, int len,
514                                    WriteBarrierMode mode);
515 
516   // Copy len non-weak tagged elements from src_slot to dst_slot of dst_object.
517   // The source and destination memory ranges must not overlap.
518   template <typename TSlot>
519   void CopyRange(HeapObject dst_object, TSlot dst_slot, TSlot src_slot, int len,
520                  WriteBarrierMode mode);
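  // Editorial sketch (illustrative only, assuming a Heap* named `heap`): a
  // bulk copy that skips the per-slot barrier can be followed by a single
  // range barrier for the copied slots:
  //
  //   heap->CopyRange(dst, dst_slot, src_slot, len, SKIP_WRITE_BARRIER);
  //   heap->WriteBarrierForRange(dst, dst_slot, dst_slot + len);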
521 
522   // Initialize a filler object to keep the ability to iterate over the heap
523   // when introducing gaps within pages. If slots could have been recorded in
524   // the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
525   // pass ClearRecordedSlots::kNo. Clears memory if clearing slots.
526   V8_EXPORT_PRIVATE HeapObject CreateFillerObjectAt(
527       Address addr, int size, ClearRecordedSlots clear_slots_mode);
528 
529   void CreateFillerObjectAtBackground(Address addr, int size,
530                                       ClearFreedMemoryMode clear_memory_mode);
531 
532   template <typename T>
533   void CreateFillerForArray(T object, int elements_to_trim, int bytes_to_trim);
534 
535   bool CanMoveObjectStart(HeapObject object);
536 
537   bool IsImmovable(HeapObject object);
538 
539   V8_EXPORT_PRIVATE static bool IsLargeObject(HeapObject object);
540 
541   // Trim the given array from the left. Note that this relocates the object
542   // start and hence is only valid if there is only a single reference to it.
543   V8_EXPORT_PRIVATE FixedArrayBase LeftTrimFixedArray(FixedArrayBase obj,
544                                                       int elements_to_trim);
545 
546   // Trim the given array from the right.
547   V8_EXPORT_PRIVATE void RightTrimFixedArray(FixedArrayBase obj,
548                                              int elements_to_trim);
549   void RightTrimWeakFixedArray(WeakFixedArray obj, int elements_to_trim);
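  // Editorial sketch (assuming a FixedArray `elements` and a smaller
  // `new_length`): shrinking a backing store in place, e.g.
  //
  //   heap->RightTrimFixedArray(elements, elements.length() - new_length);
  //
  // frees the tail by writing a filler over it (see CreateFillerObjectAt
  // above), so heap iteration over the page stays well-formed.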
550 
551   // Converts the given boolean condition to a JavaScript boolean value.
552   inline Oddball ToBoolean(bool condition);
553 
554   // Notify the heap that a context has been disposed.
555   V8_EXPORT_PRIVATE int NotifyContextDisposed(bool dependant_context);
556 
557   void set_native_contexts_list(Object object) {
558     native_contexts_list_.store(object.ptr(), std::memory_order_release);
559   }
560 
561   Object native_contexts_list() const {
562     return Object(native_contexts_list_.load(std::memory_order_acquire));
563   }
564 
565   void set_allocation_sites_list(Object object) {
566     allocation_sites_list_ = object;
567   }
568   Object allocation_sites_list() { return allocation_sites_list_; }
569 
570   void set_dirty_js_finalization_registries_list(Object object) {
571     dirty_js_finalization_registries_list_ = object;
572   }
573   Object dirty_js_finalization_registries_list() {
574     return dirty_js_finalization_registries_list_;
575   }
576   void set_dirty_js_finalization_registries_list_tail(Object object) {
577     dirty_js_finalization_registries_list_tail_ = object;
578   }
579   Object dirty_js_finalization_registries_list_tail() {
580     return dirty_js_finalization_registries_list_tail_;
581   }
582 
583   // Used in CreateAllocationSiteStub and the (de)serializer.
584   Address allocation_sites_list_address() {
585     return reinterpret_cast<Address>(&allocation_sites_list_);
586   }
587 
588   // Traverses all the allocation sites [nested_site and weak_next] in the list
589   // and calls the visitor for each of them.
590   void ForeachAllocationSite(
591       Object list, const std::function<void(AllocationSite)>& visitor);
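  // Editorial sketch (assuming a Heap* named `heap`): walking the allocation
  // site list with a visitor, e.g.
  //
  //   heap->ForeachAllocationSite(
  //       heap->allocation_sites_list(),
  //       [](AllocationSite site) { /* inspect nested_site / weak_next */ });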
592 
593   // Number of mark-sweeps.
594   int ms_count() const { return ms_count_; }
595 
596   // Checks whether the given object is allowed to be migrated from its
597   // current space into the given destination space. Used for debugging.
598   bool AllowedToBeMigrated(Map map, HeapObject object, AllocationSpace dest);
599 
600   void CheckHandleCount();
601 
602   // Print short heap statistics.
603   void PrintShortHeapStatistics();
604 
605   // Print statistics of freelists of old_space:
606   //  with FLAG_trace_gc_freelists: summary of each FreeListCategory.
607   //  with FLAG_trace_gc_freelists_verbose: also prints the statistics of each
608   //  FreeListCategory of each page.
609   void PrintFreeListsStats();
610 
611   // Dump heap statistics in JSON format.
612   void DumpJSONHeapStatistics(std::stringstream& stream);
613 
614   bool write_protect_code_memory() const { return write_protect_code_memory_; }
615 
616   uintptr_t code_space_memory_modification_scope_depth() {
617     return code_space_memory_modification_scope_depth_;
618   }
619 
620   void increment_code_space_memory_modification_scope_depth() {
621     code_space_memory_modification_scope_depth_++;
622   }
623 
624   void decrement_code_space_memory_modification_scope_depth() {
625     code_space_memory_modification_scope_depth_--;
626   }
627 
628   void UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk,
629                                        UnprotectMemoryOrigin origin);
630   V8_EXPORT_PRIVATE void UnprotectAndRegisterMemoryChunk(
631       HeapObject object, UnprotectMemoryOrigin origin);
632   void UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk);
633   V8_EXPORT_PRIVATE void ProtectUnprotectedMemoryChunks();
634 
635   void IncrementCodePageCollectionMemoryModificationScopeDepth() {
636     code_page_collection_memory_modification_scope_depth_++;
637   }
638 
639   void DecrementCodePageCollectionMemoryModificationScopeDepth() {
640     code_page_collection_memory_modification_scope_depth_--;
641   }
642 
643   uintptr_t code_page_collection_memory_modification_scope_depth() {
644     return code_page_collection_memory_modification_scope_depth_;
645   }
646 
647   inline HeapState gc_state() const {
648     return gc_state_.load(std::memory_order_relaxed);
649   }
650   void SetGCState(HeapState state);
651   bool IsTearingDown() const { return gc_state() == TEAR_DOWN; }
652   bool force_oom() const { return force_oom_; }
653 
654   bool ignore_local_gc_requests() const {
655     return ignore_local_gc_requests_depth_ > 0;
656   }
657 
658   inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
659 
660   bool IsGCWithoutStack() const;
661 
662   // If an object has an AllocationMemento trailing it, return it, otherwise
663   // return a null AllocationMemento.
664   template <FindMementoMode mode>
665   inline AllocationMemento FindAllocationMemento(Map map, HeapObject object);
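  // Editorial sketch (roughly how pretenuring feedback uses this during GC):
  //
  //   AllocationMemento memento =
  //       heap->FindAllocationMemento<Heap::kForGC>(map, object);
  //   if (!memento.is_null()) { /* record feedback for the memento's site */ }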
666 
667   // Performs GC after background allocation failure.
668   void CollectGarbageForBackground(LocalHeap* local_heap);
669 
670   //
671   // Support for the API.
672   //
673 
674   void CreateApiObjects();
675 
676   // Implements the corresponding V8 API function.
677   bool IdleNotification(double deadline_in_seconds);
678   bool IdleNotification(int idle_time_in_ms);
679 
680   V8_EXPORT_PRIVATE void MemoryPressureNotification(MemoryPressureLevel level,
681                                                     bool is_isolate_locked);
682   void CheckMemoryPressure();
683 
684   V8_EXPORT_PRIVATE void AddNearHeapLimitCallback(v8::NearHeapLimitCallback,
685                                                   void* data);
686   V8_EXPORT_PRIVATE void RemoveNearHeapLimitCallback(
687       v8::NearHeapLimitCallback callback, size_t heap_limit);
688   V8_EXPORT_PRIVATE void AutomaticallyRestoreInitialHeapLimit(
689       double threshold_percent);
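  // Editorial sketch (assumes the public v8::NearHeapLimitCallback signature,
  // size_t(void* data, size_t current_heap_limit, size_t initial_heap_limit)):
  // an embedder can grant temporary headroom near OOM by registering a
  // callback that returns, say, current_heap_limit * 2, and later hand the
  // budget back via AutomaticallyRestoreInitialHeapLimit(threshold_percent).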
690 
691   void AppendArrayBufferExtension(JSArrayBuffer object,
692                                   ArrayBufferExtension* extension);
693   void DetachArrayBufferExtension(JSArrayBuffer object,
694                                   ArrayBufferExtension* extension);
695 
696   IsolateSafepoint* safepoint() { return safepoint_.get(); }
697 
698   V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs() const;
699 
700 #if DEBUG
701   void VerifyNewSpaceTop();
702 #endif  // DEBUG
703 
704   void RecordStats(HeapStats* stats, bool take_snapshot = false);
705 
706   bool MeasureMemory(std::unique_ptr<v8::MeasureMemoryDelegate> delegate,
707                      v8::MeasureMemoryExecution execution);
708 
709   std::unique_ptr<v8::MeasureMemoryDelegate> MeasureMemoryDelegate(
710       Handle<NativeContext> context, Handle<JSPromise> promise,
711       v8::MeasureMemoryMode mode);
712 
713   // Checks new space expansion criteria and expands semispaces if they were hit.
714   void CheckNewSpaceExpansionCriteria();
715 
716   void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
717 
718   // An object should be promoted if the object has survived a
719   // scavenge operation.
720   inline bool ShouldBePromoted(Address old_address);
721 
722   void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
723 
724   inline int NextScriptId();
725   inline int NextDebuggingId();
726   inline int GetNextTemplateSerialNumber();
727 
728   void SetSerializedObjects(FixedArray objects);
729   void SetSerializedGlobalProxySizes(FixedArray sizes);
730 
731   void SetBasicBlockProfilingData(Handle<ArrayList> list);
732 
733   // For post-mortem debugging.
734   void RememberUnmappedPage(Address page, bool compacted);
735 
736   int64_t external_memory_hard_limit() { return max_old_generation_size() / 2; }
737 
738   V8_INLINE int64_t external_memory();
739   V8_EXPORT_PRIVATE int64_t external_memory_limit();
740   V8_INLINE int64_t update_external_memory(int64_t delta);
741 
742   V8_EXPORT_PRIVATE size_t YoungArrayBufferBytes();
743   V8_EXPORT_PRIVATE size_t OldArrayBufferBytes();
744 
745   uint64_t backing_store_bytes() const {
746     return backing_store_bytes_.load(std::memory_order_relaxed);
747   }
748 
749   void CompactWeakArrayLists();
750 
751   V8_EXPORT_PRIVATE void AddRetainedMap(Handle<NativeContext> context,
752                                         Handle<Map> map);
753 
754   // This event is triggered after an object is moved to a new place.
755   void OnMoveEvent(HeapObject target, HeapObject source, int size_in_bytes);
756 
757   bool deserialization_complete() const { return deserialization_complete_; }
758 
759   // We can only invoke Safepoint() on the main thread local heap after
760   // deserialization is complete. Before that, main_thread_local_heap_ might be
761   // null.
762   V8_INLINE bool CanSafepoint() const { return deserialization_complete(); }
763 
764   bool HasLowAllocationRate();
765   bool HasHighFragmentation();
766   bool HasHighFragmentation(size_t used, size_t committed);
767 
768   void ActivateMemoryReducerIfNeeded();
769 
770   V8_EXPORT_PRIVATE bool ShouldOptimizeForMemoryUsage();
771 
772   bool HighMemoryPressure() {
773     return memory_pressure_level_.load(std::memory_order_relaxed) !=
774            MemoryPressureLevel::kNone;
775   }
776 
777   bool CollectionRequested();
778 
779   void CheckCollectionRequested();
780 
781   void RestoreHeapLimit(size_t heap_limit) {
782     // Do not set the limit lower than the live size + some slack.
783     size_t min_limit = SizeOfObjects() + SizeOfObjects() / 4;
784     set_max_old_generation_size(
785         std::min(max_old_generation_size(), std::max(heap_limit, min_limit)));
786   }
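  // Editorial note (worked example of the clamping above, assuming the current
  // max_old_generation_size() is large enough): with SizeOfObjects() at
  // 400 MB, min_limit is 500 MB, so restoring a saved 256 MB limit actually
  // restores 500 MB, while restoring a saved 1 GB limit restores 1 GB.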
787 
788 #if V8_ENABLE_WEBASSEMBLY
789   // TODO(manoskouk): Inline this if STRONG_MUTABLE_MOVABLE_ROOT_LIST setters
790   // become public.
791   void EnsureWasmCanonicalRttsSize(int length);
792 #endif
793 
794   // ===========================================================================
795   // Initialization. ===========================================================
796   // ===========================================================================
797 
798   void ConfigureHeap(const v8::ResourceConstraints& constraints);
799   void ConfigureHeapDefault();
800 
801   // Prepares the heap, setting up for deserialization.
802   void SetUp(LocalHeap* main_thread_local_heap);
803 
804   // Sets read-only heap and space.
805   void SetUpFromReadOnlyHeap(ReadOnlyHeap* ro_heap);
806 
807   void ReplaceReadOnlySpace(SharedReadOnlySpace* shared_ro_space);
808 
809   // Sets up the heap memory without creating any objects.
810   void SetUpSpaces(LinearAllocationArea* new_allocation_info,
811                    LinearAllocationArea* old_allocation_info);
812 
813   // Prepares the heap, setting up for deserialization.
814   void InitializeMainThreadLocalHeap(LocalHeap* main_thread_local_heap);
815 
816   // (Re-)Initialize hash seed from flag or RNG.
817   void InitializeHashSeed();
818 
819   // Invoked once for the process from V8::Initialize.
820   static void InitializeOncePerProcess();
821 
822   // Bootstraps the object heap with the core set of objects required to run.
823   // Returns whether it succeeded.
824   bool CreateHeapObjects();
825 
826   // Create ObjectStats if live_object_stats_ or dead_object_stats_ are nullptr.
827   void CreateObjectStats();
828 
829   // Sets the TearDown state, so no new GC tasks get posted.
830   void StartTearDown();
831 
832   // Destroys all memory allocated by the heap.
833   void TearDown();
834 
835   // Returns whether SetUp has been called.
836   bool HasBeenSetUp() const;
837 
838   // ===========================================================================
839   // Getters for spaces. =======================================================
840   // ===========================================================================
841 
842   inline Address NewSpaceTop();
843 
844   NewSpace* new_space() { return new_space_; }
845   OldSpace* old_space() { return old_space_; }
846   OldSpace* shared_old_space() { return shared_old_space_; }
847   CodeSpace* code_space() { return code_space_; }
848   MapSpace* map_space() { return map_space_; }
849   inline PagedSpace* space_for_maps();
850   OldLargeObjectSpace* lo_space() { return lo_space_; }
851   CodeLargeObjectSpace* code_lo_space() { return code_lo_space_; }
852   NewLargeObjectSpace* new_lo_space() { return new_lo_space_; }
853   ReadOnlySpace* read_only_space() { return read_only_space_; }
854 
855   inline PagedSpace* paged_space(int idx);
856   inline Space* space(int idx);
857 
858   // ===========================================================================
859   // Getters to other components. ==============================================
860   // ===========================================================================
861 
862   GCTracer* tracer() { return tracer_.get(); }
863 
864   MemoryAllocator* memory_allocator() { return memory_allocator_.get(); }
865   const MemoryAllocator* memory_allocator() const {
866     return memory_allocator_.get();
867   }
868 
869   inline ConcurrentAllocator* concurrent_allocator_for_maps();
870 
871   inline Isolate* isolate();
872 
873   MarkCompactCollector* mark_compact_collector() {
874     return mark_compact_collector_.get();
875   }
876 
877   MinorMarkCompactCollector* minor_mark_compact_collector() {
878     return minor_mark_compact_collector_.get();
879   }
880 
881   ArrayBufferSweeper* array_buffer_sweeper() {
882     return array_buffer_sweeper_.get();
883   }
884 
885   // The potentially overreserved address space region reserved by the code
886   // range if it exists, or an empty region otherwise.
887   const base::AddressRegion& code_region();
888 
889   CodeRange* code_range() { return code_range_.get(); }
890 
891   // The base of the code range if it exists, or the null address otherwise.
892   inline Address code_range_base();
893 
894   LocalHeap* main_thread_local_heap() { return main_thread_local_heap_; }
895 
896   Heap* AsHeap() { return this; }
897 
898   // ===========================================================================
899   // Root set access. ==========================================================
900   // ===========================================================================
901 
902   // Shortcut to the roots table stored in the Isolate.
903   V8_INLINE RootsTable& roots_table();
904 
905 // Heap root getters.
906 #define ROOT_ACCESSOR(type, name, CamelName) inline type name();
907   MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
908 #undef ROOT_ACCESSOR
909 
910   V8_INLINE void SetRootMaterializedObjects(FixedArray objects);
911   V8_INLINE void SetRootScriptList(Object value);
912   V8_INLINE void SetRootNoScriptSharedFunctionInfos(Object value);
913   V8_INLINE void SetMessageListeners(TemplateList value);
914   V8_INLINE void SetPendingOptimizeForTestBytecode(Object bytecode);
915 
916   StrongRootsEntry* RegisterStrongRoots(const char* label, FullObjectSlot start,
917                                         FullObjectSlot end);
918   void UnregisterStrongRoots(StrongRootsEntry* entry);
919   void UpdateStrongRoots(StrongRootsEntry* entry, FullObjectSlot start,
920                          FullObjectSlot end);
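  // Editorial sketch (assuming an off-heap FullObjectSlot range [first, last)
  // owned by the embedder):
  //
  //   StrongRootsEntry* entry =
  //       heap->RegisterStrongRoots("MyTable", first, last);
  //   ...                                  // slots are now traced as strong
  //   heap->UpdateStrongRoots(entry, first, new_last);  // after reallocation
  //   heap->UnregisterStrongRoots(entry);  // once the table goes away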
921 
922   void SetBuiltinsConstantsTable(FixedArray cache);
923   void SetDetachedContexts(WeakArrayList detached_contexts);
924 
925   // A full copy of the interpreter entry trampoline, used as a template to
926   // create copies of the builtin at runtime. The copies are used to create
927   // better profiling information for ticks in bytecode execution. Note that
928   // this is always a copy of the full builtin, i.e. not the off-heap
929   // trampoline.
930   // See also: FLAG_interpreted_frames_native_stack.
931   void SetInterpreterEntryTrampolineForProfiling(Code code);
932 
933   void EnqueueDirtyJSFinalizationRegistry(
934       JSFinalizationRegistry finalization_registry,
935       std::function<void(HeapObject object, ObjectSlot slot, Object target)>
936           gc_notify_updated_slot);
937 
938   MaybeHandle<JSFinalizationRegistry> DequeueDirtyJSFinalizationRegistry();
939 
940   // Called from Heap::NotifyContextDisposed to remove all
941   // FinalizationRegistries with {context} from the dirty list when the context
942   // e.g. navigates away or is detached. If the dirty list is empty afterwards,
943   // the cleanup task is aborted if needed.
944   void RemoveDirtyFinalizationRegistriesOnContext(NativeContext context);
945 
946   inline bool HasDirtyJSFinalizationRegistries();
947 
948   void PostFinalizationRegistryCleanupTaskIfNeeded();
949 
950   void set_is_finalization_registry_cleanup_task_posted(bool posted) {
951     is_finalization_registry_cleanup_task_posted_ = posted;
952   }
953 
954   bool is_finalization_registry_cleanup_task_posted() {
955     return is_finalization_registry_cleanup_task_posted_;
956   }
957 
958   V8_EXPORT_PRIVATE void KeepDuringJob(Handle<JSReceiver> target);
959   void ClearKeptObjects();
960 
961   // ===========================================================================
962   // Inline allocation. ========================================================
963   // ===========================================================================
964 
965   // Switch whether inline bump-pointer allocation should be used.
966   V8_EXPORT_PRIVATE void EnableInlineAllocation();
967   V8_EXPORT_PRIVATE void DisableInlineAllocation();
968 
969   // ===========================================================================
970   // Methods triggering GCs. ===================================================
971   // ===========================================================================
972 
973   // Performs garbage collection operation.
974   // Returns whether there is a chance that another major GC could
975   // collect more garbage.
976   V8_EXPORT_PRIVATE bool CollectGarbage(
977       AllocationSpace space, GarbageCollectionReason gc_reason,
978       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
979 
980   // Performs a full garbage collection.
981   V8_EXPORT_PRIVATE void CollectAllGarbage(
982       int flags, GarbageCollectionReason gc_reason,
983       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
984 
985   // Last hope GC, should try to squeeze as much as possible.
986   V8_EXPORT_PRIVATE void CollectAllAvailableGarbage(
987       GarbageCollectionReason gc_reason);
988 
989   // Precise garbage collection that potentially finalizes already running
990   // incremental marking before performing an atomic garbage collection.
991   // Only use if absolutely necessary or in tests to avoid floating garbage!
992   V8_EXPORT_PRIVATE void PreciseCollectAllGarbage(
993       int flags, GarbageCollectionReason gc_reason,
994       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
995 
996   // Performs garbage collection operation for the shared heap.
997   V8_EXPORT_PRIVATE void CollectSharedGarbage(
998       GarbageCollectionReason gc_reason);
999 
1000   // Reports an external memory pressure event; either performs a major GC or
1001   // completes incremental marking in order to free external resources.
1002   void ReportExternalMemoryPressure();
1003 
1004   using GetExternallyAllocatedMemoryInBytesCallback =
1005       v8::Isolate::GetExternallyAllocatedMemoryInBytesCallback;
1006 
1007   void SetGetExternallyAllocatedMemoryInBytesCallback(
1008       GetExternallyAllocatedMemoryInBytesCallback callback) {
1009     external_memory_callback_ = callback;
1010   }
1011 
1012   // Invoked when GC was requested via the stack guard.
1013   void HandleGCRequest();
1014 
1015   // ===========================================================================
1016   // Iterators. ================================================================
1017   // ===========================================================================
1018 
1019   // None of these methods iterate over the read-only roots. To do this use
1020   // ReadOnlyRoots::Iterate. Read-only root iteration is not necessary for
1021   // garbage collection and is usually only performed as part of
1022   // (de)serialization or heap verification.
1023 
1024   // Iterates over the strong roots and the weak roots.
1025   void IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options);
1026   void IterateRootsIncludingClients(RootVisitor* v,
1027                                     base::EnumSet<SkipRoot> options);
1028 
1029   // Iterates over entries in the smi roots list.  Only interesting to the
1030   // serializer/deserializer, since GC does not care about smis.
1031   void IterateSmiRoots(RootVisitor* v);
1032   // Iterates over weak string tables.
1033   void IterateWeakRoots(RootVisitor* v, base::EnumSet<SkipRoot> options);
1034   void IterateWeakGlobalHandles(RootVisitor* v);
1035   void IterateBuiltins(RootVisitor* v);
1036   void IterateStackRoots(RootVisitor* v);
1037 
1038   // ===========================================================================
1039   // Remembered set API. =======================================================
1040   // ===========================================================================
1041 
1042   // Used to query the incremental marking status in generated code.
1043   Address* IsMarkingFlagAddress() {
1044     return reinterpret_cast<Address*>(&is_marking_flag_);
1045   }
1046 
1047   void SetIsMarkingFlag(uint8_t flag) { is_marking_flag_ = flag; }
1048 
1049   void ClearRecordedSlot(HeapObject object, ObjectSlot slot);
1050   void ClearRecordedSlotRange(Address start, Address end);
1051   static int InsertIntoRememberedSetFromCode(MemoryChunk* chunk, Address slot);
1052 
1053 #ifdef DEBUG
1054   void VerifyClearedSlot(HeapObject object, ObjectSlot slot);
1055   void VerifySlotRangeHasNoRecordedSlots(Address start, Address end);
1056 #endif
1057 
1058   // ===========================================================================
1059   // Incremental marking API. ==================================================
1060   // ===========================================================================
1061 
1062   int GCFlagsForIncrementalMarking() {
1063     return ShouldOptimizeForMemoryUsage() ? kReduceMemoryFootprintMask
1064                                           : kNoGCFlags;
1065   }
1066 
1067   // Start incremental marking and ensure that idle time handler can perform
1068   // incremental steps.
1069   V8_EXPORT_PRIVATE void StartIdleIncrementalMarking(
1070       GarbageCollectionReason gc_reason,
1071       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
1072 
1073   // Starts incremental marking assuming incremental marking is currently
1074   // stopped.
1075   V8_EXPORT_PRIVATE void StartIncrementalMarking(
1076       int gc_flags, GarbageCollectionReason gc_reason,
1077       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
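  // Editorial sketch (assuming a Heap* named `heap`): a memory-saving
  // incremental cycle is typically started with the flag helper above, e.g.
  //
  //   heap->StartIncrementalMarking(heap->GCFlagsForIncrementalMarking(),
  //                                 GarbageCollectionReason::kTesting);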
1078 
1079   void StartIncrementalMarkingIfAllocationLimitIsReached(
1080       int gc_flags,
1081       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
1082   void StartIncrementalMarkingIfAllocationLimitIsReachedBackground();
1083 
1084   void FinalizeIncrementalMarkingIfComplete(GarbageCollectionReason gc_reason);
1085   // Synchronously finalizes incremental marking.
1086   V8_EXPORT_PRIVATE void FinalizeIncrementalMarkingAtomically(
1087       GarbageCollectionReason gc_reason);
1088 
1089   void CompleteSweepingFull();
1090   void CompleteSweepingYoung(GarbageCollector collector);
1091 
1092   // Ensures that sweeping is finished for that object's page.
1093   void EnsureSweepingCompleted(HeapObject object);
1094 
1095   IncrementalMarking* incremental_marking() const {
1096     return incremental_marking_.get();
1097   }
1098 
1099   MarkingBarrier* marking_barrier() const { return marking_barrier_.get(); }
1100 
1101   // ===========================================================================
1102   // Concurrent marking API. ===================================================
1103   // ===========================================================================
1104 
1105   ConcurrentMarking* concurrent_marking() const {
1106     return concurrent_marking_.get();
1107   }
1108 
1109   // The runtime uses this function to notify potentially unsafe object layout
1110   // changes that require special synchronization with the concurrent marker.
1111   // The old size is the size of the object before layout change.
1112   // By default recorded slots in the object are invalidated. Pass
1113   // InvalidateRecordedSlots::kNo if this is not necessary or to perform this
1114   // manually.
1115   void NotifyObjectLayoutChange(
1116       HeapObject object, const DisallowGarbageCollection&,
1117       InvalidateRecordedSlots invalidate_recorded_slots =
1118           InvalidateRecordedSlots::kYes);
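  // Editorial sketch (the typical shape of an unsafe layout change, assuming
  // code that is about to shrink or re-map `object`):
  //
  //   DisallowGarbageCollection no_gc;
  //   heap->NotifyObjectLayoutChange(object, no_gc);
  //   // ... now safe to change the map or truncate the object ...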
1119 
1120 #ifdef VERIFY_HEAP
1121   // This function checks that either
1122   // - the map transition is safe,
1123   // - or it was communicated to GC using NotifyObjectLayoutChange.
1124   V8_EXPORT_PRIVATE void VerifyObjectLayoutChange(HeapObject object,
1125                                                   Map new_map);
1126   // Checks that this is a safe map transition.
1127   V8_EXPORT_PRIVATE void VerifySafeMapTransition(HeapObject object,
1128                                                  Map new_map);
1129 #endif
1130 
1131   // ===========================================================================
1132   // Deoptimization support API. ===============================================
1133   // ===========================================================================
1134 
1135   // Setters for code offsets of well-known deoptimization targets.
1136   void SetConstructStubCreateDeoptPCOffset(int pc_offset);
1137   void SetConstructStubInvokeDeoptPCOffset(int pc_offset);
1138   void SetInterpreterEntryReturnPCOffset(int pc_offset);
1139 
1140   // Invalidates references in the given {code} object that are referenced
1141   // transitively from the deoptimization data. Mutates write-protected code.
1142   void InvalidateCodeDeoptimizationData(Code code);
1143 
1144   void DeoptMarkedAllocationSites();
1145 
1146   bool DeoptMaybeTenuredAllocationSites();
1147 
1148   // ===========================================================================
1149   // Embedder heap tracer support. =============================================
1150   // ===========================================================================
1151 
1152   LocalEmbedderHeapTracer* local_embedder_heap_tracer() const {
1153     return local_embedder_heap_tracer_.get();
1154   }
1155 
1156   V8_EXPORT_PRIVATE void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
1157   EmbedderHeapTracer* GetEmbedderHeapTracer() const;
1158 
1159   void RegisterExternallyReferencedObject(Address* location);
1160 
1161   EmbedderHeapTracer::TraceFlags flags_for_embedder_tracer() const;
1162 
1163   // ===========================================================================
1164   // Unified heap (C++) support. ===============================================
1165   // ===========================================================================
1166 
1167   V8_EXPORT_PRIVATE void AttachCppHeap(v8::CppHeap* cpp_heap);
1168   V8_EXPORT_PRIVATE void DetachCppHeap();
1169 
1170   v8::CppHeap* cpp_heap() const { return cpp_heap_; }
1171 
1172   const cppgc::EmbedderStackState* overriden_stack_state() const;
1173 
1174   // ===========================================================================
1175   // Embedder roots optimizations. =============================================
1176   // ===========================================================================
1177 
1178   V8_EXPORT_PRIVATE void SetEmbedderRootsHandler(EmbedderRootsHandler* handler);
1179 
1180   EmbedderRootsHandler* GetEmbedderRootsHandler() const;
1181 
1182   // ===========================================================================
1183   // External string table API. ================================================
1184   // ===========================================================================
1185 
1186   // Registers an external string.
1187   inline void RegisterExternalString(String string);
1188 
1189   // Called when a string's resource is changed. The size of the payload is sent
1190   // as an argument to the method.
1191   V8_EXPORT_PRIVATE void UpdateExternalString(String string, size_t old_payload,
1192                                               size_t new_payload);
1193 
1194   // Finalizes an external string by deleting the associated external
1195   // data and clearing the resource pointer.
1196   inline void FinalizeExternalString(String string);
1197 
1198   static String UpdateYoungReferenceInExternalStringTableEntry(
1199       Heap* heap, FullObjectSlot pointer);
1200 
1201   // ===========================================================================
1202   // Methods checking/returning the space of a given object/address. ===========
1203   // ===========================================================================
1204 
1205   // Returns whether the object resides in new space.
1206   static inline bool InYoungGeneration(Object object);
1207   static inline bool InYoungGeneration(MaybeObject object);
1208   static inline bool InYoungGeneration(HeapObject heap_object);
1209   static inline bool InFromPage(Object object);
1210   static inline bool InFromPage(MaybeObject object);
1211   static inline bool InFromPage(HeapObject heap_object);
1212   static inline bool InToPage(Object object);
1213   static inline bool InToPage(MaybeObject object);
1214   static inline bool InToPage(HeapObject heap_object);
1215 
1216   // Returns whether the object resides in old space.
1217   inline bool InOldSpace(Object object);
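  // Editorial sketch (assuming HeapObjects `host` and `value`): generation
  // checks commonly gate write barriers, e.g.
  //
  //   if (!Heap::InYoungGeneration(host) && Heap::InYoungGeneration(value)) {
  //     // an old-to-new reference was created; a generational barrier is
  //     // needed so the OLD_TO_NEW remembered set stays complete.
  //   }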
1218 
1219   // Checks whether an address/object is in the non-read-only heap (including
1220   // auxiliary area and unused area). Use IsValidHeapObject if checking both
1221   // heaps is required.
1222   V8_EXPORT_PRIVATE bool Contains(HeapObject value) const;
1223   // Same as above, but checks whether the object resides in any of the code
1224   // spaces.
1225   V8_EXPORT_PRIVATE bool ContainsCode(HeapObject value) const;
1226 
1227   // Checks whether an address/object is in the non-read-only heap (including
1228   // auxiliary area and unused area). Use IsValidHeapObject if checking both
1229   // heaps is required.
1230   V8_EXPORT_PRIVATE bool SharedHeapContains(HeapObject value) const;
1231 
1232   // Returns whether the object should be in the shared old space.
1233   V8_EXPORT_PRIVATE bool ShouldBeInSharedOldSpace(HeapObject value);
1234 
1235   // Checks whether an address/object is in a space.
1236   // Currently used by tests, serialization and heap verification only.
1237   V8_EXPORT_PRIVATE bool InSpace(HeapObject value, AllocationSpace space) const;
1238 
1239   // Returns true when this heap is shared.
1240   V8_EXPORT_PRIVATE bool IsShared();
1241 
1242   // Slow methods that can be used for verification as they can also be used
1243   // with off-heap Addresses.
1244   V8_EXPORT_PRIVATE bool InSpaceSlow(Address addr, AllocationSpace space) const;
1245 
1246   static inline Heap* FromWritableHeapObject(HeapObject obj);
1247 
1248   // ===========================================================================
1249   // Object statistics tracking. ===============================================
1250   // ===========================================================================
1251 
1252   // Returns the number of buckets used by object statistics tracking during a
1253   // major GC. Note that the following methods fail gracefully when the bounds
1254   // are exceeded though.
1255   // are exceeded.
1256 
1257   // Returns object statistics about count and size at the last major GC.
1258   // Objects are being grouped into buckets that roughly resemble existing
1259   // instance types.
1260   size_t ObjectCountAtLastGC(size_t index);
1261   size_t ObjectSizeAtLastGC(size_t index);
1262 
1263   // Retrieves names of buckets used by object statistics tracking.
1264   bool GetObjectTypeName(size_t index, const char** object_type,
1265                          const char** object_sub_type);
1266 
1267   // The total number of native context objects on the heap.
1268   size_t NumberOfNativeContexts();
1269   // The total number of native contexts that were detached but were not
1270   // garbage collected yet.
1271   size_t NumberOfDetachedContexts();
1272 
1273   // ===========================================================================
1274   // Code statistics. ==========================================================
1275   // ===========================================================================
1276 
1277   // Collect code (Code and BytecodeArray objects) statistics.
1278   void CollectCodeStatistics();
1279 
1280   // ===========================================================================
1281   // GC statistics. ============================================================
1282   // ===========================================================================
1283 
1284   // Returns the maximum amount of memory reserved for the heap.
1285   V8_EXPORT_PRIVATE size_t MaxReserved();
1286   size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
1287   size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
1288   size_t MaxOldGenerationSize() { return max_old_generation_size(); }
1289 
1290   // Limit on the max old generation size imposed by the underlying allocator.
1291   V8_EXPORT_PRIVATE static size_t AllocatorLimitOnMaxOldGenerationSize();
1292 
1293   V8_EXPORT_PRIVATE static size_t HeapSizeFromPhysicalMemory(
1294       uint64_t physical_memory);
1295   V8_EXPORT_PRIVATE static void GenerationSizesFromHeapSize(
1296       size_t heap_size, size_t* young_generation_size,
1297       size_t* old_generation_size);
1298   V8_EXPORT_PRIVATE static size_t YoungGenerationSizeFromOldGenerationSize(
1299       size_t old_generation_size);
1300   V8_EXPORT_PRIVATE static size_t YoungGenerationSizeFromSemiSpaceSize(
1301       size_t semi_space_size);
1302   V8_EXPORT_PRIVATE static size_t SemiSpaceSizeFromYoungGenerationSize(
1303       size_t young_generation_size);
1304   V8_EXPORT_PRIVATE static size_t MinYoungGenerationSize();
1305   V8_EXPORT_PRIVATE static size_t MinOldGenerationSize();
1306   V8_EXPORT_PRIVATE static size_t MaxOldGenerationSize(
1307       uint64_t physical_memory);
1308 
1309   // Returns the capacity of the heap in bytes w/o growing. Heap grows when
1310   // more spaces are needed until it reaches the limit.
1311   size_t Capacity();
1312 
1313   // Returns the capacity of the old generation.
1314   V8_EXPORT_PRIVATE size_t OldGenerationCapacity();
1315 
1316   // Returns the amount of memory currently held alive by the unmapper.
1317   size_t CommittedMemoryOfUnmapper();
1318 
1319   // Returns the amount of memory currently committed for the heap.
1320   size_t CommittedMemory();
1321 
1322   // Returns the amount of memory currently committed for the old space.
1323   size_t CommittedOldGenerationMemory();
1324 
1325   // Returns the amount of executable memory currently committed for the heap.
1326   size_t CommittedMemoryExecutable();
1327 
1328   // Returns the amount of physical memory currently committed for the heap.
1329   size_t CommittedPhysicalMemory();
1330 
1331   // Returns the maximum amount of memory ever committed for the heap.
1332   size_t MaximumCommittedMemory() { return maximum_committed_; }
1333 
1334   // Updates the maximum committed memory for the heap. Should be called
1335   // whenever a space grows.
1336   void UpdateMaximumCommitted();
1337 
1338   // Returns the available bytes in space w/o growing.
1339   // Heap doesn't guarantee that it can allocate an object that requires
1340   // all available bytes. Check MaxHeapObjectSize() instead.
1341   size_t Available();
1342 
1343   // Returns size of all objects residing in the heap.
1344   V8_EXPORT_PRIVATE size_t SizeOfObjects();
1345 
1346   // Returns size of all global handles in the heap.
1347   V8_EXPORT_PRIVATE size_t TotalGlobalHandlesSize();
1348 
1349   // Returns size of all allocated/used global handles in the heap.
1350   V8_EXPORT_PRIVATE size_t UsedGlobalHandlesSize();
1351 
1352   void UpdateSurvivalStatistics(int start_new_space_size);
1353 
1354   inline void IncrementPromotedObjectsSize(size_t object_size) {
1355     promoted_objects_size_ += object_size;
1356   }
1357   inline size_t promoted_objects_size() { return promoted_objects_size_; }
1358 
1359   inline void IncrementSemiSpaceCopiedObjectSize(size_t object_size) {
1360     semi_space_copied_object_size_ += object_size;
1361   }
1362   inline size_t semi_space_copied_object_size() {
1363     return semi_space_copied_object_size_;
1364   }
1365 
1366   inline size_t SurvivedYoungObjectSize() {
1367     return promoted_objects_size_ + semi_space_copied_object_size_;
1368   }
1369 
1370   inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }
1371 
1372   inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }
1373 
1374   inline void IncrementNodesPromoted() { nodes_promoted_++; }
1375 
1376   inline void IncrementYoungSurvivorsCounter(size_t survived) {
1377     survived_last_scavenge_ = survived;
1378     survived_since_last_expansion_ += survived;
1379   }
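  //
  // A minimal sketch of how a young-generation GC pass might feed these
  // counters (illustrative only; in practice the collectors perform these
  // updates, and promoted_bytes/copied_bytes are assumed locals):
  //
  //   heap->IncrementPromotedObjectsSize(promoted_bytes);
  //   heap->IncrementSemiSpaceCopiedObjectSize(copied_bytes);
  //   heap->IncrementYoungSurvivorsCounter(heap->SurvivedYoungObjectSize());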
1380 
1381   void UpdateNewSpaceAllocationCounter();
1382 
1383   V8_EXPORT_PRIVATE size_t NewSpaceAllocationCounter();
1384 
1385   // This should be used only for testing.
1386   void set_new_space_allocation_counter(size_t new_value) {
1387     new_space_allocation_counter_ = new_value;
1388   }
1389 
1390   void UpdateOldGenerationAllocationCounter() {
1391     old_generation_allocation_counter_at_last_gc_ =
1392         OldGenerationAllocationCounter();
1393     old_generation_size_at_last_gc_ = 0;
1394   }
1395 
1396   size_t OldGenerationAllocationCounter() {
1397     return old_generation_allocation_counter_at_last_gc_ +
1398            PromotedSinceLastGC();
1399   }
1400 
1401   size_t EmbedderAllocationCounter() const;
1402 
1403   // This should be used only for testing.
1404   void set_old_generation_allocation_counter_at_last_gc(size_t new_value) {
1405     old_generation_allocation_counter_at_last_gc_ = new_value;
1406   }
1407 
1408   size_t PromotedSinceLastGC() {
1409     size_t old_generation_size = OldGenerationSizeOfObjects();
1410     return old_generation_size > old_generation_size_at_last_gc_
1411                ? old_generation_size - old_generation_size_at_last_gc_
1412                : 0;
1413   }
1414 
1415   int gc_count() const { return gc_count_; }
1416 
1417   bool is_current_gc_forced() const { return is_current_gc_forced_; }
1418 
1419   // Returns whether the currently in-progress GC should avoid increasing the
1420   // ages on any objects that live for a set number of collections.
1421   bool ShouldCurrentGCKeepAgesUnchanged() const {
1422     return is_current_gc_forced_ || is_current_gc_for_heap_profiler_;
1423   }
1424 
1425   // Returns the size of objects residing in non-new spaces.
1426   // Excludes external memory held by those objects.
1427   V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects();
1428 
1429   // Returns the size of objects held by the EmbedderHeapTracer.
1430   V8_EXPORT_PRIVATE size_t EmbedderSizeOfObjects() const;
1431 
1432   // Returns the global size of objects (embedder + V8 non-new spaces).
1433   V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects();
1434 
1435   // We allow incremental marking to overshoot the V8 and global allocation
1436   // limit for performance reasons. If the overshoot is too large then we are
1437   // more eager to finalize incremental marking.
1438   bool AllocationLimitOvershotByLargeMargin();
1439 
1440   // Return the maximum size objects can be before having to allocate them as
1441   // large objects. This takes into account allocating in the code space for
1442   // which the size of the allocatable space per V8 page may depend on the OS
1443   // page size at runtime. You may use kMaxRegularHeapObjectSize as a constant
1444   // instead if you know the allocation isn't in the code spaces.
1445   inline V8_EXPORT_PRIVATE int MaxRegularHeapObjectSize(
1446       AllocationType allocation);
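  //
  // Illustrative sketch (size_in_bytes is an assumed local):
  //
  //   const bool fits_in_regular_space =
  //       size_in_bytes <= heap->MaxRegularHeapObjectSize(AllocationType::kOld);
  //   // Anything larger must be allocated in a large object space.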
1447 
1448   // ===========================================================================
1449   // Prologue/epilogue callback methods.========================================
1450   // ===========================================================================
1451 
1452   void AddGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
1453                              GCType gc_type_filter, void* data);
1454   void RemoveGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
1455                                 void* data);
1456 
1457   void AddGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
1458                              GCType gc_type_filter, void* data);
1459   void RemoveGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
1460                                 void* data);
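  //
  // Illustrative sketch of registering a prologue callback (the callback body
  // and its name are assumptions, not part of V8):
  //
  //   void OnMarkCompactStart(v8::Isolate* isolate, v8::GCType type,
  //                           v8::GCCallbackFlags flags, void* data) {
  //     // e.g. record a timestamp
  //   }
  //   ...
  //   heap->AddGCPrologueCallback(OnMarkCompactStart, kGCTypeMarkSweepCompact,
  //                               nullptr);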
1461 
1462   void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
1463   void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
1464 
1465   // ===========================================================================
1466   // Allocation methods. =======================================================
1467   // ===========================================================================
1468 
1469   // Creates a filler object and returns a heap object immediately after it.
1470   V8_EXPORT_PRIVATE HeapObject PrecedeWithFiller(HeapObject object,
1471                                                  int filler_size);
1472 
1473   // Creates a filler object if needed for alignment and returns a heap object
1474   // immediately after it. If any space is left after the returned object,
1475   // another filler object is created so the over allocated memory is iterable.
1476   V8_WARN_UNUSED_RESULT HeapObject
1477   AlignWithFiller(HeapObject object, int object_size, int allocation_size,
1478                   AllocationAlignment alignment);
1479 
1480   // Allocate an external backing store with the given allocation callback.
1481   // If the callback fails (indicated by a nullptr result) then this function
1482   // will re-try the allocation after performing GCs. This is useful for
1483   // external backing stores that may be retained by (unreachable) V8 objects
1484   // such as ArrayBuffers, ExternalStrings, etc.
1485   //
1486   // The function may also proactively trigger GCs even if the allocation
1487   // callback does not fail to keep the memory usage low.
1488   V8_EXPORT_PRIVATE void* AllocateExternalBackingStore(
1489       const std::function<void*(size_t)>& allocate, size_t byte_length);
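  //
  // Illustrative sketch (byte_length is an assumed local; the callback shown
  // is just an example allocator):
  //
  //   void* memory = heap->AllocateExternalBackingStore(
  //       [](size_t length) { return calloc(1, length); }, byte_length);
  //   if (memory == nullptr) { /* allocation failed even after GCs */ }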
1490 
1491   // ===========================================================================
1492   // Allocation site tracking. =================================================
1493   // ===========================================================================
1494 
1495   // Updates the AllocationSite of a given {object}. The entry (including the
1496   // count) is cached on the local pretenuring feedback.
1497   inline void UpdateAllocationSite(
1498       Map map, HeapObject object, PretenuringFeedbackMap* pretenuring_feedback);
1499 
1500   // Merges local pretenuring feedback into the global one. Note that this
1501   // method needs to be called after evacuation, as allocation sites may be
1502   // evacuated and this method resolves forward pointers accordingly.
1503   void MergeAllocationSitePretenuringFeedback(
1504       const PretenuringFeedbackMap& local_pretenuring_feedback);
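  //
  // Illustrative sketch of the feedback flow (simplified; the collectors
  // drive this during evacuation, and map/object are assumed locals):
  //
  //   PretenuringFeedbackMap local_feedback;
  //   // While evacuating: record mementos found behind objects.
  //   heap->UpdateAllocationSite(map, object, &local_feedback);
  //   // After evacuation: merge into the global storage.
  //   heap->MergeAllocationSitePretenuringFeedback(local_feedback);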
1505 
1506   // Adds an allocation site to the list of sites to be pretenured during the
1507   // next collection. Added allocation sites are pretenured independent of
1508   // their feedback.
1509   V8_EXPORT_PRIVATE void PretenureAllocationSiteOnNextCollection(
1510       AllocationSite site);
1511 
1512   // ===========================================================================
1513   // Allocation tracking. ======================================================
1514   // ===========================================================================
1515 
1516   // Adds {new_space_observer} to new space and {observer} to any other space.
1517   void AddAllocationObserversToAllSpaces(
1518       AllocationObserver* observer, AllocationObserver* new_space_observer);
1519 
1520   // Removes {new_space_observer} from new space and {observer} from any other
1521   // space.
1522   void RemoveAllocationObserversFromAllSpaces(
1523       AllocationObserver* observer, AllocationObserver* new_space_observer);
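  //
  // Illustrative sketch (both observers are assumed to outlive their
  // registration):
  //
  //   heap->AddAllocationObserversToAllSpaces(&observer, &new_space_observer);
  //   ...
  //   heap->RemoveAllocationObserversFromAllSpaces(&observer,
  //                                                &new_space_observer);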
1524 
1525   // Check if the given object was recently allocated and its fields may appear
1526   // as uninitialized to background threads.
1527   // This predicate may be invoked from a background thread.
1528   inline bool IsPendingAllocation(HeapObject object);
1529   inline bool IsPendingAllocation(Object object);
1530 
1531   // Notifies that all previously allocated objects are properly initialized
1532   // and ensures that IsPendingAllocation returns false for them. This function
1533   // may be invoked only on the main thread.
1534   V8_EXPORT_PRIVATE void PublishPendingAllocations();
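  //
  // Illustrative sketch of the intended protocol (simplified):
  //
  //   // Main thread: after fully initializing freshly allocated objects.
  //   heap->PublishPendingAllocations();
  //   // Background thread: skip objects whose fields may still appear
  //   // uninitialized.
  //   if (heap->IsPendingAllocation(object)) return;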
1535 
1536   // ===========================================================================
1537   // Heap object allocation tracking. ==========================================
1538   // ===========================================================================
1539 
1540   V8_EXPORT_PRIVATE void AddHeapObjectAllocationTracker(
1541       HeapObjectAllocationTracker* tracker);
1542   V8_EXPORT_PRIVATE void RemoveHeapObjectAllocationTracker(
1543       HeapObjectAllocationTracker* tracker);
1544   bool has_heap_object_allocation_tracker() const {
1545     return !allocation_trackers_.empty();
1546   }
1547 
1548   // ===========================================================================
1549   // Retaining path tracking. ==================================================
1550   // ===========================================================================
1551 
1552   // Adds the given object to the weak table of retaining path targets.
1553   // On each GC if the marker discovers the object, it will print the retaining
1554   // path. This requires --track-retaining-path flag.
1555   void AddRetainingPathTarget(Handle<HeapObject> object,
1556                               RetainingPathOption option);
1557 
1558   // ===========================================================================
1559   // Stack frame support. ======================================================
1560   // ===========================================================================
1561 
1562   // Returns the Code object for a given interior pointer.
1563   Code GcSafeFindCodeForInnerPointer(Address inner_pointer);
1564 
1565   // Returns true if {addr} is contained within {code} and false otherwise.
1566   // Mostly useful for debugging.
1567   bool GcSafeCodeContains(Code code, Address addr);
1568 
1569   // Casts a heap object to a code object and checks if the inner_pointer is
1570   // within the object.
1571   Code GcSafeCastToCode(HeapObject object, Address inner_pointer);
1572 
1573   // Returns the map of an object. Can be used during garbage collection, i.e.
1574   // it supports a forwarded map. Fails if the map is not the code map.
1575   Map GcSafeMapOfCodeSpaceObject(HeapObject object);
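  //
  // Illustrative sketch (pc is an assumed interior code address):
  //
  //   Code code = heap->GcSafeFindCodeForInnerPointer(pc);
  //   DCHECK(heap->GcSafeCodeContains(code, pc));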
1576 
1577 // =============================================================================
1578 #ifdef VERIFY_HEAP
1579   // Verify the heap is in its normal state before or after a GC.
1580   V8_EXPORT_PRIVATE void Verify();
1581   // Verify the read-only heap after all read-only heap objects have been
1582   // created.
1583   void VerifyReadOnlyHeap();
1584   void VerifyRememberedSetFor(HeapObject object);
1585 #endif
1586 
1587 #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
1588   void V8_EXPORT_PRIVATE set_allocation_timeout(int allocation_timeout);
1589 #endif  // V8_ENABLE_ALLOCATION_TIMEOUT
1590 
1591 #ifdef DEBUG
1592   void VerifyCountersAfterSweeping();
1593   void VerifyCountersBeforeConcurrentSweeping();
1594   void VerifyCommittedPhysicalMemory();
1595 
1596   void Print();
1597   void PrintHandles();
1598 
1599   // Report code statistics.
1600   void ReportCodeStatistics(const char* title);
1601 #endif  // DEBUG
1602   void* GetRandomMmapAddr() {
1603     void* result = v8::internal::GetRandomMmapAddr();
1604 #if V8_TARGET_ARCH_X64
1605 #if V8_OS_DARWIN
1606     // The Darwin kernel [as of macOS 10.12.5] does not clean up page
1607     // directory entries [PDE] created from mmap or mach_vm_allocate, even
1608     // after the region is destroyed. Using a virtual address space that is
1609     // too large causes a leak of about 1 wired [can never be paged out] page
1610     // per call to mmap(). The page is only reclaimed when the process is
1611     // killed. Confine the hint to a 32-bit section of the virtual address
1612     // space. See crbug.com/700928.
1613     uintptr_t offset = reinterpret_cast<uintptr_t>(result) & kMmapRegionMask;
1614     result = reinterpret_cast<void*>(mmap_region_base_ + offset);
1615 #endif  // V8_OS_DARWIN
1616 #endif  // V8_TARGET_ARCH_X64
1617     return result;
1618   }
1619 
1620   void RegisterCodeObject(Handle<Code> code);
1621 
1622   static const char* GarbageCollectionReasonToString(
1623       GarbageCollectionReason gc_reason);
1624 
1625   // Calculates the number of entries for the full-sized number-to-string cache.
1626   inline int MaxNumberToStringCacheSize() const;
1627 
1628   static Isolate* GetIsolateFromWritableObject(HeapObject object);
1629 
1630   // Ensure that we have swept all spaces in such a way that we can iterate
1631   // over all objects.
1632   void MakeHeapIterable();
1633 
1634  private:
1635   class AllocationTrackerForDebugging;
1636 
1637   using ExternalStringTableUpdaterCallback = String (*)(Heap* heap,
1638                                                         FullObjectSlot pointer);
1639 
1640   // External strings table is a place where all external strings are
1641   // registered.  We need to keep track of such strings to properly
1642   // finalize them.
1643   class ExternalStringTable {
1644    public:
1645     explicit ExternalStringTable(Heap* heap) : heap_(heap) {}
1646     ExternalStringTable(const ExternalStringTable&) = delete;
1647     ExternalStringTable& operator=(const ExternalStringTable&) = delete;
1648 
1649     // Registers an external string.
1650     inline void AddString(String string);
1651     bool Contains(String string);
1652 
1653     void IterateAll(RootVisitor* v);
1654     void IterateYoung(RootVisitor* v);
1655     void PromoteYoung();
1656 
1657     // Restores internal invariant and gets rid of collected strings. Must be
1658     // called after each Iterate*() that modified the strings.
1659     void CleanUpAll();
1660     void CleanUpYoung();
1661 
1662     // Finalize all registered external strings and clear tables.
1663     void TearDown();
1664 
1665     void UpdateYoungReferences(
1666         Heap::ExternalStringTableUpdaterCallback updater_func);
1667     void UpdateReferences(
1668         Heap::ExternalStringTableUpdaterCallback updater_func);
1669 
1670    private:
1671     void Verify();
1672     void VerifyYoung();
1673 
1674     Heap* const heap_;
1675 
1676     // To speed up scavenge collections, young strings are kept separate from old
1677     // strings.
1678     std::vector<Object> young_strings_;
1679     std::vector<Object> old_strings_;
1680   };
1681 
1682   struct StringTypeTable {
1683     InstanceType type;
1684     int size;
1685     RootIndex index;
1686   };
1687 
1688   struct ConstantStringTable {
1689     const char* contents;
1690     RootIndex index;
1691   };
1692 
1693   struct StructTable {
1694     InstanceType type;
1695     int size;
1696     RootIndex index;
1697   };
1698 
1699   struct GCCallbackTuple {
1700     GCCallbackTuple(v8::Isolate::GCCallbackWithData callback, GCType gc_type,
1701                     void* data)
1702         : callback(callback), gc_type(gc_type), data(data) {}
1703 
1704     bool operator==(const GCCallbackTuple& other) const;
1705 
1706     v8::Isolate::GCCallbackWithData callback;
1707     GCType gc_type;
1708     void* data;
1709   };
1710 
1711   static const int kInitialEvalCacheSize = 64;
1712   static const int kInitialNumberStringCacheSize = 256;
1713 
1714   static const int kRememberedUnmappedPages = 128;
1715 
1716   static const StringTypeTable string_type_table[];
1717   static const ConstantStringTable constant_string_table[];
1718   static const StructTable struct_table[];
1719 
1720   static const int kYoungSurvivalRateHighThreshold = 90;
1721   static const int kYoungSurvivalRateAllowedDeviation = 15;
1722   static const int kOldSurvivalRateLowThreshold = 10;
1723 
1724   static const int kMaxMarkCompactsInIdleRound = 7;
1725 
1726   static const int kInitialFeedbackCapacity = 256;
1727 
1728   Heap();
1729   ~Heap();
1730 
1731   Heap(const Heap&) = delete;
1732   Heap& operator=(const Heap&) = delete;
1733 
1734   static bool IsRegularObjectAllocation(AllocationType allocation) {
1735     return AllocationType::kYoung == allocation ||
1736            AllocationType::kOld == allocation;
1737   }
1738 
1739   static size_t DefaultGetExternallyAllocatedMemoryInBytesCallback() {
1740     return 0;
1741   }
1742 
1743 #define ROOT_ACCESSOR(type, name, CamelName) inline void set_##name(type value);
1744   ROOT_LIST(ROOT_ACCESSOR)
1745 #undef ROOT_ACCESSOR
1746 
1747   void set_current_gc_flags(int flags) { current_gc_flags_ = flags; }
1748 
1749   inline bool ShouldReduceMemory() const {
1750     return (current_gc_flags_ & kReduceMemoryFootprintMask) != 0;
1751   }
1752 
1753   int NumberOfScavengeTasks();
1754 
1755   // Checks whether a global GC is necessary.
1756   GarbageCollector SelectGarbageCollector(AllocationSpace space,
1757                                           const char** reason);
1758 
1759   // Free all LABs in the heap.
1760   void FreeLinearAllocationAreas();
1761 
1762   // Free all shared LABs.
1763   void FreeSharedLinearAllocationAreas();
1764 
1765   // Free all shared LABs of main thread.
1766   void FreeMainThreadSharedLinearAllocationAreas();
1767 
1768   // Performs garbage collection in a safepoint.
1769   // Returns the number of freed global handles.
1770   size_t PerformGarbageCollection(
1771       GarbageCollector collector, GarbageCollectionReason gc_reason,
1772       const char* collector_reason,
1773       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
1774 
1775   // Performs garbage collection in the shared heap.
1776   void PerformSharedGarbageCollection(Isolate* initiator,
1777                                       GarbageCollectionReason gc_reason);
1778 
1779   inline void UpdateOldSpaceLimits();
1780 
1781   bool CreateInitialMaps();
1782   void CreateInternalAccessorInfoObjects();
1783   void CreateInitialObjects();
1784 
1785   // Commits from space if it is uncommitted.
1786   void EnsureFromSpaceIsCommitted();
1787 
1788   // Uncommit unused semi space.
1789   V8_EXPORT_PRIVATE bool UncommitFromSpace();
1790 
1791   // Fill in bogus values in from space
1792   void ZapFromSpace();
1793 
1794   // Zaps the memory of a code object.
1795   V8_EXPORT_PRIVATE void ZapCodeObject(Address start_address,
1796                                        int size_in_bytes);
1797 
1798   // Initialize a filler object to keep the ability to iterate over the heap
1799   // when introducing gaps within pages. If the memory after the object header
1800   // of the filler should be cleared, pass in kClearFreedMemory. The default is
1801   // kDontClearFreedMemory.
1802   V8_EXPORT_PRIVATE HeapObject
1803   CreateFillerObjectAt(Address addr, int size,
1804                        ClearFreedMemoryMode clear_memory_mode =
1805                            ClearFreedMemoryMode::kDontClearFreedMemory);
1806 
1807   // Range write barrier implementation.
1808   template <int kModeMask, typename TSlot>
1809   V8_INLINE void WriteBarrierForRangeImpl(MemoryChunk* source_page,
1810                                           HeapObject object, TSlot start_slot,
1811                                           TSlot end_slot);
1812 
1813   // Deopts all code that contains allocation instructions which are tenured or
1814   // not tenured. Moreover, it clears the pretenuring allocation site statistics.
1815   void ResetAllAllocationSitesDependentCode(AllocationType allocation);
1816 
1817   // Evaluates local pretenuring for the old space and calls
1818   // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
1819   // the old space.
1820   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
1821 
1822   // Record statistics after garbage collection.
1823   void ReportStatisticsAfterGC();
1824 
1825   // Flush the number to string cache.
1826   void FlushNumberStringCache();
1827 
1828   void ConfigureInitialOldGenerationSize();
1829 
1830   double ComputeMutatorUtilization(const char* tag, double mutator_speed,
1831                                    double gc_speed);
1832   bool HasLowYoungGenerationAllocationRate();
1833   bool HasLowOldGenerationAllocationRate();
1834   bool HasLowEmbedderAllocationRate();
1835 
1836   void ReduceNewSpaceSize();
1837 
1838   GCIdleTimeHeapState ComputeHeapState();
1839 
1840   bool PerformIdleTimeAction(GCIdleTimeAction action,
1841                              GCIdleTimeHeapState heap_state,
1842                              double deadline_in_ms);
1843 
1844   void IdleNotificationEpilogue(GCIdleTimeAction action,
1845                                 GCIdleTimeHeapState heap_state, double start_ms,
1846                                 double deadline_in_ms);
1847 
1848   void PrintMaxMarkingLimitReached();
1849   void PrintMaxNewSpaceSizeReached();
1850 
1851   int NextStressMarkingLimit();
1852 
1853   void AddToRingBuffer(const char* string);
1854   void GetFromRingBuffer(char* buffer);
1855 
1856   void CompactRetainedMaps(WeakArrayList retained_maps);
1857 
1858   void CollectGarbageOnMemoryPressure();
1859 
1860   void EagerlyFreeExternalMemory();
1861 
1862   bool InvokeNearHeapLimitCallback();
1863 
1864   void ComputeFastPromotionMode();
1865 
1866   // Attempt to over-approximate the weak closure by marking object groups and
1867   // implicit references from global handles, but don't atomically complete
1868   // marking. If we continue to mark incrementally, we might have marked
1869   // objects that die later.
1870   void FinalizeIncrementalMarkingIncrementally(
1871       GarbageCollectionReason gc_reason);
1872 
1873   void InvokeIncrementalMarkingPrologueCallbacks();
1874   void InvokeIncrementalMarkingEpilogueCallbacks();
1875 
1876   // ===========================================================================
1877   // Pretenuring. ==============================================================
1878   // ===========================================================================
1879 
1880   // Pretenuring decisions are made based on feedback collected during new space
1881   // evacuation. Note that between feedback collection and calling this method
1882   // objects in old space must not move.
1883   void ProcessPretenuringFeedback();
1884 
1885   // Removes an entry from the global pretenuring storage.
1886   void RemoveAllocationSitePretenuringFeedback(AllocationSite site);
1887 
1888   // ===========================================================================
1889   // Actual GC. ================================================================
1890   // ===========================================================================
1891 
1892   // Code that should be run before and after each GC.  Includes some
1893   // reporting/verification activities when compiled with DEBUG set.
1894   void GarbageCollectionPrologue(GarbageCollectionReason gc_reason,
1895                                  const v8::GCCallbackFlags gc_callback_flags);
1896   void GarbageCollectionPrologueInSafepoint();
1897   void GarbageCollectionEpilogue(GarbageCollector collector);
1898   void GarbageCollectionEpilogueInSafepoint(GarbageCollector collector);
1899 
1900   // Performs a major collection in the whole heap.
1901   void MarkCompact();
1902   // Performs a minor collection of just the young generation.
1903   void MinorMarkCompact();
1904 
1905   // Code to be run before and after mark-compact.
1906   void MarkCompactPrologue();
1907   void MarkCompactEpilogue();
1908 
1909   // Performs a minor collection in new generation.
1910   void Scavenge();
1911   void EvacuateYoungGeneration();
1912 
1913   void UpdateYoungReferencesInExternalStringTable(
1914       ExternalStringTableUpdaterCallback updater_func);
1915 
1916   void UpdateReferencesInExternalStringTable(
1917       ExternalStringTableUpdaterCallback updater_func);
1918 
1919   void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
1920   void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
1921   void ProcessNativeContexts(WeakObjectRetainer* retainer);
1922   void ProcessAllocationSites(WeakObjectRetainer* retainer);
1923   void ProcessDirtyJSFinalizationRegistries(WeakObjectRetainer* retainer);
1924   void ProcessWeakListRoots(WeakObjectRetainer* retainer);
1925 
1926   // ===========================================================================
1927   // GC statistics. ============================================================
1928   // ===========================================================================
1929 
1930   inline size_t OldGenerationSpaceAvailable() {
1931     uint64_t bytes = OldGenerationSizeOfObjects() +
1932                      AllocatedExternalMemorySinceMarkCompact();
1933 
1934     if (old_generation_allocation_limit() <= bytes) return 0;
1935     return old_generation_allocation_limit() - static_cast<size_t>(bytes);
1936   }
1937 
1938   void UpdateTotalGCTime(double duration);
1939 
1940   bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
1941 
1942   bool IsIneffectiveMarkCompact(size_t old_generation_size,
1943                                 double mutator_utilization);
1944   void CheckIneffectiveMarkCompact(size_t old_generation_size,
1945                                    double mutator_utilization);
1946 
1947   inline void IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
1948                                                  size_t amount);
1949 
1950   inline void DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
1951                                                  size_t amount);
1952 
1953   // ===========================================================================
1954   // Growing strategy. =========================================================
1955   // ===========================================================================
1956 
1957   MemoryReducer* memory_reducer() { return memory_reducer_.get(); }
1958 
1959   // For some webpages RAIL mode does not switch from PERFORMANCE_LOAD.
1960   // This constant limits the effect of load RAIL mode on GC.
1961   // The value is arbitrary and chosen as the largest load time observed in
1962   // v8 browsing benchmarks.
1963   static const int kMaxLoadTimeMs = 7000;
1964 
1965   bool ShouldOptimizeForLoadTime();
1966 
1967   size_t old_generation_allocation_limit() const {
1968     return old_generation_allocation_limit_.load(std::memory_order_relaxed);
1969   }
1970 
1971   void set_old_generation_allocation_limit(size_t newlimit) {
1972     old_generation_allocation_limit_.store(newlimit, std::memory_order_relaxed);
1973   }
1974 
1975   size_t global_allocation_limit() const { return global_allocation_limit_; }
1976 
1977   size_t max_old_generation_size() {
1978     return max_old_generation_size_.load(std::memory_order_relaxed);
1979   }
1980 
1981   void set_max_old_generation_size(size_t value) {
1982     max_old_generation_size_.store(value, std::memory_order_relaxed);
1983   }
1984 
1985   bool always_allocate() { return always_allocate_scope_count_ != 0; }
1986 
1987   V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size);
1988   V8_EXPORT_PRIVATE bool CanExpandOldGenerationBackground(LocalHeap* local_heap,
1989                                                           size_t size);
1990   V8_EXPORT_PRIVATE bool CanPromoteYoungAndExpandOldGeneration(size_t size);
1991 
1992   bool ShouldExpandOldGenerationOnSlowAllocation(
1993       LocalHeap* local_heap = nullptr);
1994   bool IsRetryOfFailedAllocation(LocalHeap* local_heap);
1995   bool IsMainThreadParked(LocalHeap* local_heap);
1996 
1997   HeapGrowingMode CurrentHeapGrowingMode();
1998 
1999   double PercentToOldGenerationLimit();
2000   double PercentToGlobalMemoryLimit();
2001   enum class IncrementalMarkingLimit {
2002     kNoLimit,
2003     kSoftLimit,
2004     kHardLimit,
2005     kFallbackForEmbedderLimit
2006   };
2007   IncrementalMarkingLimit IncrementalMarkingLimitReached();
2008 
2009   bool ShouldStressCompaction() const;
2010 
2011   bool UseGlobalMemoryScheduling() const {
2012     return FLAG_global_gc_scheduling && local_embedder_heap_tracer();
2013   }
2014 
2015   base::Optional<size_t> GlobalMemoryAvailable();
2016 
2017   void RecomputeLimits(GarbageCollector collector);
2018 
2019   // ===========================================================================
2020   // Idle notification. ========================================================
2021   // ===========================================================================
2022 
2023   bool RecentIdleNotificationHappened();
2024 
2025   // ===========================================================================
2026   // GC Tasks. =================================================================
2027   // ===========================================================================
2028 
2029   void ScheduleScavengeTaskIfNeeded();
2030 
2031   // ===========================================================================
2032   // Allocation methods. =======================================================
2033   // ===========================================================================
2034 
2035   HeapAllocator* allocator() { return &heap_allocator_; }
2036 
2037   // Allocates a JS Map in the heap.
2038   V8_WARN_UNUSED_RESULT AllocationResult
2039   AllocateMap(InstanceType instance_type, int instance_size,
2040               ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
2041               int inobject_properties = 0);
2042 
2043   // Allocate an uninitialized object.  The memory is non-executable if the
2044   // hardware and OS allow.  This is the single choke-point for allocations
2045   // performed by the runtime and should not be bypassed (to extend this to
2046   // inlined allocations, use the Heap::DisableInlineAllocation() support).
2047   V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult
2048   AllocateRaw(int size_in_bytes, AllocationType allocation,
2049               AllocationOrigin origin = AllocationOrigin::kRuntime,
2050               AllocationAlignment alignment = kTaggedAligned);
2051 
2052   // This method will try to allocate objects quickly (AllocationType::kYoung)
2053   // otherwise it falls back to a slower path indicated by the mode.
2054   enum AllocationRetryMode { kLightRetry, kRetryOrFail };
2055   template <AllocationRetryMode mode>
2056   V8_WARN_UNUSED_RESULT V8_INLINE HeapObject
2057   AllocateRawWith(int size, AllocationType allocation,
2058                   AllocationOrigin origin = AllocationOrigin::kRuntime,
2059                   AllocationAlignment alignment = kTaggedAligned);
2060 
2061   // Call AllocateRawWith with kRetryOrFail. Matches the method in LocalHeap.
2062   V8_WARN_UNUSED_RESULT inline Address AllocateRawOrFail(
2063       int size, AllocationType allocation,
2064       AllocationOrigin origin = AllocationOrigin::kRuntime,
2065       AllocationAlignment alignment = kTaggedAligned);
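  //
  // Illustrative sketch of this private allocation path (normally reached via
  // the Factory; size_in_bytes is an assumed local):
  //
  //   AllocationResult result =
  //       heap->AllocateRaw(size_in_bytes, AllocationType::kOld);
  //   HeapObject object;
  //   if (!result.To(&object)) { /* trigger a GC and retry, or fail */ }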
2066 
2067   // Allocates a heap object based on the map.
2068   V8_WARN_UNUSED_RESULT AllocationResult Allocate(Handle<Map> map,
2069                                                   AllocationType allocation);
2070 
2071   // Allocates a partial map for bootstrapping.
2072   V8_WARN_UNUSED_RESULT AllocationResult
2073   AllocatePartialMap(InstanceType instance_type, int instance_size);
2074 
2075   void FinalizePartialMap(Map map);
2076 
2077   void set_force_oom(bool value) { force_oom_ = value; }
2078   void set_force_gc_on_next_allocation() {
2079     force_gc_on_next_allocation_ = true;
2080   }
2081 
2082   // Helper for IsPendingAllocation.
2083   inline bool IsPendingAllocationInternal(HeapObject object);
2084 
2085   // ===========================================================================
2086   // Retaining path tracing ====================================================
2087   // ===========================================================================
2088 
2089   void AddRetainer(HeapObject retainer, HeapObject object);
2090   void AddEphemeronRetainer(HeapObject retainer, HeapObject object);
2091   void AddRetainingRoot(Root root, HeapObject object);
2092   // Returns true if the given object is a target of retaining path tracking.
2093   // Stores the option corresponding to the object in the provided *option.
2094   bool IsRetainingPathTarget(HeapObject object, RetainingPathOption* option);
2095   void PrintRetainingPath(HeapObject object, RetainingPathOption option);
2096   void UpdateRetainersAfterScavenge();
2097 
2098 #ifdef DEBUG
2099   V8_EXPORT_PRIVATE void IncrementObjectCounters();
2100 #endif  // DEBUG
2101 
2102   std::vector<Handle<NativeContext>> FindAllNativeContexts();
2103   std::vector<WeakArrayList> FindAllRetainedMaps();
2104   MemoryMeasurement* memory_measurement() { return memory_measurement_.get(); }
2105 
2106   AllocationType allocation_type_for_in_place_internalizable_strings() const {
2107     return allocation_type_for_in_place_internalizable_strings_;
2108   }
2109 
2110   bool IsStressingScavenge();
2111 
2112   ExternalMemoryAccounting external_memory_;
2113 
2114   // This can be calculated directly from a pointer to the heap; however, it is
2115   // more expedient to get at the isolate directly from within Heap methods.
2116   Isolate* isolate_ = nullptr;
2117 
2118   HeapAllocator heap_allocator_;
2119 
2120   // These limits are initialized in Heap::ConfigureHeap based on the resource
2121   // constraints and flags.
2122   size_t code_range_size_ = 0;
2123   size_t max_semi_space_size_ = 0;
2124   size_t initial_semispace_size_ = 0;
2125   // Full garbage collections can be skipped if the old generation size
2126   // is below this threshold.
2127   size_t min_old_generation_size_ = 0;
2128   // If the old generation size exceeds this limit, then V8 will
2129   // crash with out-of-memory error.
2130   std::atomic<size_t> max_old_generation_size_{0};
2131   // TODO(mlippautz): Clarify whether this should take some embedder
2132   // configurable limit into account.
2133   size_t min_global_memory_size_ = 0;
2134   size_t max_global_memory_size_ = 0;
2135 
2136   size_t initial_max_old_generation_size_ = 0;
2137   size_t initial_max_old_generation_size_threshold_ = 0;
2138   size_t initial_old_generation_size_ = 0;
2139   bool old_generation_size_configured_ = false;
2140   size_t maximum_committed_ = 0;
2141   size_t old_generation_capacity_after_bootstrap_ = 0;
2142 
2143   // Backing store bytes (array buffers and external strings).
2144   // Use uint64_t counter since the counter could overflow the 32-bit range
2145   // temporarily on 32-bit.
2146   std::atomic<uint64_t> backing_store_bytes_{0};
2147 
2148   // For keeping track of how much data has survived
2149   // scavenge since last new space expansion.
2150   size_t survived_since_last_expansion_ = 0;
2151 
2152   // ... and since the last scavenge.
2153   size_t survived_last_scavenge_ = 0;
2154 
2155   // This is not the depth of nested AlwaysAllocateScope's but rather a single
2156   // count, as scopes can be acquired from multiple tasks (read: threads).
2157   std::atomic<size_t> always_allocate_scope_count_{0};
2158 
2159   // Stores the memory pressure level that is set by MemoryPressureNotification
2160   // and reset by a mark-compact garbage collection.
2161   std::atomic<MemoryPressureLevel> memory_pressure_level_;
2162 
2163   std::vector<std::pair<v8::NearHeapLimitCallback, void*>>
2164       near_heap_limit_callbacks_;
2165 
2166   // For keeping track of context disposals.
2167   int contexts_disposed_ = 0;
2168 
2169   NewSpace* new_space_ = nullptr;
2170   OldSpace* old_space_ = nullptr;
2171   CodeSpace* code_space_ = nullptr;
2172   MapSpace* map_space_ = nullptr;
2173   OldLargeObjectSpace* lo_space_ = nullptr;
2174   CodeLargeObjectSpace* code_lo_space_ = nullptr;
2175   NewLargeObjectSpace* new_lo_space_ = nullptr;
2176   ReadOnlySpace* read_only_space_ = nullptr;
2177 
2178   OldSpace* shared_old_space_ = nullptr;
2179   MapSpace* shared_map_space_ = nullptr;
2180 
2181   std::unique_ptr<ConcurrentAllocator> shared_old_allocator_;
2182   std::unique_ptr<ConcurrentAllocator> shared_map_allocator_;
2183 
2184   // Map from the space id to the space.
2185   Space* space_[LAST_SPACE + 1];
2186 
2187   LocalHeap* main_thread_local_heap_ = nullptr;
2188 
2189   // List for tracking ArrayBufferExtensions
2190   ArrayBufferExtension* old_array_buffer_extensions_ = nullptr;
2191   ArrayBufferExtension* young_array_buffer_extensions_ = nullptr;
2192 
2193   // Determines whether code space is write-protected. This is essentially a
2194   // race-free copy of the {FLAG_write_protect_code_memory} flag.
2195   bool write_protect_code_memory_ = false;
2196 
2197   // Holds the number of open CodeSpaceMemoryModificationScopes.
2198   uintptr_t code_space_memory_modification_scope_depth_ = 0;
2199 
2200   // Holds the number of open CodePageCollectionMemoryModificationScopes.
2201   std::atomic<uintptr_t> code_page_collection_memory_modification_scope_depth_{
2202       0};
2203 
2204   std::atomic<HeapState> gc_state_{NOT_IN_GC};
2205 
2206   int gc_post_processing_depth_ = 0;
2207 
2208   // Returns the amount of external memory registered since last global gc.
2209   V8_EXPORT_PRIVATE uint64_t AllocatedExternalMemorySinceMarkCompact();
2210 
2211   // Starts marking when stress_marking_percentage_% of the marking start limit
2212   // is reached.
2213   std::atomic<int> stress_marking_percentage_{0};
2214 
2215   // Observer that causes more frequent checks for reached incremental
2216   // marking limit.
2217   AllocationObserver* stress_marking_observer_ = nullptr;
2218 
2219   // Observer that can cause early scavenge start.
2220   StressScavengeObserver* stress_scavenge_observer_ = nullptr;
2221 
2222   // The maximum percent of the marking limit reached without causing marking.
2223   // This is tracked when specifying --fuzzer-gc-analysis.
2224   double max_marking_limit_reached_ = 0.0;
2225 
2226   // How many mark-sweep collections happened.
2227   unsigned int ms_count_ = 0;
2228 
2229   // How many gc happened.
2230   unsigned int gc_count_ = 0;
2231 
2232   // The number of Mark-Compact garbage collections that are considered as
2233   // ineffective. See IsIneffectiveMarkCompact() predicate.
2234   int consecutive_ineffective_mark_compacts_ = 0;
2235 
2236   static const uintptr_t kMmapRegionMask = 0xFFFFFFFFu;
2237   uintptr_t mmap_region_base_ = 0;
2238 
2239   // For post mortem debugging.
2240   int remembered_unmapped_pages_index_ = 0;
2241   Address remembered_unmapped_pages_[kRememberedUnmappedPages];
2242 
2243   // Limit that triggers a global GC on the next (normally caused) GC.  This
2244   // is checked when we have already decided to do a GC to help determine
2245   // which collector to invoke, before expanding a paged space in the old
2246   // generation and on every allocation in large object space.
2247   std::atomic<size_t> old_generation_allocation_limit_{0};
2248   size_t global_allocation_limit_ = 0;
2249 
2250   // Weak list heads, threaded through the objects.
2251   // List heads are initialized lazily and contain the undefined_value at start.
2252   // {native_contexts_list_} is an Address instead of an Object to allow the use
2253   // of atomic accessors.
2254   std::atomic<Address> native_contexts_list_;
2255   Object allocation_sites_list_;
2256   Object dirty_js_finalization_registries_list_;
2257   // Weak list tails.
2258   Object dirty_js_finalization_registries_list_tail_;
2259 
2260   std::vector<GCCallbackTuple> gc_epilogue_callbacks_;
2261   std::vector<GCCallbackTuple> gc_prologue_callbacks_;
2262 
2263   GetExternallyAllocatedMemoryInBytesCallback external_memory_callback_;
2264 
2265   int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];
2266 
2267   size_t promoted_objects_size_ = 0;
2268   double promotion_ratio_ = 0.0;
2269   double promotion_rate_ = 0.0;
2270   size_t semi_space_copied_object_size_ = 0;
2271   size_t previous_semi_space_copied_object_size_ = 0;
2272   double semi_space_copied_rate_ = 0.0;
2273   int nodes_died_in_new_space_ = 0;
2274   int nodes_copied_in_new_space_ = 0;
2275   int nodes_promoted_ = 0;
2276 
2277   // This is the pretenuring trigger for allocation sites that are in maybe
2278   // tenure state. When we switch to the maximum new space size, we deoptimize
2279   // the code that belongs to the allocation site and derive the lifetime
2280   // of the allocation site.
2281   unsigned int maximum_size_scavenges_ = 0;
2282 
2283   // Total time spent in GC.
2284   double total_gc_time_ms_ = 0.0;
2285 
2286   // Last time an idle notification happened.
2287   double last_idle_notification_time_ = 0.0;
2288 
2289   // Last time a garbage collection happened.
2290   double last_gc_time_ = 0.0;
2291 
2292   std::unique_ptr<GCTracer> tracer_;
2293   std::unique_ptr<MarkCompactCollector> mark_compact_collector_;
2294   std::unique_ptr<MinorMarkCompactCollector> minor_mark_compact_collector_;
2295   std::unique_ptr<ScavengerCollector> scavenger_collector_;
2296   std::unique_ptr<ArrayBufferSweeper> array_buffer_sweeper_;
2297 
2298   std::unique_ptr<MemoryAllocator> memory_allocator_;
2299   std::unique_ptr<IncrementalMarking> incremental_marking_;
2300   std::unique_ptr<ConcurrentMarking> concurrent_marking_;
2301   std::unique_ptr<GCIdleTimeHandler> gc_idle_time_handler_;
2302   std::unique_ptr<MemoryMeasurement> memory_measurement_;
2303   std::unique_ptr<MemoryReducer> memory_reducer_;
2304   std::unique_ptr<ObjectStats> live_object_stats_;
2305   std::unique_ptr<ObjectStats> dead_object_stats_;
2306   std::unique_ptr<ScavengeJob> scavenge_job_;
2307   std::unique_ptr<AllocationObserver> scavenge_task_observer_;
2308   std::unique_ptr<AllocationObserver> stress_concurrent_allocation_observer_;
2309   std::unique_ptr<LocalEmbedderHeapTracer> local_embedder_heap_tracer_;
2310   std::unique_ptr<MarkingBarrier> marking_barrier_;
2311   std::unique_ptr<AllocationTrackerForDebugging>
2312       allocation_tracker_for_debugging_;
2313 
2314   // This object controls virtual space reserved for code on the V8 heap. This
2315   // is only valid for 64-bit architectures where kRequiresCodeRange is true.
2316   //
2317   // Owned by the heap when !V8_COMPRESS_POINTERS_IN_SHARED_CAGE, otherwise is
2318   // process-wide.
2319   std::shared_ptr<CodeRange> code_range_;
2320 
2321   // The embedder owns the C++ heap.
2322   v8::CppHeap* cpp_heap_ = nullptr;
2323 
2324   EmbedderRootsHandler* embedder_roots_handler_ = nullptr;
2325 
2326   StrongRootsEntry* strong_roots_head_ = nullptr;
2327   base::Mutex strong_roots_mutex_;
2328 
2329   bool need_to_remove_stress_concurrent_allocation_observer_ = false;
2330 
2331   // This counter is increased before each GC and never reset.
2332   // To account for the bytes allocated since the last GC, use the
2333   // NewSpaceAllocationCounter() function.
2334   size_t new_space_allocation_counter_ = 0;
2335 
2336   // This counter is increased before each GC and never reset. To
2337   // account for the bytes allocated since the last GC, use the
2338   // OldGenerationAllocationCounter() function.
2339   size_t old_generation_allocation_counter_at_last_gc_ = 0;
2340 
2341   // The size of objects in old generation after the last MarkCompact GC.
2342   size_t old_generation_size_at_last_gc_{0};
2343 
2344   // The size of global memory after the last MarkCompact GC.
2345   size_t global_memory_at_last_gc_ = 0;
2346 
2347   // The feedback storage is used to store allocation sites (keys) and how often
2348   // they have been visited (values) by finding a memento behind an object. The
2349   // storage is only alive temporarily during a GC. The invariant is that all
2350   // pointers in this map are already fixed, i.e., they do not point to
2351   // forwarding pointers.
2352   PretenuringFeedbackMap global_pretenuring_feedback_;
2353 
2354   std::unique_ptr<GlobalHandleVector<AllocationSite>>
2355       allocation_sites_to_pretenure_;
2356 
2357   char trace_ring_buffer_[kTraceRingBufferSize];
2358 
2359   // Used as boolean.
2360   uint8_t is_marking_flag_ = 0;
2361 
2362   // If it's not full then the data is from 0 to ring_buffer_end_.  If it's
2363   // full then the data is from ring_buffer_end_ to the end of the buffer and
2364   // from 0 to ring_buffer_end_.
2365   bool ring_buffer_full_ = false;
2366   size_t ring_buffer_end_ = 0;
2367 
2368   // Flag is set when the heap has been configured.  The heap can be repeatedly
2369   // configured through the API until it is set up.
2370   bool configured_ = false;
2371 
2372   // Currently set GC flags that are respected by all GC components.
2373   int current_gc_flags_ = Heap::kNoGCFlags;
2374 
2375   // Currently set GC callback flags that are used to pass information between
2376   // the embedder and V8's GC.
2377   GCCallbackFlags current_gc_callback_flags_ =
2378       GCCallbackFlags::kNoGCCallbackFlags;
2379 
2380   std::unique_ptr<IsolateSafepoint> safepoint_;
2381 
2382   bool is_current_gc_forced_ = false;
2383   bool is_current_gc_for_heap_profiler_ = false;
2384 
2385   ExternalStringTable external_string_table_;
2386 
2387   const AllocationType allocation_type_for_in_place_internalizable_strings_;
2388 
2389   base::Mutex relocation_mutex_;
2390 
2391   std::unique_ptr<CollectionBarrier> collection_barrier_;
2392 
2393   int ignore_local_gc_requests_depth_ = 0;
2394 
2395   int gc_callbacks_depth_ = 0;
2396 
2397   bool deserialization_complete_ = false;
2398 
2399   int max_regular_code_object_size_ = 0;
2400 
2401   bool fast_promotion_mode_ = false;
2402 
2403   // Used for testing purposes.
2404   bool force_oom_ = false;
2405   bool force_gc_on_next_allocation_ = false;
2406   bool delay_sweeper_tasks_for_testing_ = false;
2407 
2408   HeapObject pending_layout_change_object_;
2409 
2410   base::Mutex unprotected_memory_chunks_mutex_;
2411   std::unordered_set<MemoryChunk*> unprotected_memory_chunks_;
2412 
2413   std::unordered_map<HeapObject, HeapObject, Object::Hasher> retainer_;
2414   std::unordered_map<HeapObject, Root, Object::Hasher> retaining_root_;
2415   // If an object is retained by an ephemeron, then the retaining key of the
2416   // ephemeron is stored in this map.
2417   std::unordered_map<HeapObject, HeapObject, Object::Hasher>
2418       ephemeron_retainer_;
2419   // For each index in the retaining_path_targets_ array this map
2420   // stores the option of the corresponding target.
2421   std::unordered_map<int, RetainingPathOption> retaining_path_target_option_;
2422 
2423   std::vector<HeapObjectAllocationTracker*> allocation_trackers_;
2424 
2425   bool is_finalization_registry_cleanup_task_posted_ = false;
2426 
2427   std::unique_ptr<third_party_heap::Heap> tp_heap_;
2428 
2429   // Classes in "heap" can be friends.
2430   friend class AlwaysAllocateScope;
2431   friend class ArrayBufferCollector;
2432   friend class ArrayBufferSweeper;
2433   friend class ConcurrentMarking;
2434   friend class EvacuateVisitorBase;
2435   friend class GCCallbacksScope;
2436   friend class GCTracer;
2437   friend class HeapAllocator;
2438   friend class HeapObjectIterator;
2439   friend class ScavengeTaskObserver;
2440   friend class IgnoreLocalGCRequests;
2441   friend class IncrementalMarking;
2442   friend class IncrementalMarkingRootMarkingVisitor;
2443   friend class IncrementalMarkingJob;
2444   friend class LargeObjectSpace;
2445   friend class LocalHeap;
2446   friend class MarkingBarrier;
2447   friend class OldLargeObjectSpace;
2448   friend class OptionalAlwaysAllocateScope;
2449   template <typename ConcreteVisitor, typename MarkingState>
2450   friend class MarkingVisitorBase;
2451   friend class MarkCompactCollector;
2452   friend class MarkCompactCollectorBase;
2453   friend class MinorMarkCompactCollector;
2454   friend class NewLargeObjectSpace;
2455   friend class NewSpace;
2456   friend class ObjectStatsCollector;
2457   friend class Page;
2458   friend class PagedSpace;
2459   friend class ReadOnlyRoots;
2460   friend class Scavenger;
2461   friend class ScavengerCollector;
2462   friend class StressConcurrentAllocationObserver;
2463   friend class Space;
2464   friend class Sweeper;
2465   friend class UnifiedHeapMarkingState;
2466   friend class heap::TestMemoryAllocatorScope;
2467   friend class third_party_heap::Heap;
2468   friend class third_party_heap::Impl;
2469 
2470   // The allocator interface.
2471   friend class Factory;
2472   friend class LocalFactory;
2473   template <typename IsolateT>
2474   friend class Deserializer;
2475 
2476   // The Isolate constructs us.
2477   friend class Isolate;
2478 
2479   // Used in cctest.
2480   friend class heap::HeapTester;
2481 };
2482 
2483 class HeapStats {
2484  public:
2485   static const int kStartMarker = 0xDECADE00;
2486   static const int kEndMarker = 0xDECADE01;
2487 
2488   intptr_t* start_marker;                  //  0
2489   size_t* ro_space_size;                   //  1
2490   size_t* ro_space_capacity;               //  2
2491   size_t* new_space_size;                  //  3
2492   size_t* new_space_capacity;              //  4
2493   size_t* old_space_size;                  //  5
2494   size_t* old_space_capacity;              //  6
2495   size_t* code_space_size;                 //  7
2496   size_t* code_space_capacity;             //  8
2497   size_t* map_space_size;                  //  9
2498   size_t* map_space_capacity;              // 10
2499   size_t* lo_space_size;                   // 11
2500   size_t* code_lo_space_size;              // 12
2501   size_t* global_handle_count;             // 13
2502   size_t* weak_global_handle_count;        // 14
2503   size_t* pending_global_handle_count;     // 15
2504   size_t* near_death_global_handle_count;  // 16
2505   size_t* free_global_handle_count;        // 17
2506   size_t* memory_allocator_size;           // 18
2507   size_t* memory_allocator_capacity;       // 19
2508   size_t* malloced_memory;                 // 20
2509   size_t* malloced_peak_memory;            // 21
2510   size_t* objects_per_type;                // 22
2511   size_t* size_per_type;                   // 23
2512   int* os_error;                           // 24
2513   char* last_few_messages;                 // 25
2514   char* js_stacktrace;                     // 26
2515   intptr_t* end_marker;                    // 27
2516 };
2517 
2518 // Disables GC for all allocations. It should not be used
2519 // outside heap, deserializer, and isolate bootstrap.
2520 // Use AlwaysAllocateScopeForTesting in tests.
2521 class V8_NODISCARD AlwaysAllocateScope {
2522  public:
2523   inline ~AlwaysAllocateScope();
2524 
2525  private:
2526   friend class AlwaysAllocateScopeForTesting;
2527   friend class Evacuator;
2528   friend class Heap;
2529   friend class HeapAllocator;
2530   friend class Isolate;
2531 
2532   explicit inline AlwaysAllocateScope(Heap* heap);
2533   Heap* heap_;
2534 };
2535 
2536 // Like AlwaysAllocateScope if the heap argument to the constructor is
2537 // non-null. No-op otherwise.
2538 //
2539 // This class exists because AlwaysAllocateScope doesn't compose with
2540 // base::Optional, since supporting that composition requires making
2541 // base::Optional a friend class, defeating the purpose of hiding its
2542 // constructor.
2543 class V8_NODISCARD OptionalAlwaysAllocateScope {
2544  public:
2545   inline ~OptionalAlwaysAllocateScope();
2546 
2547  private:
2548   friend class Heap;
2549 
2550   explicit inline OptionalAlwaysAllocateScope(Heap* heap);
2551   Heap* heap_;
2552 };
2553 
2554 class V8_NODISCARD AlwaysAllocateScopeForTesting {
2555  public:
2556   explicit inline AlwaysAllocateScopeForTesting(Heap* heap);
2557 
2558  private:
2559   AlwaysAllocateScope scope_;
2560 };
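//
// Illustrative test-only sketch:
//
//   {
//     AlwaysAllocateScopeForTesting always_allocate(heap);
//     // Allocations made in this scope will not trigger a GC.
//   }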
2561 
2562 // The CodeSpaceMemoryModificationScope can only be used by the main thread.
2563 class V8_NODISCARD CodeSpaceMemoryModificationScope {
2564  public:
2565   explicit inline CodeSpaceMemoryModificationScope(Heap* heap);
2566   inline ~CodeSpaceMemoryModificationScope();
2567 
2568  private:
2569   Heap* heap_;
2570 };
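//
// Illustrative sketch (main thread only):
//
//   {
//     CodeSpaceMemoryModificationScope modification_scope(heap);
//     // Code space pages are writable here; protection is restored when the
//     // scope exits.
//   }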
2571 
2572 // The CodePageCollectionMemoryModificationScope can be used by any thread. It
2573 // will not be enabled if a CodeSpaceMemoryModificationScope is already active.
2574 class V8_NODISCARD CodePageCollectionMemoryModificationScope {
2575  public:
2576   explicit inline CodePageCollectionMemoryModificationScope(Heap* heap);
2577   inline ~CodePageCollectionMemoryModificationScope();
2578 
2579  private:
2580   Heap* heap_;
2581 };
2582 
2583 // The CodePageMemoryModificationScope does not check whether transitions to
2584 // writable and back to executable are actually allowed, i.e. whether the
2585 // MemoryChunk was registered to be executable. It can be used by concurrent threads.
2586 class V8_NODISCARD CodePageMemoryModificationScope {
2587  public:
2588   explicit inline CodePageMemoryModificationScope(BasicMemoryChunk* chunk);
2589   explicit inline CodePageMemoryModificationScope(Code object);
2590   inline ~CodePageMemoryModificationScope();
2591 
2592  private:
2593   BasicMemoryChunk* chunk_;
2594   bool scope_active_;
2595 
2596   // Disallow any GCs inside this scope, as a relocation of the underlying
2597   // object would change the {MemoryChunk} that this scope targets.
2598   DISALLOW_GARBAGE_COLLECTION(no_heap_allocation_)
2599 };
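// A minimal usage sketch (illustrative only; `code` is an assumed Code object
// living on an executable MemoryChunk): the chunk backing the object is made
// writable for the lifetime of the scope and restored on destruction.
//
//   {
//     CodePageMemoryModificationScope modification_scope(code);
//     // The code page may be patched here; GC is disallowed inside the scope.
//   }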
2600 
2601 class V8_NODISCARD IgnoreLocalGCRequests {
2602  public:
2603   explicit inline IgnoreLocalGCRequests(Heap* heap);
2604   inline ~IgnoreLocalGCRequests();
2605 
2606  private:
2607   Heap* heap_;
2608 };
2609 
2610 // Visitor class to verify interior pointers in spaces that do not contain
2611 // or care about intergenerational references. All heap object pointers have to
2612 // point into the heap to a location that has a map pointer at its first word.
2613 // Caveat: Heap::Contains is an approximation because it can return true for
2614 // objects in a heap space but above the allocation pointer.
2615 class VerifyPointersVisitor : public ObjectVisitorWithCageBases,
2616                               public RootVisitor {
2617  public:
2618   V8_INLINE explicit VerifyPointersVisitor(Heap* heap);
2619   void VisitPointers(HeapObject host, ObjectSlot start,
2620                      ObjectSlot end) override;
2621   void VisitPointers(HeapObject host, MaybeObjectSlot start,
2622                      MaybeObjectSlot end) override;
2623   void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override;
2624   void VisitCodeTarget(Code host, RelocInfo* rinfo) override;
2625   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override;
2626 
2627   void VisitRootPointers(Root root, const char* description,
2628                          FullObjectSlot start, FullObjectSlot end) override;
2629   void VisitRootPointers(Root root, const char* description,
2630                          OffHeapObjectSlot start,
2631                          OffHeapObjectSlot end) override;
2632 
2633  protected:
2634   V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object);
2635   V8_INLINE void VerifyCodeObjectImpl(HeapObject heap_object);
2636 
2637   template <typename TSlot>
2638   V8_INLINE void VerifyPointersImpl(TSlot start, TSlot end);
2639 
2640   virtual void VerifyPointers(HeapObject host, MaybeObjectSlot start,
2641                               MaybeObjectSlot end);
2642 
2643   Heap* heap_;
2644 };
2645 
2646 // Verify that all objects are Smis.
2647 class VerifySmisVisitor : public RootVisitor {
2648  public:
2649   void VisitRootPointers(Root root, const char* description,
2650                          FullObjectSlot start, FullObjectSlot end) override;
2651 };
2652 
2653 // Space iterator for iterating over all the paged spaces of the heap: Map
2654 // space, old space and code space. Returns each space in turn, and null when it
2655 // is done.
2656 class V8_EXPORT_PRIVATE PagedSpaceIterator {
2657  public:
2658   explicit PagedSpaceIterator(Heap* heap)
2659       : heap_(heap), counter_(FIRST_GROWABLE_PAGED_SPACE) {}
2660   PagedSpace* Next();
2661 
2662  private:
2663   Heap* heap_;
2664   int counter_;
2665 };
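// A minimal usage sketch (illustrative; `heap` is assumed): Next() returns
// each growable paged space in turn and null once all have been visited.
//
//   PagedSpaceIterator spaces(heap);
//   for (PagedSpace* space = spaces.Next(); space != nullptr;
//        space = spaces.Next()) {
//     // Inspect `space`, e.g. accumulate its committed memory.
//   }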
2666 
2667 class V8_EXPORT_PRIVATE SpaceIterator : public Malloced {
2668  public:
2669   explicit SpaceIterator(Heap* heap);
2670   virtual ~SpaceIterator();
2671 
2672   bool HasNext();
2673   Space* Next();
2674 
2675  private:
2676   Heap* heap_;
2677   int current_space_;  // from enum AllocationSpace.
2678 };
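// A minimal usage sketch (illustrative; `heap` is assumed): HasNext()/Next()
// walk the heap's allocation spaces in turn.
//
//   SpaceIterator it(heap);
//   while (it.HasNext()) {
//     Space* space = it.Next();
//     // Inspect `space` here.
//   }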
2679 
2680 // A HeapObjectIterator provides iteration over the entire non-read-only heap.
2681 // It aggregates the specific iterators for the different spaces, as each of
2682 // those can only iterate over a single space.
2683 //
2684 // HeapObjectIterator ensures there is no allocation during its lifetime (using
2685 // an embedded DisallowGarbageCollection instance).
2686 //
2687 // HeapObjectIterator can skip free list nodes (that is, de-allocated heap
2688 // objects that still remain in the heap). Because the free-node filtering is
2689 // implemented using GC marks, it can't be used during MS/MC GC phases. Also, it is
2690 // forbidden to interrupt iteration in this mode, as this will leave heap
2691 // objects marked (and thus, unusable).
2692 //
2693 // See ReadOnlyHeapObjectIterator if you need to iterate over read-only space
2694 // objects, or CombinedHeapObjectIterator if you need to iterate over both
2695 // heaps.
2696 class V8_EXPORT_PRIVATE HeapObjectIterator {
2697  public:
2698   enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
2699 
2700   explicit HeapObjectIterator(Heap* heap,
2701                               HeapObjectsFiltering filtering = kNoFiltering);
2702   ~HeapObjectIterator();
2703 
2704   HeapObject Next();
2705 
2706  private:
2707   HeapObject NextObject();
2708 
2709   Heap* heap_;
2710   std::unique_ptr<SafepointScope> safepoint_scope_;
2711   HeapObjectsFiltering filtering_;
2712   HeapObjectsFilter* filter_;
2713   // Space iterator for iterating all the spaces.
2714   SpaceIterator* space_iterator_;
2715   // Object iterator for the space currently being iterated.
2716   std::unique_ptr<ObjectIterator> object_iterator_;
2717 
2718   DISALLOW_GARBAGE_COLLECTION(no_heap_allocation_)
2719 };
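// A minimal usage sketch (illustrative; `heap` is assumed): the conventional
// iteration idiom is to call Next() until it returns a null HeapObject. No
// allocation may happen while the iterator is alive.
//
//   HeapObjectIterator iterator(heap);
//   for (HeapObject obj = iterator.Next(); !obj.is_null();
//        obj = iterator.Next()) {
//     // Process `obj`.
//   }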
2720 
2721 // Abstract base class for checking whether a weak object should be retained.
2722 class WeakObjectRetainer {
2723  public:
2724   virtual ~WeakObjectRetainer() = default;
2725 
2726   // Return whether this object should be retained. If nullptr is returned, the
2727   // object has no references. Otherwise the address of the retained object
2728   // should be returned, as in some GC situations the object may have been moved.
2729   virtual Object RetainAs(Object object) = 0;
2730 };
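// A hypothetical retainer (illustrative sketch, not part of V8): it keeps
// every weak object alive by returning the object itself; returning a null
// Object instead would signal that the reference should be dropped.
//
//   class RetainEverything final : public WeakObjectRetainer {
//    public:
//     Object RetainAs(Object object) override { return object; }
//   };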
2731 
2732 // -----------------------------------------------------------------------------
2733 // Allows observation of heap object allocations.
2734 class HeapObjectAllocationTracker {
2735  public:
2736   virtual void AllocationEvent(Address addr, int size) = 0;
2737   virtual void MoveEvent(Address from, Address to, int size) {}
2738   virtual void UpdateObjectSizeEvent(Address addr, int size) {}
2739   virtual ~HeapObjectAllocationTracker() = default;
2740 };
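// A hypothetical tracker (illustrative sketch, not part of V8) that counts
// allocated bytes; it would have to be registered with the Heap through its
// allocation-tracker registration API to receive events.
//
//   class AllocationByteCounter final : public HeapObjectAllocationTracker {
//    public:
//     void AllocationEvent(Address addr, int size) override {
//       total_bytes_ += static_cast<size_t>(size);
//     }
//     size_t total_bytes() const { return total_bytes_; }
//
//    private:
//     size_t total_bytes_ = 0;
//   };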
2741 
2742 template <typename T>
2743 inline T ForwardingAddress(T heap_obj);
2744 
2745 // Address block allocator compatible with standard containers which registers
2746 // its allocated range as strong roots.
2747 class StrongRootBlockAllocator {
2748  public:
2749   using pointer = Address*;
2750   using const_pointer = const Address*;
2751   using reference = Address&;
2752   using const_reference = const Address&;
2753   using value_type = Address;
2754   using size_type = size_t;
2755   using difference_type = ptrdiff_t;
2756   template <class U>
2757   struct rebind;
2758 
2759   explicit StrongRootBlockAllocator(Heap* heap) : heap_(heap) {}
2760 
2761   Address* allocate(size_t n);
2762   void deallocate(Address* p, size_t n) noexcept;
2763 
2764  private:
2765   Heap* heap_;
2766 };
2767 
2768 // Rebinding to Address gives another StrongRootBlockAllocator.
2769 template <>
2770 struct StrongRootBlockAllocator::rebind<Address> {
2771   using other = StrongRootBlockAllocator;
2772 };
2773 
2774 // Rebinding to something other than Address gives a std::allocator that
2775 // is copy-constructible from StrongRootBlockAllocator.
2776 template <class U>
2777 struct StrongRootBlockAllocator::rebind {
2778   class other : public std::allocator<U> {
2779    public:
2780     // NOLINTNEXTLINE
2781     other(const StrongRootBlockAllocator&) {}
2782   };
2783 };
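// A minimal usage sketch (illustrative; `heap` and `object` are assumed): a
// standard container whose backing block of Address slots is registered as
// strong roots, keeping the referenced objects alive across GCs.
//
//   StrongRootBlockAllocator allocator(heap);
//   std::vector<Address, StrongRootBlockAllocator> strong_slots(allocator);
//   strong_slots.push_back(object.ptr());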
2784 
2785 class V8_EXPORT_PRIVATE V8_NODISCARD EmbedderStackStateScope final {
2786  public:
2787   enum Origin {
2788     kImplicitThroughTask,
2789     kExplicitInvocation,
2790   };
2791 
2792   // Only used for testing where the Origin is always an explicit invocation.
2793   static EmbedderStackStateScope ExplicitScopeForTesting(
2794       LocalEmbedderHeapTracer* local_tracer,
2795       EmbedderHeapTracer::EmbedderStackState stack_state);
2796 
2797   EmbedderStackStateScope(Heap* heap, Origin origin,
2798                           EmbedderHeapTracer::EmbedderStackState stack_state);
2799   ~EmbedderStackStateScope();
2800 
2801  private:
2802   EmbedderStackStateScope(LocalEmbedderHeapTracer* local_tracer,
2803                           EmbedderHeapTracer::EmbedderStackState stack_state);
2804 
2805   LocalEmbedderHeapTracer* const local_tracer_;
2806   const EmbedderHeapTracer::EmbedderStackState old_stack_state_;
2807 };
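// A minimal usage sketch (illustrative; `heap` is assumed, and the stack-state
// value comes from EmbedderHeapTracer::EmbedderStackState): telling the
// embedder tracer that an explicitly requested GC runs without heap pointers
// on the native stack.
//
//   EmbedderStackStateScope stack_scope(
//       heap, EmbedderStackStateScope::kExplicitInvocation,
//       EmbedderHeapTracer::EmbedderStackState::kNoHeapPointers);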
2808 
2809 }  // namespace internal
2810 }  // namespace v8
2811 
2812 #endif  // V8_HEAP_HEAP_H_
2813