// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CONCURRENT_MARKING_H_
#define V8_HEAP_CONCURRENT_MARKING_H_

#include <atomic>
#include <memory>
#include <unordered_map>

#include "include/v8-platform.h"
#include "src/base/atomic-utils.h"
#include "src/base/platform/condition-variable.h"
#include "src/base/platform/mutex.h"
#include "src/heap/marking-visitor.h"
#include "src/heap/marking-worklist.h"
#include "src/heap/memory-measurement.h"
#include "src/heap/slot-set.h"
#include "src/heap/spaces.h"
#include "src/heap/worklist.h"
#include "src/init/v8.h"
#include "src/tasks/cancelable-task.h"
#include "src/utils/allocation.h"
#include "src/utils/utils.h"

namespace v8 {
namespace internal {

class Heap;
class Isolate;
class MajorNonAtomicMarkingState;
class MemoryChunk;
class WeakObjects;

struct MemoryChunkData {
  intptr_t live_bytes;
  std::unique_ptr<TypedSlots> typed_slots;
};

using MemoryChunkDataMap =
    std::unordered_map<MemoryChunk*, MemoryChunkData, MemoryChunk::Hasher>;

class V8_EXPORT_PRIVATE ConcurrentMarking {
 public:
  // When the scope is entered, the concurrent marking tasks are preempted
  // and stop looking at heap objects; concurrent marking resumes when the
  // scope is exited.
  class PauseScope {
   public:
    explicit PauseScope(ConcurrentMarking* concurrent_marking);
    ~PauseScope();

   private:
    ConcurrentMarking* const concurrent_marking_;
    const bool resume_on_exit_;
  };
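
  // Illustrative usage sketch (assumed call site, not part of this header;
  // heap->concurrent_marking() is assumed to return this ConcurrentMarking):
  //
  //   {
  //     ConcurrentMarking::PauseScope pause(heap->concurrent_marking());
  //     // Marking tasks are preempted here; it is safe to move heap objects.
  //   }  // Marking resumes on scope exit if it was running before.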

  // TODO(gab): The only thing that prevents this being above 7 is
  // Worklist::kMaxNumTasks being maxed at 8 (concurrent marking doesn't use
  // task 0, reserved for the main thread).
  static constexpr int kMaxTasks = 7;

  ConcurrentMarking(Heap* heap, MarkingWorklists* marking_worklists,
                    WeakObjects* weak_objects);

  // Schedules an asynchronous job to perform concurrent marking at |priority|.
  // Objects in the heap should not be moved while the job is active (it can be
  // stopped safely via Pause() or PauseScope).
  void ScheduleJob(TaskPriority priority = TaskPriority::kUserVisible);

  // Waits for scheduled job to complete.
  void Join();
  // Preempts ongoing job ASAP. Returns true if concurrent marking was in
  // progress, false otherwise.
  bool Pause();
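
  // Illustrative lifecycle sketch (assumed driver code on the main thread,
  // not part of this header):
  //
  //   concurrent_marking->ScheduleJob();  // Start background marking workers.
  //   // ... the mutator keeps running while workers drain the worklists ...
  //   concurrent_marking->Join();         // Block until the job completes.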

  // Schedules an asynchronous job to perform concurrent marking at |priority|
  // if not already running; otherwise adjusts the number of workers running
  // the job, and the priority if different from the default kUserVisible.
  void RescheduleJobIfNeeded(
      TaskPriority priority = TaskPriority::kUserVisible);
  // Flushes native context sizes to the given table of the main thread.
  void FlushNativeContexts(NativeContextStats* main_stats);
  // Flushes memory chunk data using the given marking state.
  void FlushMemoryChunkData(MajorNonAtomicMarkingState* marking_state);
  // This function is called for a new space page that was cleared after
  // scavenge and is going to be re-used.
  void ClearMemoryChunkData(MemoryChunk* chunk);
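
  // Illustrative sketch (assumed call site on the main thread during the
  // atomic pause; |stats| and |marking_state| are hypothetical locals):
  //
  //   concurrent_marking->Pause();
  //   concurrent_marking->FlushNativeContexts(&stats);
  //   concurrent_marking->FlushMemoryChunkData(marking_state);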

  // Checks if all threads are stopped.
  bool IsStopped();

  size_t TotalMarkedBytes();

  void set_ephemeron_marked(bool ephemeron_marked) {
    ephemeron_marked_.store(ephemeron_marked);
  }
  bool ephemeron_marked() { return ephemeron_marked_.load(); }
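
  // Illustrative sketch (assumed use, not part of this header): worker tasks
  // are expected to call set_ephemeron_marked(true) after marking an
  // ephemeron value, and the main-thread collector polls the flag to decide
  // whether another ephemeron-processing pass is needed.
  //
  //   if (concurrent_marking->ephemeron_marked()) {
  //     concurrent_marking->set_ephemeron_marked(false);
  //     // ... re-process ephemerons (assumed fixed-point loop) ...
  //   }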

 private:
  struct TaskState {
    size_t marked_bytes = 0;
    MemoryChunkDataMap memory_chunk_data;
    NativeContextInferrer native_context_inferrer;
    NativeContextStats native_context_stats;
    // Padding to keep the per-task state of different workers on separate
    // cache lines and avoid false sharing.
    char cache_line_padding[64];
  };
  class JobTask;
  void Run(JobDelegate* delegate, unsigned mark_compact_epoch,
           bool is_forced_gc);
  size_t GetMaxConcurrency(size_t worker_count);

  std::unique_ptr<JobHandle> job_handle_;
  Heap* const heap_;
  MarkingWorklists* const marking_worklists_;
  WeakObjects* const weak_objects_;
  TaskState task_state_[kMaxTasks + 1];
  std::atomic<size_t> total_marked_bytes_{0};
  std::atomic<bool> ephemeron_marked_{false};
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_CONCURRENT_MARKING_H_