// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_HEAP_PAGE_H_
#define V8_HEAP_CPPGC_HEAP_PAGE_H_

#include "src/base/iterator.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/object-start-bitmap.h"

namespace cppgc {
namespace internal {

class BaseSpace;
class NormalPageSpace;
class LargePageSpace;
class HeapBase;
class PageBackend;

class V8_EXPORT_PRIVATE BasePage {
 public:
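  // Returns the page containing |payload| via pure address arithmetic
  // (pages are aligned in memory; see the inline definitions at the end of
  // this header).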
  static inline BasePage* FromPayload(void*);
  static inline const BasePage* FromPayload(const void*);

  static BasePage* FromInnerAddress(const HeapBase*, void*);
  static const BasePage* FromInnerAddress(const HeapBase*, const void*);

  static void Destroy(BasePage*);

  BasePage(const BasePage&) = delete;
  BasePage& operator=(const BasePage&) = delete;

  HeapBase* heap() { return heap_; }
  const HeapBase* heap() const { return heap_; }

  BaseSpace* space() { return space_; }
  const BaseSpace* space() const { return space_; }
  void set_space(BaseSpace* space) { space_ = space; }

  bool is_large() const { return type_ == PageType::kLarge; }

  Address PayloadStart();
  ConstAddress PayloadStart() const;
  Address PayloadEnd();
  ConstAddress PayloadEnd() const;

  // |address| must refer to a real object.
  template <AccessMode = AccessMode::kNonAtomic>
  HeapObjectHeader& ObjectHeaderFromInnerAddress(void* address) const;
  template <AccessMode = AccessMode::kNonAtomic>
  const HeapObjectHeader& ObjectHeaderFromInnerAddress(
      const void* address) const;

  // |address| is guaranteed to point into the page, but not necessarily into
  // the payload of a live object. Returns nullptr when |address| points into
  // a free-list entry, and the valid header otherwise.
  HeapObjectHeader* TryObjectHeaderFromInnerAddress(void* address) const;
  const HeapObjectHeader* TryObjectHeaderFromInnerAddress(
      const void* address) const;

  // SynchronizedLoad and SynchronizedStore are used to sync pages after they
  // are allocated. std::atomic_thread_fence is sufficient in practice but is
  // not recognized by tsan. Atomic load and store of the |type_| field are
  // added for tsan builds.
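  // Intended pairing (a sketch inferred from the comment above): the
  // allocating thread publishes a freshly initialized page via
  // SynchronizedStore(), and a concurrent marker issues SynchronizedLoad()
  // before reading page fields (see ObjectHeaderFromInnerAddress below).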
  void SynchronizedLoad() const {
#if defined(THREAD_SANITIZER)
    v8::base::AsAtomicPtr(&type_)->load(std::memory_order_acquire);
#endif
  }
  void SynchronizedStore() {
    std::atomic_thread_fence(std::memory_order_seq_cst);
#if defined(THREAD_SANITIZER)
    v8::base::AsAtomicPtr(&type_)->store(type_, std::memory_order_release);
#endif
  }

 protected:
  enum class PageType : uint8_t { kNormal, kLarge };
  BasePage(HeapBase*, BaseSpace*, PageType);

 private:
  HeapBase* heap_;
  BaseSpace* space_;
  PageType type_;
};

class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
  template <typename T>
  class IteratorImpl : v8::base::iterator<std::forward_iterator_tag, T> {
   public:
    explicit IteratorImpl(T* p, ConstAddress lab_start = nullptr,
                          size_t lab_size = 0)
        : p_(p), lab_start_(lab_start), lab_size_(lab_size) {
      DCHECK(p);
      DCHECK_EQ(0, (lab_size & (sizeof(T) - 1)));
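      // If the iterator starts at the linear allocation buffer (LAB), skip
      // over it: memory in the LAB holds no initialized object headers.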
      if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
        p_ += (lab_size_ / sizeof(T));
      }
    }

    T& operator*() { return *p_; }
    const T& operator*() const { return *p_; }

    bool operator==(IteratorImpl other) const { return p_ == other.p_; }
    bool operator!=(IteratorImpl other) const { return !(*this == other); }

    IteratorImpl& operator++() {
      const size_t size = p_->GetSize();
      DCHECK_EQ(0, (size & (sizeof(T) - 1)));
      p_ += (size / sizeof(T));
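      // As in the constructor: skip the LAB if advancing landed on its start.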
      if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
        p_ += (lab_size_ / sizeof(T));
      }
      return *this;
    }
    IteratorImpl operator++(int) {
      IteratorImpl temp(*this);
      ++(*this);
      return temp;
    }

    T* base() const { return p_; }

   private:
    T* p_;
    ConstAddress lab_start_;
    size_t lab_size_;
  };

 public:
  using iterator = IteratorImpl<HeapObjectHeader>;
  using const_iterator = IteratorImpl<const HeapObjectHeader>;

  // Allocates a new page in the detached state.
  static NormalPage* Create(PageBackend*, NormalPageSpace*);
  // Destroys and frees the page. The page must be detached from the
  // corresponding space (i.e. be swept when called).
  static void Destroy(NormalPage*);

  static NormalPage* From(BasePage* page) {
    DCHECK(!page->is_large());
    return static_cast<NormalPage*>(page);
  }
  static const NormalPage* From(const BasePage* page) {
    return From(const_cast<BasePage*>(page));
  }

  iterator begin();
  const_iterator begin() const;

  iterator end() {
    return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadEnd()));
  }
  const_iterator end() const {
    return const_iterator(
        reinterpret_cast<const HeapObjectHeader*>(PayloadEnd()));
  }

  Address PayloadStart();
  ConstAddress PayloadStart() const;
  Address PayloadEnd();
  ConstAddress PayloadEnd() const;

  static size_t PayloadSize();

  bool PayloadContains(ConstAddress address) const {
    return (PayloadStart() <= address) && (address < PayloadEnd());
  }

  PlatformAwareObjectStartBitmap& object_start_bitmap() {
    return object_start_bitmap_;
  }
  const PlatformAwareObjectStartBitmap& object_start_bitmap() const {
    return object_start_bitmap_;
  }

 private:
  NormalPage(HeapBase* heap, BaseSpace* space);
  ~NormalPage();

  PlatformAwareObjectStartBitmap object_start_bitmap_;
};

class V8_EXPORT_PRIVATE LargePage final : public BasePage {
 public:
  // Allocates a new page in the detached state.
  static LargePage* Create(PageBackend*, LargePageSpace*, size_t);
  // Destroys and frees the page. The page must be detached from the
  // corresponding space (i.e. be swept when called).
  static void Destroy(LargePage*);

  static LargePage* From(BasePage* page) {
    DCHECK(page->is_large());
    return static_cast<LargePage*>(page);
  }
  static const LargePage* From(const BasePage* page) {
    return From(const_cast<BasePage*>(page));
  }

  HeapObjectHeader* ObjectHeader();
  const HeapObjectHeader* ObjectHeader() const;

  Address PayloadStart();
  ConstAddress PayloadStart() const;
  Address PayloadEnd();
  ConstAddress PayloadEnd() const;

  size_t PayloadSize() const { return payload_size_; }

  bool PayloadContains(ConstAddress address) const {
    return (PayloadStart() <= address) && (address < PayloadEnd());
  }

 private:
  LargePage(HeapBase* heap, BaseSpace* space, size_t);
  ~LargePage();

  size_t payload_size_;
};

// static
BasePage* BasePage::FromPayload(void* payload) {
  return reinterpret_cast<BasePage*>(
      (reinterpret_cast<uintptr_t>(payload) & kPageBaseMask) + kGuardPageSize);
}
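
// Worked example of the arithmetic above, using hypothetical constants
// (128 KiB pages, i.e. kPageBaseMask == ~uintptr_t{0x1FFFF}, and 4 KiB guard
// pages, i.e. kGuardPageSize == 0x1000):
//   payload          = 0x7f0000023040
//   & kPageBaseMask  = 0x7f0000020000  // page base
//   + kGuardPageSize = 0x7f0000021000  // BasePage*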

// static
const BasePage* BasePage::FromPayload(const void* payload) {
  return reinterpret_cast<const BasePage*>(
      (reinterpret_cast<uintptr_t>(const_cast<void*>(payload)) &
       kPageBaseMask) +
      kGuardPageSize);
}

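// Finds the header for |address|: a large page holds exactly one object, so
// its single header is returned directly; on a normal page the object-start
// bitmap is consulted to locate the start of the enclosing object.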
template <AccessMode mode = AccessMode::kNonAtomic>
const HeapObjectHeader* ObjectHeaderFromInnerAddressImpl(const BasePage* page,
                                                         const void* address) {
  if (page->is_large()) {
    return LargePage::From(page)->ObjectHeader();
  }
  const PlatformAwareObjectStartBitmap& bitmap =
      NormalPage::From(page)->object_start_bitmap();
  const HeapObjectHeader* header =
      bitmap.FindHeader<mode>(static_cast<ConstAddress>(address));
  DCHECK_LT(address, reinterpret_cast<ConstAddress>(header) +
                         header->GetSize<AccessMode::kAtomic>());
  return header;
}

template <AccessMode mode>
HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(void* address) const {
  return const_cast<HeapObjectHeader&>(
      ObjectHeaderFromInnerAddress<mode>(const_cast<const void*>(address)));
}

template <AccessMode mode>
const HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(
    const void* address) const {
  // This method might be called for |address| found via a Trace method of
  // another object. If |address| is on a newly allocated page, there will
  // be no sync between the page allocation and a concurrent marking thread,
  // resulting in a race with page initialization (specifically with writing
  // the page |type_| field). This can occur when tracing a Member holding a
  // reference to a mixin type.
  SynchronizedLoad();
  const HeapObjectHeader* header =
      ObjectHeaderFromInnerAddressImpl<mode>(this, address);
  DCHECK_NE(kFreeListGCInfoIndex, header->GetGCInfoIndex());
  return *header;
}
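
// Usage sketch (illustrative only; ComputeInnerPointer() and |heap| are
// hypothetical):
//   void* inner = ComputeInnerPointer();  // any address inside a live object
//   BasePage* page = BasePage::FromInnerAddress(heap, inner);
//   HeapObjectHeader& header =
//       page->ObjectHeaderFromInnerAddress<AccessMode::kAtomic>(inner);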

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_HEAP_PAGE_H_