// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "include/cppgc/internal/write-barrier.h"

#include "include/cppgc/internal/pointer-policies.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/marking-visitor.h"

#if defined(CPPGC_CAGED_HEAP)
#include "include/cppgc/internal/caged-heap-local-data.h"
#endif

namespace cppgc {
namespace internal {

namespace {

MarkValue(const BasePage * page,MarkerBase * marker,const void * value)24 void MarkValue(const BasePage* page, MarkerBase* marker, const void* value) {
25 #if defined(CPPGC_CAGED_HEAP)
26 DCHECK(reinterpret_cast<CagedHeapLocalData*>(
27 reinterpret_cast<uintptr_t>(value) &
28 ~(kCagedHeapReservationAlignment - 1))
29 ->is_marking_in_progress);
30 #endif
31 auto& header =
32 const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
33 if (!header.TryMarkAtomic()) return;
34
35 DCHECK(marker);
36
37 if (V8_UNLIKELY(header.IsInConstruction<AccessMode::kNonAtomic>())) {
38 // In construction objects are traced only if they are unmarked. If marking
39 // reaches this object again when it is fully constructed, it will re-mark
40 // it and tracing it as a previously not fully constructed object would know
41 // to bail out.
42 header.Unmark<AccessMode::kAtomic>();
43 marker->WriteBarrierForInConstructionObject(header);
44 return;
45 }
46
47 marker->WriteBarrierForObject(header);
48 }

}  // namespace

MarkingBarrierSlowWithSentinelCheck(const void * value)52 void WriteBarrier::MarkingBarrierSlowWithSentinelCheck(const void* value) {
53 if (!value || value == kSentinelPointer) return;
54
55 MarkingBarrierSlow(value);
56 }
MarkingBarrierSlow(const void * value)58 void WriteBarrier::MarkingBarrierSlow(const void* value) {
59 const BasePage* page = BasePage::FromPayload(value);
60 const auto* heap = page->heap();
61
62 // Marker being not set up means that no incremental/concurrent marking is in
63 // progress.
64 if (!heap->marker()) return;
65
66 MarkValue(page, heap->marker(), value);
67 }

#if defined(CPPGC_YOUNG_GENERATION)
GenerationalBarrierSlow(CagedHeapLocalData * local_data,const AgeTable & age_table,const void * slot,uintptr_t value_offset)70 void WriteBarrier::GenerationalBarrierSlow(CagedHeapLocalData* local_data,
71 const AgeTable& age_table,
72 const void* slot,
73 uintptr_t value_offset) {
74 if (age_table[value_offset] == AgeTable::Age::kOld) return;
75 // Record slot.
76 local_data->heap_base->remembered_slots().insert(const_cast<void*>(slot));
77 }
#endif

}  // namespace internal
}  // namespace cppgc