• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "runtime/mem/gc/gc_barrier_set.h"
17 
18 #include <atomic>
19 #include "runtime/include/managed_thread.h"
20 #include "libpandabase/mem/gc_barrier.h"
21 #include "libpandabase/mem/mem.h"
22 #include "runtime/include/object_header.h"
23 #include "runtime/mem/rem_set.h"
24 #include "runtime/mem/gc/heap-space-misc/crossing_map.h"
25 
26 namespace panda::mem {
27 
// Out-of-line defaulted destructor: keeps the destructor's definition in this
// translation unit — presumably to anchor the vtable and let the header work with
// forward declarations. NOTE(review): confirm GCBarrierSet is polymorphic.
GCBarrierSet::~GCBarrierSet() = default;
29 
CheckPostBarrier(CardTable * card_table,const void * obj_addr,bool check_card_table=true)30 bool CheckPostBarrier(CardTable *card_table, const void *obj_addr, bool check_card_table = true)
31 {
32     if constexpr (PANDA_CROSSING_MAP_MANAGE_CROSSED_BORDER) {
33         return true;
34     }
35 
36     // check that obj_addr must be object header
37     ASSERT(IsInObjectsAddressSpace(ToUintPtr(obj_addr)));
38     [[maybe_unused]] auto *object = reinterpret_cast<const ObjectHeader *>(obj_addr);
39     ASSERT(IsInObjectsAddressSpace(ToUintPtr(object->ClassAddr<BaseClass>())));
40 
41     // we need to check that card related by object header must be young/marked/processed.
42     // doesn't for G1, because card_table is processed concurrently, so it can be cleared before we enter here
43     bool res = true;
44     if (check_card_table) {
45         res = !card_table->GetCardPtr(ToUintPtr(obj_addr))->IsClear();
46     }
47     return res;
48 }
49 
PreSATBBarrier(ObjectHeader * pre_val)50 static void PreSATBBarrier(ObjectHeader *pre_val)
51 {
52     if (pre_val != nullptr) {
53         LOG(DEBUG, GC) << "GC PreSATBBarrier pre val -> new val:" << pre_val;
54         auto pre_buff = static_cast<PandaVector<ObjectHeader *> *>(ManagedThread::GetCurrent()->GetPreBuff());
55         ASSERT(pre_buff != nullptr);
56         ValidateObject(RootType::SATB_BUFFER, pre_val);
57         pre_buff->push_back(pre_val);
58     }
59 }
60 
/// Generational post-barrier: mark the card that covers obj_field_addr as dirty so the
/// next collection rescans it for inter-generational references.
///
/// NOTE(review): `*static_cast<const uintptr_t *>(min_addr)` DEREFERENCES min_addr,
/// i.e. it treats min_addr as a pointer to a uintptr_t variable holding the heap's
/// minimum address rather than as that address itself. Confirm callers really pass a
/// pointer-to-value; if min_addr_ is the minimum heap address itself, this should be
/// ToUintPtr(min_addr).
void PostIntergenerationalBarrier(const void *min_addr, uint8_t *card_table_addr, uint8_t card_bits,
                                  uint8_t dirty_card_value, const void *obj_field_addr)
{
    // card index = (field address - heap base) / card size, where card size == 1 << card_bits
    size_t card_index = (ToUintPtr(obj_field_addr) - *static_cast<const uintptr_t *>(min_addr)) >> card_bits;
    auto *card_addr = static_cast<std::atomic_uint8_t *>(ToVoidPtr(ToUintPtr(card_table_addr) + card_index));
    // Atomic with relaxed order reason: data race with card_addr with no synchronization or ordering constraints
    // imposed on other reads or writes
    card_addr->store(dirty_card_value, std::memory_order_relaxed);
}
70 
PostInterregionBarrier(const void * obj_addr,const void * ref,const size_t region_size_bits,const CardTable * card_table,objTwoRefProcessFunc update_func)71 void PostInterregionBarrier(const void *obj_addr, const void *ref, const size_t region_size_bits,
72                             const CardTable *card_table, objTwoRefProcessFunc update_func)
73 {
74     if (ref != nullptr) {
75         // If it is cross-region reference
76         auto obj_card_ptr = card_table->GetCardPtr(ToUintPtr(obj_addr));
77         if (!obj_card_ptr->IsMarked()) {
78             if ((((ToObjPtrType(obj_addr) ^ ToObjPtrType(ref)) >> region_size_bits) != 0)
79                             && (!obj_card_ptr->IsYoung())) {
80                 LOG(DEBUG, GC) << "GC Interregion barrier write to " << std::hex << obj_addr << " value " << ref;
81                 (*update_func)(obj_addr, ref);
82             }
83         }
84     }
85 }
86 
GetBarrierOperand(BarrierPosition barrier_position,std::string_view name)87 BarrierOperand GCBarrierSet::GetBarrierOperand(BarrierPosition barrier_position, std::string_view name)
88 {
89     if (barrier_position == BarrierPosition::BARRIER_POSITION_PRE) {
90         if (UNLIKELY(pre_operands_.find(name.data()) == pre_operands_.end())) {
91             LOG(FATAL, GC) << "Operand " << name << " not found for pre barrier";
92         }
93         return pre_operands_.at(name.data());
94     }
95     if (UNLIKELY(post_operands_.find(name.data()) == post_operands_.end())) {
96         LOG(FATAL, GC) << "Operand " << name << " not found for post barrier";
97     }
98     return post_operands_.at(name.data());
99 }
100 
/// Convenience wrapper: look up a post-barrier operand by name.
/// Terminates (LOG(FATAL) inside GetBarrierOperand) if the operand is missing.
BarrierOperand GCBarrierSet::GetPostBarrierOperand(std::string_view name)
{
    return GetBarrierOperand(BarrierPosition::BARRIER_POSITION_POST, name);
}
105 
// Intentionally a no-op: the generational barrier set needs no pre-barrier.
// (Pre-barriers exist for concurrent-marking collectors — see GCG1BarrierSet::PreBarrier.)
void GCGenBarrierSet::PreBarrier([[maybe_unused]] void *pre_val_addr) {}
107 
/// Generational post-barrier for a single reference store into obj_addr:
/// dirties the card covering the object. The stored value is only used for logging.
void GCGenBarrierSet::PostBarrier(const void *obj_addr, [[maybe_unused]] void *stored_val_addr)
{
    LOG(DEBUG, GC) << "GC PostBarrier: write to " << std::hex << obj_addr << " value " << stored_val_addr;
    PostIntergenerationalBarrier(min_addr_, card_table_addr_, card_bits_, dirty_card_value_, obj_addr);
    // Debug check: the card we just dirtied must no longer be clear.
    ASSERT(CheckPostBarrier(card_table_, obj_addr));
}
114 
/// Generational post-barrier after a bulk array write: dirties the card covering the
/// array header; `size` is unused at this granularity.
/// NOTE(review): only the card at obj_addr is dirtied — this assumes the card scanner
/// walks the whole object starting from its header card; confirm for arrays spanning
/// multiple cards.
void GCGenBarrierSet::PostBarrierArrayWrite(const void *obj_addr, [[maybe_unused]] size_t size)
{
    PostIntergenerationalBarrier(min_addr_, card_table_addr_, card_bits_, dirty_card_value_, obj_addr);
    ASSERT(CheckPostBarrier(card_table_, obj_addr));
}
120 
/// Generational post-barrier used when every field of an object may have been written
/// (e.g. object cloning): dirties the card covering the object header.
void GCGenBarrierSet::PostBarrierEveryObjectFieldWrite(const void *obj_addr, [[maybe_unused]] size_t size)
{
    // NOTE: We can improve an implementation here
    // because now we consider every field as an object reference field.
    // Maybe, it will be better to check it, but there can be possible performance degradation.
    PostIntergenerationalBarrier(min_addr_, card_table_addr_, card_bits_, dirty_card_value_, obj_addr);
    ASSERT(CheckPostBarrier(card_table_, obj_addr));
}
129 
/// @return true while G1 concurrent marking is active, i.e. while callers must
/// execute the SATB pre-barrier before overwriting references.
bool GCG1BarrierSet::IsPreBarrierEnabled()
{
    // Atomic with relaxed order reason: no data race because G1GC sets this flag on pause
    return concurrent_marking_flag_->load(std::memory_order_relaxed);
}
135 
/// G1 SATB pre-barrier: enqueue the about-to-be-overwritten reference into the
/// current thread's pre-buffer so concurrent marking does not lose it.
///
/// @param pre_val_addr previous value of the field being overwritten; may be null
void GCG1BarrierSet::PreBarrier(void *pre_val_addr)
{
    LOG_IF(pre_val_addr != nullptr, DEBUG, GC) << "GC PreBarrier: with pre-value " << pre_val_addr;
    // Callers are expected to gate on IsPreBarrierEnabled(); marking must be active here.
    ASSERT(*concurrent_marking_flag_);
    PreSATBBarrier(reinterpret_cast<ObjectHeader *>(pre_val_addr));
}
142 
/// G1 post-barrier for a single reference store obj_addr->field = stored_val_addr.
/// Delegates to PostInterregionBarrier, which invokes post_func_ for cross-region
/// stores (presumably to update remembered sets — post_func_ is set elsewhere).
void GCG1BarrierSet::PostBarrier(const void *obj_addr, void *stored_val_addr)
{
    LOG_IF(stored_val_addr != nullptr, DEBUG, GC)
        << "GC PostBarrier: write to " << std::hex << obj_addr << " value " << stored_val_addr;
    PostInterregionBarrier(obj_addr, stored_val_addr, region_size_bits_count_, card_table_, post_func_);
    // Debug check only for stores the barrier could act on: skip null stores and
    // same-region references (the XOR of the two addresses shifted by the region bits
    // is zero exactly when both lie in the same region). Card check disabled because
    // G1 processes cards concurrently.
    ASSERT(stored_val_addr == nullptr ||
           (((ToObjPtrType(obj_addr) ^ ToObjPtrType(stored_val_addr)) >> region_size_bits_count_) == 0) ||
           CheckPostBarrier(card_table_, obj_addr, false));
}
152 
/// G1 post-barrier after a bulk array write. The individual stored references are not
/// available here, so a synthetic address one region-size (+1) past obj_addr is passed
/// as the "ref": it is guaranteed to land in a different region, so the cross-region
/// test inside PostInterregionBarrier always passes and the barrier always fires.
void GCG1BarrierSet::PostBarrierArrayWrite(const void *obj_addr, [[maybe_unused]] size_t size)
{
    // Force post inter-region barrier
    PostInterregionBarrier(obj_addr, ToVoidPtr(ToUintPtr(obj_addr) + (1U << region_size_bits_count_) + 1U),
                           region_size_bits_count_, card_table_, post_func_);
    ASSERT(CheckPostBarrier(card_table_, obj_addr, false));
}
160 
/// G1 post-barrier used when every field of an object may have been written.
/// Like PostBarrierArrayWrite, passes a synthetic cross-region "ref" so the
/// inter-region barrier fires unconditionally.
void GCG1BarrierSet::PostBarrierEveryObjectFieldWrite(const void *obj_addr, [[maybe_unused]] size_t size)
{
    // Force post inter-region barrier
    PostInterregionBarrier(obj_addr, ToVoidPtr(ToUintPtr(obj_addr) + (1U << region_size_bits_count_) + 1U),
                           region_size_bits_count_, card_table_, post_func_);
    ASSERT(CheckPostBarrier(card_table_, obj_addr, false));
    // NOTE: We can improve an implementation here
    // because now we consider every field as an object reference field.
    // Maybe, it will be better to check it, but there can be possible performance degradation.
}
171 
172 }  // namespace panda::mem
173