/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_READ_BARRIER_INL_H_
#define ART_RUNTIME_READ_BARRIER_INL_H_

#include "read_barrier.h"

#include "gc/collector/concurrent_copying-inl.h"
#include "gc/heap.h"
#include "mirror/object_reference.h"
#include "mirror/reference.h"
#include "runtime.h"
#include "utils.h"

namespace art {

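// Returns the heap reference stored at the given field of |obj|, applying the
// read barrier variant selected at build time (Baker, Brooks, or
// table-lookup); with barriers disabled this degenerates to a plain load.
// Callers typically reach this through mirror::Object's field getters rather
// than invoking it directly; a sketch of the intended use (illustrative, not
// a verbatim call site):
//
//   mirror::HeapReference<T>* addr =
//       reinterpret_cast<mirror::HeapReference<T>*>(
//           reinterpret_cast<uint8_t*>(obj) + offset.Uint32Value());
//   T* ref = ReadBarrier::Barrier<T, kWithReadBarrier>(obj, offset, addr);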
template <typename MirrorType, ReadBarrierOption kReadBarrierOption, bool kAlwaysUpdateField>
inline MirrorType* ReadBarrier::Barrier(
    mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr) {
  constexpr bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
  if (kUseReadBarrier && with_read_barrier) {
    if (kIsDebugBuild) {
      Thread* const self = Thread::Current();
      if (self != nullptr) {
        CHECK_EQ(self->GetDebugDisallowReadBarrierCount(), 0u);
      }
    }
    if (kUseBakerReadBarrier) {
      // The high bits of the rb_ptr, rb_ptr_high_bits (which must be zero),
      // are used to create an artificial data dependency from the is_gray
      // load to the ref field (ptr) load, avoiding the need for a load-load
      // barrier between the two.
      uintptr_t rb_ptr_high_bits;
      bool is_gray = HasGrayReadBarrierPointer(obj, &rb_ptr_high_bits);
      ref_addr = reinterpret_cast<mirror::HeapReference<MirrorType>*>(
          rb_ptr_high_bits | reinterpret_cast<uintptr_t>(ref_addr));
      MirrorType* ref = ref_addr->AsMirrorPtr();
      MirrorType* old_ref = ref;
      if (is_gray) {
        // Slow-path.
        ref = reinterpret_cast<MirrorType*>(Mark(ref));
        // If kAlwaysUpdateField is true, update the field atomically. This may
        // fail if the mutator updates the field before us, but that is OK.
        if (kAlwaysUpdateField && ref != old_ref) {
          obj->CasFieldStrongRelaxedObjectWithoutWriteBarrier<false, false>(
              offset, old_ref, ref);
        }
      }
      if (kEnableReadBarrierInvariantChecks) {
        CHECK_EQ(rb_ptr_high_bits, 0U) << obj << " rb_ptr=" << obj->GetReadBarrierPointer();
      }
      AssertToSpaceInvariant(obj, offset, ref);
      return ref;
    } else if (kUseBrooksReadBarrier) {
      // To be implemented.
      return ref_addr->AsMirrorPtr();
    } else if (kUseTableLookupReadBarrier) {
      MirrorType* ref = ref_addr->AsMirrorPtr();
      MirrorType* old_ref = ref;
      // The heap or the collector can be null at startup. TODO: avoid the need for this null check.
      gc::Heap* heap = Runtime::Current()->GetHeap();
      if (heap != nullptr && heap->GetReadBarrierTable()->IsSet(old_ref)) {
        ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
        // Update the field atomically. This may fail if the mutator updates
        // the field before us, but that is OK.
        if (ref != old_ref) {
          obj->CasFieldStrongRelaxedObjectWithoutWriteBarrier<false, false>(
              offset, old_ref, ref);
        }
      }
      AssertToSpaceInvariant(obj, offset, ref);
      return ref;
    } else {
      LOG(FATAL) << "Unexpected read barrier type";
      UNREACHABLE();
    }
  } else {
    // No read barrier.
    return ref_addr->AsMirrorPtr();
  }
}

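// Returns the object pointed to by the root |root|, applying the configured
// read barrier. Unlike the field barrier above, the Baker path here keys off
// whether the GC is currently marking rather than off a per-object gray bit.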
template <typename MirrorType, ReadBarrierOption kReadBarrierOption>
inline MirrorType* ReadBarrier::BarrierForRoot(MirrorType** root,
                                               GcRootSource* gc_root_source) {
  MirrorType* ref = *root;
  const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
  if (kUseReadBarrier && with_read_barrier) {
    if (kIsDebugBuild) {
      Thread* const self = Thread::Current();
      if (self != nullptr) {
        CHECK_EQ(self->GetDebugDisallowReadBarrierCount(), 0u);
      }
    }
    if (kUseBakerReadBarrier) {
      // TODO: separate the read barrier code from the collector code more.
      Thread* self = Thread::Current();
      if (self != nullptr && self->GetIsGcMarking()) {
        ref = reinterpret_cast<MirrorType*>(Mark(ref));
      }
      AssertToSpaceInvariant(gc_root_source, ref);
      return ref;
    } else if (kUseBrooksReadBarrier) {
      // To be implemented.
      return ref;
    } else if (kUseTableLookupReadBarrier) {
      Thread* self = Thread::Current();
      if (self != nullptr &&
          self->GetIsGcMarking() &&
          Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
        MirrorType* old_ref = ref;
        ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
        // Update the root atomically. This may fail if the mutator updates it
        // before us, but that is OK.
        if (ref != old_ref) {
          Atomic<mirror::Object*>* atomic_root = reinterpret_cast<Atomic<mirror::Object*>*>(root);
          atomic_root->CompareExchangeStrongRelaxed(old_ref, ref);
        }
      }
      AssertToSpaceInvariant(gc_root_source, ref);
      return ref;
    } else {
      LOG(FATAL) << "Unexpected read barrier type";
      UNREACHABLE();
    }
  } else {
    return ref;
  }
}

// TODO: Reduce the copy-paste between this and the overload above.
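// Same as above, but for roots stored as compressed references. The CAS is
// performed on the compressed representation, so both the old and new values
// are re-encoded with CompressedReference<>::FromMirrorPtr() first.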
template <typename MirrorType, ReadBarrierOption kReadBarrierOption>
inline MirrorType* ReadBarrier::BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
                                               GcRootSource* gc_root_source) {
  MirrorType* ref = root->AsMirrorPtr();
  const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
  if (with_read_barrier && kUseBakerReadBarrier) {
    // TODO: separate the read barrier code from the collector code more.
    Thread* self = Thread::Current();
    if (self != nullptr && self->GetIsGcMarking()) {
      ref = reinterpret_cast<MirrorType*>(Mark(ref));
    }
    AssertToSpaceInvariant(gc_root_source, ref);
    return ref;
  } else if (with_read_barrier && kUseBrooksReadBarrier) {
    // To be implemented.
    return ref;
  } else if (with_read_barrier && kUseTableLookupReadBarrier) {
    Thread* self = Thread::Current();
    if (self != nullptr &&
        self->GetIsGcMarking() &&
        Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
      auto old_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
      ref = reinterpret_cast<MirrorType*>(Mark(ref));
      auto new_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
      // Update the root atomically. This may fail if the mutator updates it
      // before us, but that is OK.
      if (new_ref.AsMirrorPtr() != old_ref.AsMirrorPtr()) {
        auto* atomic_root =
            reinterpret_cast<Atomic<mirror::CompressedReference<MirrorType>>*>(root);
        atomic_root->CompareExchangeStrongRelaxed(old_ref, new_ref);
      }
    }
    AssertToSpaceInvariant(gc_root_source, ref);
    return ref;
  } else {
    return ref;
  }
}

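// Returns true while the runtime is too early in its startup sequence for the
// to-space invariant to be checkable: the heap may not exist yet, the current
// collector may not be concurrent copying (CC), or the CC collector may not
// have been created.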
inline bool ReadBarrier::IsDuringStartup() {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  if (heap == nullptr) {
    // During startup, the heap can be null.
    return true;
  }
  if (heap->CurrentCollectorType() != gc::kCollectorTypeCC) {
    // CC isn't running.
    return true;
  }
  gc::collector::ConcurrentCopying* collector = heap->ConcurrentCopyingCollector();
  if (collector == nullptr) {
    // During startup, the collector can be null.
    return true;
  }
  return false;
}

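// Checks that |ref|, read from the field at |offset| of |obj|, lives in
// to-space; a from-space reference here would mean the read barrier failed to
// forward it. No-op for null references and during startup.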
inline void ReadBarrier::AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset,
                                                mirror::Object* ref) {
  if (kEnableToSpaceInvariantChecks || kIsDebugBuild) {
    if (ref == nullptr || IsDuringStartup()) {
      return;
    }
    Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->
        AssertToSpaceInvariant(obj, offset, ref);
  }
}

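// Same check as above, but for a reference read from a GC root rather than
// from an object field.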
inline void ReadBarrier::AssertToSpaceInvariant(GcRootSource* gc_root_source,
                                                mirror::Object* ref) {
  if (kEnableToSpaceInvariantChecks || kIsDebugBuild) {
    if (ref == nullptr || IsDuringStartup()) {
      return;
    }
    Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->
        AssertToSpaceInvariant(gc_root_source, ref);
  }
}

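// Marks |obj| through the concurrent copying collector and returns its
// to-space reference (which may be |obj| itself if it is already in
// to-space).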
inline mirror::Object* ReadBarrier::Mark(mirror::Object* obj) {
  return Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->Mark(obj);
}

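// Reads the read barrier pointer of |obj| and returns whether its low bits
// encode the gray color. The high bits, which are expected to be zero, are
// returned through |out_rb_ptr_high_bits| so the caller can both check them
// and fold them into the artificial data dependency used by the Baker barrier.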
inline bool ReadBarrier::HasGrayReadBarrierPointer(mirror::Object* obj,
                                                   uintptr_t* out_rb_ptr_high_bits) {
  mirror::Object* rb_ptr = obj->GetReadBarrierPointer();
  uintptr_t rb_ptr_bits = reinterpret_cast<uintptr_t>(rb_ptr);
  uintptr_t rb_ptr_low_bits = rb_ptr_bits & rb_ptr_mask_;
  if (kEnableReadBarrierInvariantChecks) {
    CHECK(rb_ptr_low_bits == white_ptr_ || rb_ptr_low_bits == gray_ptr_ ||
          rb_ptr_low_bits == black_ptr_)
        << "obj=" << obj << " rb_ptr=" << rb_ptr << " " << PrettyTypeOf(obj);
  }
  bool is_gray = rb_ptr_low_bits == gray_ptr_;
  // The high bits are supposed to be zero. We check this on the caller side.
  *out_rb_ptr_high_bits = rb_ptr_bits & ~rb_ptr_mask_;
  return is_gray;
}

}  // namespace art

#endif  // ART_RUNTIME_READ_BARRIER_INL_H_