/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PANDA_RUNTIME_INCLUDE_OBJECT_ACCESSOR_INL_H_
#define PANDA_RUNTIME_INCLUDE_OBJECT_ACCESSOR_INL_H_

#include <securec.h>

#include "libpandabase/mem/mem.h"
#include "runtime/include/field.h"
#include "runtime/include/object_accessor.h"
#include "runtime/mem/gc/gc_barrier_set.h"

namespace panda {

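// Reads the object reference stored in obj at the given offset. In static mode the slot holds an
// object_pointer_type value that is reinterpreted as ObjectHeader *; in dynamic mode (is_dyn) the
// slot holds a full ObjectHeader * value.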
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool is_volatile /* = false */, bool need_read_barrier /* = true */, bool is_dyn /* = false */>
inline ObjectHeader *ObjectAccessor::GetObject(const void *obj, size_t offset)
{
    // We don't have GC with read barriers now
    if (!is_dyn) {
        return reinterpret_cast<ObjectHeader *>(Get<object_pointer_type, is_volatile>(obj, offset));
    }
    return Get<ObjectHeader *, is_volatile>(obj, offset);
}

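// Writes an object reference into obj at the given offset. With need_write_barrier enabled, the
// pre-write barrier is invoked with the previous slot value and the post-write barrier with the
// written object, whenever the respective barrier type is not empty.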
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool is_volatile /* = false */, bool need_write_barrier /* = true */, bool is_dyn /* = false */>
inline void ObjectAccessor::SetObject(void *obj, size_t offset, ObjectHeader *value)
{
    if (need_write_barrier) {
        auto *barrier_set = GetBarrierSet();
        auto gc_pre_barrier_type = barrier_set->GetPreType();

        if (!mem::IsEmptyBarrier(gc_pre_barrier_type)) {
            ObjectHeader *pre_val = GetObject<is_volatile, is_dyn>(obj, offset);
            barrier_set->PreBarrier(ToVoidPtr(ToUintPtr(obj) + offset), pre_val);
        }

        if (!is_dyn) {
            Set<object_pointer_type, is_volatile>(obj, offset, ToObjPtrType(value));
        } else {
            Set<ObjectHeader *, is_volatile>(obj, offset, value);
        }
        auto gc_post_barrier_type = barrier_set->GetPostType();
        if (!mem::IsEmptyBarrier(gc_post_barrier_type)) {
            barrier_set->PostBarrier(ToVoidPtr(ToUintPtr(obj)), value);
        }
    } else {
        if (!is_dyn) {
            Set<object_pointer_type, is_volatile>(obj, offset, ToObjPtrType(value));
        } else {
            Set<ObjectHeader *, is_volatile>(obj, offset, value);
        }
    }
}

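// Overload of GetObject that also accepts the current ManagedThread; the thread is unused since
// there is no GC with read barriers yet.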
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool is_volatile /* = false */, bool need_read_barrier /* = true */, bool is_dyn /* = false */>
inline ObjectHeader *ObjectAccessor::GetObject([[maybe_unused]] const ManagedThread *thread, const void *obj,
                                               size_t offset)
{
    // We don't have GC with read barriers now
    if (!is_dyn) {
        return reinterpret_cast<ObjectHeader *>(Get<object_pointer_type, is_volatile>(obj, offset));
    }
    return Get<ObjectHeader *, is_volatile>(obj, offset);
}

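// Overload of SetObject that obtains the barrier set through the explicit ManagedThread argument.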
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool is_volatile /* = false */, bool need_write_barrier /* = true */, bool is_dyn /* = false */>
inline void ObjectAccessor::SetObject(const ManagedThread *thread, void *obj, size_t offset, ObjectHeader *value)
{
    if (need_write_barrier) {
        auto *barrier_set = GetBarrierSet(thread);
        if (!mem::IsEmptyBarrier(barrier_set->GetPreType())) {
            ObjectHeader *pre_val = GetObject<is_volatile, is_dyn>(obj, offset);
            barrier_set->PreBarrier(ToVoidPtr(ToUintPtr(obj) + offset), pre_val);
        }

        if (!is_dyn) {
            Set<object_pointer_type, is_volatile>(obj, offset, ToObjPtrType(value));
        } else {
            Set<ObjectHeader *, is_volatile>(obj, offset, value);
        }
        if (!mem::IsEmptyBarrier(barrier_set->GetPostType())) {
            barrier_set->PostBarrier(ToVoidPtr(ToUintPtr(obj)), value);
        }
    } else {
        if (!is_dyn) {
            Set<object_pointer_type, is_volatile>(obj, offset, ToObjPtrType(value));
        } else {
            Set<ObjectHeader *, is_volatile>(obj, offset, value);
        }
    }
}

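// Field-based primitive accessors: the volatility of the access comes from the Field metadata.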
/* static */
template <class T>
inline T ObjectAccessor::GetFieldPrimitive(const void *obj, const Field &field)
{
    if (UNLIKELY(field.IsVolatile())) {
        return GetPrimitive<T, true>(obj, field.GetOffset());
    }
    return GetPrimitive<T, false>(obj, field.GetOffset());
}

/* static */
template <class T>
inline void ObjectAccessor::SetFieldPrimitive(void *obj, const Field &field, T value)
{
    if (UNLIKELY(field.IsVolatile())) {
        SetPrimitive<T, true>(obj, field.GetOffset(), value);
    } else {
        SetPrimitive<T, false>(obj, field.GetOffset(), value);
    }
}

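// Field-based object accessors: forward to the offset-based GetObject/SetObject, selecting the
// volatile variant when the field is declared volatile.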
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool need_read_barrier /* = true */, bool is_dyn /* = false */>
inline ObjectHeader *ObjectAccessor::GetFieldObject(const void *obj, const Field &field)
{
    if (UNLIKELY(field.IsVolatile())) {
        return GetObject<true, need_read_barrier, is_dyn>(obj, field.GetOffset());
    }
    return GetObject<false, need_read_barrier, is_dyn>(obj, field.GetOffset());
}

/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool need_write_barrier /* = true */, bool is_dyn /* = false */>
inline void ObjectAccessor::SetFieldObject(void *obj, const Field &field, ObjectHeader *value)
{
#ifdef PANDA_USE_32_BIT_POINTER
    ASSERT(IsInObjectsAddressSpace(ToUintPtr(value)));
#endif
    if (UNLIKELY(field.IsVolatile())) {
        SetObject<true, need_write_barrier, is_dyn>(obj, field.GetOffset(), value);
    } else {
        SetObject<false, need_write_barrier, is_dyn>(obj, field.GetOffset(), value);
    }
}

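// Thread-aware overloads of the field object accessors; they forward to the ManagedThread variants
// of GetObject/SetObject.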
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool need_read_barrier /* = true */, bool is_dyn /* = false */>
inline ObjectHeader *ObjectAccessor::GetFieldObject(ManagedThread *thread, const void *obj, const Field &field)
{
    if (UNLIKELY(field.IsVolatile())) {
        return GetObject<true, need_read_barrier, is_dyn>(thread, obj, field.GetOffset());
    }
    return GetObject<false, need_read_barrier, is_dyn>(thread, obj, field.GetOffset());
}

/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool need_write_barrier /* = true */, bool is_dyn /* = false */>
inline void ObjectAccessor::SetFieldObject(ManagedThread *thread, void *obj, const Field &field, ObjectHeader *value)
{
    if (UNLIKELY(field.IsVolatile())) {
        SetObject<true, need_write_barrier, is_dyn>(thread, obj, field.GetOffset(), value);
    } else {
        SetObject<false, need_write_barrier, is_dyn>(thread, obj, field.GetOffset(), value);
    }
}

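// Primitive accessors that take an explicit std::memory_order for the underlying atomic access.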
/* static */
template <class T>
inline T ObjectAccessor::GetFieldPrimitive(const void *obj, size_t offset, std::memory_order memory_order)
{
    return Get<T>(obj, offset, memory_order);
}

/* static */
template <class T>
inline void ObjectAccessor::SetFieldPrimitive(void *obj, size_t offset, T value, std::memory_order memory_order)
{
    Set<T>(obj, offset, value, memory_order);
}

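// Object accessors with an explicit std::memory_order; SetFieldObject applies the same pre- and
// post-write barriers as SetObject above.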
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool need_read_barrier /* = true */, bool is_dyn /* = false */>
inline ObjectHeader *ObjectAccessor::GetFieldObject(const void *obj, int offset, std::memory_order memory_order)
{
    if (!is_dyn) {
        return reinterpret_cast<ObjectHeader *>(Get<object_pointer_type>(obj, offset, memory_order));
    }
    return Get<ObjectHeader *>(obj, offset, memory_order);
}

/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool need_write_barrier /* = true */, bool is_dyn /* = false */>
inline void ObjectAccessor::SetFieldObject(void *obj, size_t offset, ObjectHeader *value,
                                           std::memory_order memory_order)
{
    if (need_write_barrier) {
        auto *barrier_set = GetBarrierSet();
        auto gc_pre_barrier_type = barrier_set->GetPreType();

        if (!mem::IsEmptyBarrier(gc_pre_barrier_type)) {
            ObjectHeader *pre_val = GetFieldObject<is_dyn>(obj, offset, memory_order);
            barrier_set->PreBarrier(ToVoidPtr(ToUintPtr(obj) + offset), pre_val);
        }

        if (!is_dyn) {
            Set<object_pointer_type>(obj, offset, ToObjPtrType(value), memory_order);
        } else {
            Set<ObjectHeader *>(obj, offset, value, memory_order);
        }
        auto gc_post_barrier_type = barrier_set->GetPostType();
        if (!mem::IsEmptyBarrier(gc_post_barrier_type)) {
            barrier_set->PostBarrier(ToVoidPtr(ToUintPtr(obj)), value);
        }
    } else {
        if (!is_dyn) {
            Set<object_pointer_type>(obj, offset, ToObjPtrType(value), memory_order);
        } else {
            Set<ObjectHeader *>(obj, offset, value, memory_order);
        }
    }
}

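// Atomically compares the primitive at offset with old_value and stores new_value on match.
// Returns {success, observed value}; the weak form of the CAS may fail spuriously.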
/* static */
template <typename T>
inline std::pair<bool, T> ObjectAccessor::CompareAndSetFieldPrimitive(void *obj, size_t offset, T old_value,
                                                                      T new_value, std::memory_order memory_order,
                                                                      bool strong)
{
    uintptr_t raw_addr = reinterpret_cast<uintptr_t>(obj) + offset;
    ASSERT(IsInObjectsAddressSpace(raw_addr));
    auto *atomic_addr = reinterpret_cast<std::atomic<T> *>(raw_addr);
    if (strong) {
        return {atomic_addr->compare_exchange_strong(old_value, new_value, memory_order), old_value};
    }
    return {atomic_addr->compare_exchange_weak(old_value, new_value, memory_order), old_value};
}

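// Compare-and-set for object references: the pre-write barrier sees the previously read value, the
// CAS is done in the representation selected by is_dyn, and the post-write barrier runs only when
// the CAS succeeded.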
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool need_write_barrier /* = true */, bool is_dyn /* = false */>
inline std::pair<bool, ObjectHeader *> ObjectAccessor::CompareAndSetFieldObject(void *obj, size_t offset,
                                                                                ObjectHeader *old_value,
                                                                                ObjectHeader *new_value,
                                                                                std::memory_order memory_order,
                                                                                bool strong)
{
    // update field with read barrier
    ObjectHeader *pre_val = GetObject<false, is_dyn>(obj, offset);
    bool success = false;
    ObjectHeader *result = nullptr;
    auto get_result = [&]() {
        if (is_dyn) {
            auto value =
                CompareAndSetFieldPrimitive<ObjectHeader *>(obj, offset, old_value, new_value, memory_order, strong);
            success = value.first;
            result = value.second;
        } else {
            auto value = CompareAndSetFieldPrimitive<object_pointer_type>(
                obj, offset, ToObjPtrType(old_value), ToObjPtrType(new_value), memory_order, strong);
            success = value.first;
            result = reinterpret_cast<ObjectHeader *>(value.second);
        }
    };

    if (need_write_barrier) {
        auto *barrier_set = GetBarrierSet();
        if (!mem::IsEmptyBarrier(barrier_set->GetPreType())) {
            barrier_set->PreBarrier(ToVoidPtr(ToUintPtr(obj) + offset), pre_val);
        }

        get_result();
        if (success && !mem::IsEmptyBarrier(barrier_set->GetPostType())) {
            barrier_set->PostBarrier(ToVoidPtr(ToUintPtr(obj)), new_value);
        }
        return {success, result};
    }

    get_result();
    return {success, result};
}

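// Atomically exchanges the primitive at offset with value and returns the previous value.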
/* static */
template <typename T>
inline T ObjectAccessor::GetAndSetFieldPrimitive(void *obj, size_t offset, T value, std::memory_order memory_order)
{
    uintptr_t raw_addr = reinterpret_cast<uintptr_t>(obj) + offset;
    ASSERT(IsInObjectsAddressSpace(raw_addr));
    auto *atomic_addr = reinterpret_cast<std::atomic<T> *>(raw_addr);
    return atomic_addr->exchange(value, memory_order);
}

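// Atomic exchange for object references, wrapped with the pre-write barrier for the old value and
// the post-write barrier for the new one (the post barrier is skipped when the previous value was
// null).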
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_HORIZON_SPACE, C_RULE_ID_COMMENT_LOCATION)
template <bool need_write_barrier /* = true */, bool is_dyn /* = false */>
inline ObjectHeader *ObjectAccessor::GetAndSetFieldObject(void *obj, size_t offset, ObjectHeader *value,
                                                          std::memory_order memory_order)
{
    // update field with read barrier
    ObjectHeader *pre_val = GetObject<false, is_dyn>(obj, offset);

    if (need_write_barrier) {
        auto *barrier_set = GetBarrierSet();
        if (!mem::IsEmptyBarrier(barrier_set->GetPreType())) {
            barrier_set->PreBarrier(ToVoidPtr(ToUintPtr(obj) + offset), pre_val);
        }
        ObjectHeader *result = is_dyn ? GetAndSetFieldPrimitive<ObjectHeader *>(obj, offset, value, memory_order)
                                      : reinterpret_cast<ObjectHeader *>(GetAndSetFieldPrimitive<object_pointer_type>(
                                            obj, offset, ToObjPtrType(value), memory_order));
        if (result != nullptr && !mem::IsEmptyBarrier(barrier_set->GetPostType())) {
            barrier_set->PostBarrier(ToVoidPtr(ToUintPtr(obj)), value);
        }

        return result;
    }

    return is_dyn ? GetAndSetFieldPrimitive<ObjectHeader *>(obj, offset, value, memory_order)
                  : reinterpret_cast<ObjectHeader *>(
                        GetAndSetFieldPrimitive<object_pointer_type>(obj, offset, ToObjPtrType(value), memory_order));
}

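// Atomic fetch-and-add. Addition is rejected for uint8_t (boolean); floating-point types use a
// manual CAS loop since the atomic fetch_add specializations cover only integral and pointer types;
// everything else uses fetch_add directly.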
/* static */
template <typename T>
inline T ObjectAccessor::GetAndAddFieldPrimitive([[maybe_unused]] void *obj, [[maybe_unused]] size_t offset,
                                                 [[maybe_unused]] T value,
                                                 [[maybe_unused]] std::memory_order memory_order)
{
    if constexpr (std::is_same_v<T, uint8_t>) {  // NOLINT(readability-braces-around-statements)
        LOG(FATAL, RUNTIME) << "Could not do add for boolean";
        UNREACHABLE();
    } else {                                          // NOLINT(readability-misleading-indentation)
        if constexpr (std::is_floating_point_v<T>) {  // NOLINT(readability-braces-around-statements)
            // Atomic fetch_add is only defined in the atomic specializations for integral and pointer types
            uintptr_t raw_addr = reinterpret_cast<uintptr_t>(obj) + offset;
            ASSERT(IsInObjectsAddressSpace(raw_addr));
            auto *atomic_addr = reinterpret_cast<std::atomic<T> *>(raw_addr);
            T old_value = atomic_addr->load(memory_order);
            T new_value;
            do {
                new_value = old_value + value;
            } while (!atomic_addr->compare_exchange_weak(old_value, new_value, memory_order));
            return old_value;
        } else {  // NOLINT(readability-misleading-indentation, readability-else-after-return)
            uintptr_t raw_addr = reinterpret_cast<uintptr_t>(obj) + offset;
            ASSERT(IsInObjectsAddressSpace(raw_addr));
            auto *atomic_addr = reinterpret_cast<std::atomic<T> *>(raw_addr);
            return atomic_addr->fetch_add(value, memory_order);
        }
    }
}

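// Atomic bitwise fetch-or / fetch-and / fetch-xor; rejected for floating-point types.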
/* static */
template <typename T>
inline T ObjectAccessor::GetAndBitwiseOrFieldPrimitive([[maybe_unused]] void *obj, [[maybe_unused]] size_t offset,
                                                       [[maybe_unused]] T value,
                                                       [[maybe_unused]] std::memory_order memory_order)
{
    if constexpr (std::is_floating_point_v<T>) {  // NOLINT(readability-braces-around-statements)
        LOG(FATAL, RUNTIME) << "Could not do bitwise or for float/double";
        UNREACHABLE();
    } else {  // NOLINT(readability-misleading-indentation)
        uintptr_t raw_addr = reinterpret_cast<uintptr_t>(obj) + offset;
        ASSERT(IsInObjectsAddressSpace(raw_addr));
        auto *atomic_addr = reinterpret_cast<std::atomic<T> *>(raw_addr);
        return atomic_addr->fetch_or(value, memory_order);
    }
}

/* static */
template <typename T>
inline T ObjectAccessor::GetAndBitwiseAndFieldPrimitive([[maybe_unused]] void *obj, [[maybe_unused]] size_t offset,
                                                        [[maybe_unused]] T value,
                                                        [[maybe_unused]] std::memory_order memory_order)
{
    if constexpr (std::is_floating_point_v<T>) {  // NOLINT(readability-braces-around-statements)
        LOG(FATAL, RUNTIME) << "Could not do bitwise and for float/double";
        UNREACHABLE();
    } else {  // NOLINT(readability-misleading-indentation)
        uintptr_t raw_addr = reinterpret_cast<uintptr_t>(obj) + offset;
        ASSERT(IsInObjectsAddressSpace(raw_addr));
        auto *atomic_addr = reinterpret_cast<std::atomic<T> *>(raw_addr);
        return atomic_addr->fetch_and(value, memory_order);
    }
}

/* static */
template <typename T>
inline T ObjectAccessor::GetAndBitwiseXorFieldPrimitive([[maybe_unused]] void *obj, [[maybe_unused]] size_t offset,
                                                        [[maybe_unused]] T value,
                                                        [[maybe_unused]] std::memory_order memory_order)
{
    if constexpr (std::is_floating_point_v<T>) {  // NOLINT(readability-braces-around-statements)
        LOG(FATAL, RUNTIME) << "Could not do bitwise xor for float/double";
        UNREACHABLE();
    } else {  // NOLINT(readability-misleading-indentation)
        uintptr_t raw_addr = reinterpret_cast<uintptr_t>(obj) + offset;
        ASSERT(IsInObjectsAddressSpace(raw_addr));
        auto *atomic_addr = reinterpret_cast<std::atomic<T> *>(raw_addr);
        return atomic_addr->fetch_xor(value, memory_order);
    }
}

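// Stores a dynamic (tagged) value: copies TaggedTypeSize() bytes of the reference into the slot and
// then issues the post-write barrier if one is configured for the thread.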
/* static */
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_COMMENT_LOCATION)
template <bool need_write_barrier /* = true */>
inline void ObjectAccessor::SetDynObject(const ManagedThread *thread, void *obj, size_t offset, ObjectHeader *value)
{
    auto *addr = reinterpret_cast<ObjectHeader *>(ToUintPtr(obj) + offset);
    ASSERT(IsInObjectsAddressSpace(ToUintPtr(addr)));
    (void)memcpy_s(reinterpret_cast<void *>(addr), coretypes::TaggedValue::TaggedTypeSize(), &value,
                   coretypes::TaggedValue::TaggedTypeSize());
    auto gc_post_barrier_type = GetPostBarrierType(thread);
    if (need_write_barrier && !mem::IsEmptyBarrier(gc_post_barrier_type)) {
        GetBarrierSet(thread)->PostBarrier(ToVoidPtr(ToUintPtr(obj)), value);
    }
}

}  // namespace panda

#endif  // PANDA_RUNTIME_INCLUDE_OBJECT_ACCESSOR_INL_H_