/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_OBJECT_H_
#define ART_RUNTIME_MIRROR_OBJECT_H_

#include "atomic.h"
#include "base/casts.h"
#include "base/enums.h"
#include "globals.h"
#include "obj_ptr.h"
#include "object_reference.h"
#include "offsets.h"
#include "verify_object.h"

namespace art {

class ArtField;
class ArtMethod;
class ImageWriter;
class LockWord;
class Monitor;
struct ObjectOffsets;
class Thread;
class VoidFunctor;

namespace mirror {

class Array;
class Class;
class ClassLoader;
class DexCache;
class FinalizerReference;
template<class T> class ObjectArray;
template<class T> class PrimitiveArray;
typedef PrimitiveArray<uint8_t> BooleanArray;
typedef PrimitiveArray<int8_t> ByteArray;
typedef PrimitiveArray<uint16_t> CharArray;
typedef PrimitiveArray<double> DoubleArray;
typedef PrimitiveArray<float> FloatArray;
typedef PrimitiveArray<int32_t> IntArray;
typedef PrimitiveArray<int64_t> LongArray;
typedef PrimitiveArray<int16_t> ShortArray;
class Reference;
class String;
class Throwable;

// Fields within mirror objects aren't accessed directly so that the appropriate amount of
// handshaking is done with GC (for example, read and write barriers). This macro is used to
// compute an offset for the Set/Get methods defined in Object that can safely access fields.
#define OFFSET_OF_OBJECT_MEMBER(type, field) \
    MemberOffset(OFFSETOF_MEMBER(type, field))

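// Illustrative sketch (not part of the original header): a mirror type with an int field
// would typically pair this macro with the Get/Set accessors declared in Object below.
// The type and field names here are hypothetical.
//
//   static MemberOffset CountOffset() {
//     return OFFSET_OF_OBJECT_MEMBER(SomeMirrorType, count_);
//   }
//   int32_t GetCount() REQUIRES_SHARED(Locks::mutator_lock_) {
//     return GetField32(CountOffset());
//   }
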
// Checks that we don't do field assignments which violate the typing system.
static constexpr bool kCheckFieldAssignments = false;

// Size of Object.
static constexpr uint32_t kObjectHeaderSize = kUseBrooksReadBarrier ? 16 : 8;

// C++ mirror of java.lang.Object
class MANAGED LOCKABLE Object {
 public:
  // The number of vtable entries in java.lang.Object.
  static constexpr size_t kVTableLength = 11;

  // The size of the java.lang.Class representing a java.lang.Object.
  static uint32_t ClassSize(PointerSize pointer_size);

  // Size of an instance of java.lang.Object.
  static constexpr uint32_t InstanceSize() {
    return sizeof(Object);
  }

  static MemberOffset ClassOffset() {
    return OFFSET_OF_OBJECT_MEMBER(Object, klass_);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE Class* GetClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetClass(ObjPtr<Class> new_klass) REQUIRES_SHARED(Locks::mutator_lock_);

  // Get the read barrier state with a fake address dependency.
  // '*fake_address_dependency' will be set to 0.
  ALWAYS_INLINE uint32_t GetReadBarrierState(uintptr_t* fake_address_dependency)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // This version does not offer any special mechanism to prevent load-load reordering.
  ALWAYS_INLINE uint32_t GetReadBarrierState() REQUIRES_SHARED(Locks::mutator_lock_);
  // Get the read barrier state with a load-acquire.
  ALWAYS_INLINE uint32_t GetReadBarrierStateAcquire() REQUIRES_SHARED(Locks::mutator_lock_);

#ifndef USE_BAKER_OR_BROOKS_READ_BARRIER
  NO_RETURN
#endif
  ALWAYS_INLINE void SetReadBarrierState(uint32_t rb_state) REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kCasRelease = false>
  ALWAYS_INLINE bool AtomicSetReadBarrierState(uint32_t expected_rb_state, uint32_t rb_state)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE uint32_t GetMarkBit() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool AtomicSetMarkBit(uint32_t expected_mark_bit, uint32_t mark_bit)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Assert that the read barrier state is in the default (white) state.
  ALWAYS_INLINE void AssertReadBarrierState() const REQUIRES_SHARED(Locks::mutator_lock_);

  // The verifier treats all interfaces as java.lang.Object and relies on runtime checks in
  // invoke-interface to detect incompatible interface types.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool VerifierInstanceOf(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE bool InstanceOf(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t SizeOf() REQUIRES_SHARED(Locks::mutator_lock_);

  Object* Clone(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Roles::uninterruptible_);

  int32_t IdentityHashCode()
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::thread_suspend_count_lock_);

  static MemberOffset MonitorOffset() {
    return OFFSET_OF_OBJECT_MEMBER(Object, monitor_);
  }

  // 'as_volatile' can be false if the mutators are suspended. This is an optimization since it
  // avoids the memory barriers.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  LockWord GetLockWord(bool as_volatile) REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetLockWord(LockWord new_val, bool as_volatile) REQUIRES_SHARED(Locks::mutator_lock_);
  bool CasLockWordWeakSequentiallyConsistent(LockWord old_val, LockWord new_val)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool CasLockWordWeakRelaxed(LockWord old_val, LockWord new_val)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool CasLockWordWeakAcquire(LockWord old_val, LockWord new_val)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool CasLockWordWeakRelease(LockWord old_val, LockWord new_val)
      REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t GetLockOwnerThreadId();

  // Try to enter the monitor; returns non-null on success.
  mirror::Object* MonitorTryEnter(Thread* self)
      EXCLUSIVE_LOCK_FUNCTION()
      REQUIRES(!Roles::uninterruptible_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::Object* MonitorEnter(Thread* self)
      EXCLUSIVE_LOCK_FUNCTION()
      REQUIRES(!Roles::uninterruptible_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool MonitorExit(Thread* self)
      REQUIRES(!Roles::uninterruptible_)
      REQUIRES_SHARED(Locks::mutator_lock_)
      UNLOCK_FUNCTION();
  void Notify(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  void NotifyAll(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  void Wait(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  void Wait(Thread* self, int64_t timeout, int32_t nanos) REQUIRES_SHARED(Locks::mutator_lock_);

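  // Illustrative sketch (not from this file): runtime code usually locks an object through a
  // scoped helper rather than calling these directly, but the raw pattern with the current
  // Thread* 'self' looks roughly like the following (hypothetical use):
  //
  //   obj->MonitorEnter(self);   // may block and may suspend the thread
  //   /* ... work guarded by the monitor ... */
  //   obj->Notify(self);         // must hold the monitor
  //   obj->MonitorExit(self);    // unlocks; fails if the monitor is not owned
  //
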
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsClass() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Class* AsClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsObjectArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<class T,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjectArray<T>* AsObjectArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ClassLoader* AsClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  DexCache* AsDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsArrayInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Array* AsArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  BooleanArray* AsBooleanArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ByteArray* AsByteArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ByteArray* AsByteSizedArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  CharArray* AsCharArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ShortArray* AsShortArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ShortArray* AsShortSizedArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsIntArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  IntArray* AsIntArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsLongArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  LongArray* AsLongArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsFloatArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  FloatArray* AsFloatArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsDoubleArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  DoubleArray* AsDoubleArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsString() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  String* AsString() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  Throwable* AsThrowable() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Reference* AsReference() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsWeakReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsSoftReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsFinalizerReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  FinalizerReference* AsFinalizerReference() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsPhantomReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);

  // Accessors for Java type fields.
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
      ReadBarrierOption kReadBarrierOption = kWithReadBarrier, bool kIsVolatile = false>
  ALWAYS_INLINE T* GetFieldObject(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
      ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE T* GetFieldObjectVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                       ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldObject(MemberOffset field_offset, ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset,
                                            ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kCheckTransaction = true, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldObjectTransaction(MemberOffset field_offset, ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistentObject(MemberOffset field_offset,
                                                ObjPtr<Object> old_value,
                                                ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistentObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                                   ObjPtr<Object> old_value,
                                                                   ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistentObject(MemberOffset field_offset,
                                                  ObjPtr<Object> old_value,
                                                  ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistentObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                                     ObjPtr<Object> old_value,
                                                                     ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelaxedObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                    ObjPtr<Object> old_value,
                                                    ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakReleaseObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                    ObjPtr<Object> old_value,
                                                    ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongRelaxedObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                      ObjPtr<Object> old_value,
                                                      ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongReleaseObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                      ObjPtr<Object> old_value,
                                                      ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  HeapReference<Object>* GetFieldObjectReferenceAddr(MemberOffset field_offset);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE uint8_t GetFieldBoolean(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int8_t GetFieldByte(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE uint8_t GetFieldBooleanVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int8_t GetFieldByteVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldBoolean(MemberOffset field_offset, uint8_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldByte(MemberOffset field_offset, int8_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldBooleanVolatile(MemberOffset field_offset, uint8_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldByteVolatile(MemberOffset field_offset, int8_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE uint16_t GetFieldChar(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int16_t GetFieldShort(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE uint16_t GetFieldCharVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int16_t GetFieldShortVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldChar(MemberOffset field_offset, uint16_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldShort(MemberOffset field_offset, int16_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldCharVolatile(MemberOffset field_offset, uint16_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldShortVolatile(MemberOffset field_offset, int16_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int32_t GetField32(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kVerifyFlags & kVerifyThis) {
      VerifyObject(this);
    }
    return GetField<int32_t, kIsVolatile>(field_offset);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int32_t GetField32Volatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags, true>(field_offset);
  }

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetField32(MemberOffset field_offset, int32_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetField32Volatile(MemberOffset field_offset, int32_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

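  // Illustrative sketch (not from this file): mirror subclasses typically wrap these raw
  // accessors and select the transactional path statically. The type and field below are
  // hypothetical.
  //
  //   template<bool kTransactionActive>
  //   void SetCount(int32_t count) REQUIRES_SHARED(Locks::mutator_lock_) {
  //     SetField32<kTransactionActive>(OFFSET_OF_OBJECT_MEMBER(SomeMirrorType, count_), count);
  //   }
  //
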
  template<bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           bool kIsVolatile = false>
  ALWAYS_INLINE void SetField32Transaction(MemberOffset field_offset, int32_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
                                                          int32_t old_value, int32_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelaxed32(MemberOffset field_offset, int32_t old_value,
                             int32_t new_value) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakAcquire32(MemberOffset field_offset, int32_t old_value,
                             int32_t new_value) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelease32(MemberOffset field_offset, int32_t old_value,
                             int32_t new_value) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset, int32_t old_value,
                                              int32_t new_value) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int64_t GetField64(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kVerifyFlags & kVerifyThis) {
      VerifyObject(this);
    }
    return GetField<int64_t, kIsVolatile>(field_offset);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int64_t GetField64Volatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField64<kVerifyFlags, true>(field_offset);
  }

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetField64(MemberOffset field_offset, int64_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetField64Volatile(MemberOffset field_offset, int64_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           bool kIsVolatile = false>
  ALWAYS_INLINE void SetField64Transaction(MemberOffset field_offset, int64_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
                                            int64_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
                                              int64_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  void SetFieldPtr(MemberOffset field_offset, T new_value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
        field_offset, new_value, kRuntimePointerSize);
  }
  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  void SetFieldPtr64(MemberOffset field_offset, T new_value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
        field_offset, new_value, PointerSize::k64);
  }

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  ALWAYS_INLINE void SetFieldPtrWithSize(MemberOffset field_offset,
                                         T new_value,
                                         PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      DCHECK_EQ(static_cast<uint32_t>(ptr), ptr);  // Check that we don't lose any non-zero bits.
      SetField32<kTransactionActive, kCheckTransaction, kVerifyFlags>(
          field_offset, static_cast<int32_t>(static_cast<uint32_t>(ptr)));
    } else {
      SetField64<kTransactionActive, kCheckTransaction, kVerifyFlags>(
          field_offset, reinterpret_cast64<int64_t>(new_value));
    }
  }
  // TODO: Fix the thread safety analysis broken by the use of templates. This should be
  // REQUIRES_SHARED(Locks::mutator_lock_).
  template <bool kVisitNativeRoots = true,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor,
            typename JavaLangRefVisitor = VoidFunctor>
  void VisitReferences(const Visitor& visitor, const JavaLangRefVisitor& ref_visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  ArtField* FindFieldByOffset(MemberOffset offset) REQUIRES_SHARED(Locks::mutator_lock_);

  // Used by object_test.
  static void SetHashCodeSeed(uint32_t new_seed);
  // Generate an identity hash code. Public for object test.
  static uint32_t GenerateIdentityHashCode();

  // Returns a human-readable form of the name of the *class* of the given object.
  // So given an instance of java.lang.String, the output would
  // be "java.lang.String". Given an array of int, the output would be "int[]".
  // Given String.class, the output would be "java.lang.Class<java.lang.String>".
  static std::string PrettyTypeOf(ObjPtr<mirror::Object> obj)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyTypeOf()
      REQUIRES_SHARED(Locks::mutator_lock_);

 protected:
  // Accessors for non-Java type fields
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  T GetFieldPtr(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset, kRuntimePointerSize);
  }
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  T GetFieldPtr64(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset,
                                                             PointerSize::k64);
  }

  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE T GetFieldPtrWithSize(MemberOffset field_offset, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (pointer_size == PointerSize::k32) {
      uint64_t address = static_cast<uint32_t>(GetField32<kVerifyFlags, kIsVolatile>(field_offset));
      return reinterpret_cast<T>(static_cast<uintptr_t>(address));
    } else {
      int64_t v = GetField64<kVerifyFlags, kIsVolatile>(field_offset);
      return reinterpret_cast64<T>(v);
    }
  }

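  // Illustrative sketch (not from this file): native pointer fields (such as an ArtMethod*)
  // occupy 32 or 64 bits depending on the runtime pointer size, so a hypothetical subclass
  // accessor would dispatch on that size:
  //
  //   ArtMethod* GetResolvedMethod(PointerSize pointer_size)
  //       REQUIRES_SHARED(Locks::mutator_lock_) {
  //     return GetFieldPtrWithSize<ArtMethod*>(
  //         OFFSET_OF_OBJECT_MEMBER(SomeMirrorType, method_), pointer_size);
  //   }
  //
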
  // TODO: Fix this when annotalysis works with visitors.
  template<bool kIsStatic,
          VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
          ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
          typename Visitor>
  void VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) HOT_ATTR
      NO_THREAD_SAFETY_ANALYSIS;
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           typename Visitor>
  void VisitInstanceFieldsReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor) HOT_ATTR
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           typename Visitor>
  void VisitStaticFieldsReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor) HOT_ATTR
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  template<typename kSize, bool kIsVolatile>
  ALWAYS_INLINE void SetField(MemberOffset field_offset, kSize new_value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
    kSize* addr = reinterpret_cast<kSize*>(raw_addr);
    if (kIsVolatile) {
      reinterpret_cast<Atomic<kSize>*>(addr)->StoreSequentiallyConsistent(new_value);
    } else {
      reinterpret_cast<Atomic<kSize>*>(addr)->StoreJavaData(new_value);
    }
  }

  template<typename kSize, bool kIsVolatile>
  ALWAYS_INLINE kSize GetField(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const uint8_t* raw_addr = reinterpret_cast<const uint8_t*>(this) + field_offset.Int32Value();
    const kSize* addr = reinterpret_cast<const kSize*>(raw_addr);
    if (kIsVolatile) {
      return reinterpret_cast<const Atomic<kSize>*>(addr)->LoadSequentiallyConsistent();
    } else {
      return reinterpret_cast<const Atomic<kSize>*>(addr)->LoadJavaData();
    }
  }

  // Get a field with acquire semantics.
  template<typename kSize>
  ALWAYS_INLINE kSize GetFieldAcquire(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Verify the type correctness of stores to fields.
  // TODO: This can cause thread suspension and isn't moving GC safe.
  void CheckFieldAssignmentImpl(MemberOffset field_offset, ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void CheckFieldAssignment(MemberOffset field_offset, ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kCheckFieldAssignments) {
      CheckFieldAssignmentImpl(field_offset, new_value);
    }
  }

  // A utility function that copies an object in a read-barrier and write-barrier aware way.
  // This is used internally by Clone() and Class::CopyOf(). If the object is finalizable,
  // it is the caller's job to call Heap::AddFinalizerReference.
  static Object* CopyObject(ObjPtr<mirror::Object> dest,
                            ObjPtr<mirror::Object> src,
                            size_t num_bytes)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static Atomic<uint32_t> hash_code_seed;

  // The Class representing the type of the object.
  HeapReference<Class> klass_;
  // Monitor and hash code information.
  uint32_t monitor_;

#ifdef USE_BROOKS_READ_BARRIER
  // Note: names use an 'x' prefix and x_rb_ptr_ is of type int
  // instead of Object to go with the alphabetical/by-type field order
  // on the Java side.
  uint32_t x_rb_ptr_;      // For the Brooks pointer.
  uint32_t x_xpadding_;    // For 8-byte alignment. TODO: get rid of this.
#endif

  friend class art::ImageWriter;
  friend class art::Monitor;
  friend struct art::ObjectOffsets;  // for verifying offset information
  friend class CopyObjectVisitor;  // for CopyObject().
  friend class CopyClassVisitor;   // for CopyObject().
  DISALLOW_ALLOCATION();
  DISALLOW_IMPLICIT_CONSTRUCTORS(Object);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_OBJECT_H_