/art/runtime/gc/accounting/ |
D | bitmap.h |
      49  static ALWAYS_INLINE constexpr size_t BitIndexToWordIndex(uintptr_t offset) {
      54  static ALWAYS_INLINE constexpr T WordIndexToBitIndex(T word_index) {
      58  static ALWAYS_INLINE constexpr uintptr_t BitIndexToMask(uintptr_t bit_index) {
      62  ALWAYS_INLINE bool SetBit(size_t bit_index) {
      66  ALWAYS_INLINE bool ClearBit(size_t bit_index) {
      70  ALWAYS_INLINE bool TestBit(size_t bit_index) const;
      73  ALWAYS_INLINE bool AtomicTestAndSetBit(size_t bit_index);
      96  ALWAYS_INLINE void CheckValidBitIndex(size_t bit_index) const {
     112  ALWAYS_INLINE bool ModifyBit(uintptr_t bit_index);
     137  ALWAYS_INLINE uintptr_t CoverBegin() const {
    [all …]
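The three constexpr helpers are the usual bit-to-word arithmetic. A minimal sketch of what they compute, assuming a 64-bit uintptr_t word (the constant names here are illustrative, not ART's exact definitions):

    #include <cstddef>
    #include <cstdint>

    static constexpr size_t kBitsPerWord = sizeof(uintptr_t) * 8;  // 64 on LP64
    static constexpr size_t kBitsPerWordLog2 = 6;                  // log2(64)

    // Which word of the backing array holds this bit.
    static constexpr size_t BitIndexToWordIndex(uintptr_t bit_index) {
      return bit_index >> kBitsPerWordLog2;
    }

    // First bit index covered by a given word.
    static constexpr uintptr_t WordIndexToBitIndex(uintptr_t word_index) {
      return word_index << kBitsPerWordLog2;
    }

    // Single-bit mask selecting the bit within its word.
    static constexpr uintptr_t BitIndexToMask(uintptr_t bit_index) {
      return static_cast<uintptr_t>(1) << (bit_index % kBitsPerWord);
    }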
|
D | card_table.h |
      59  ALWAYS_INLINE void MarkCard(const void *addr) {
     126  void* AddrFromCard(const uint8_t *card_addr) const ALWAYS_INLINE;
     129  uint8_t* CardFromAddr(const void *addr) const ALWAYS_INLINE;
     137  bool IsValidCard(const uint8_t* card_addr) const ALWAYS_INLINE;
     139  void CheckCardValid(uint8_t* card) const ALWAYS_INLINE;
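MarkCard sits on the hot path of every reference write, which is why it is forced inline. A sketch of the address-to-card mapping, assuming Dalvik/ART's historical 128-byte cards; the constants and class shape are illustrative, not the file's exact contents:

    #include <cstddef>
    #include <cstdint>

    static constexpr size_t kCardShift = 7;     // assumed: 128-byte cards
    static constexpr uint8_t kCardDirty = 0x70; // assumed nonzero "dirty" value

    class CardTableSketch {
     public:
      explicit CardTableSketch(uint8_t* biased_begin) : biased_begin_(biased_begin) {}

      // CardFromAddr: shift the heap address so each card-sized window of the
      // heap maps to one byte of the table.
      uint8_t* CardFromAddr(const void* addr) const {
        return biased_begin_ + (reinterpret_cast<uintptr_t>(addr) >> kCardShift);
      }

      // MarkCard: a single unconditional byte store.
      void MarkCard(const void* addr) { *CardFromAddr(addr) = kCardDirty; }

     private:
      uint8_t* const biased_begin_;  // base biased by (heap_begin >> kCardShift)
    };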
|
/art/runtime/ |
D | handle.h |
      42  ALWAYS_INLINE Handle(const Handle<T>& handle) : reference_(handle.reference_) {
      45  ALWAYS_INLINE Handle<T>& operator=(const Handle<T>& handle) {
      50  ALWAYS_INLINE explicit Handle(StackReference<T>* reference) : reference_(reference) {
      53  ALWAYS_INLINE T& operator*() const SHARED_REQUIRES(Locks::mutator_lock_) {
      57  ALWAYS_INLINE T* operator->() const SHARED_REQUIRES(Locks::mutator_lock_) {
      61  ALWAYS_INLINE T* Get() const SHARED_REQUIRES(Locks::mutator_lock_) {
      65  ALWAYS_INLINE jobject ToJObject() const SHARED_REQUIRES(Locks::mutator_lock_) {
      73  ALWAYS_INLINE StackReference<mirror::Object>* GetReference() {
      77  ALWAYS_INLINE const StackReference<mirror::Object>* GetReference() const {
     108  ALWAYS_INLINE MutableHandle(const MutableHandle<T>& handle)
    [all …]
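A Handle is a thin, non-owning view of a GC-visible stack slot: every accessor re-reads the slot so it always sees the object's current address, even after a moving collection. A reduced sketch (StackReference<T> is replaced by a plain T** for illustration):

    template <typename T>
    class HandleSketch {
     public:
      explicit HandleSketch(T** slot) : slot_(slot) {}
      T* Get() const { return *slot_; }       // re-read the slot on every access
      T& operator*() const { return *Get(); }
      T* operator->() const { return Get(); }
     private:
      T** slot_;  // points into a HandleScope that the GC scans as a root
    };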
|
D | handle_scope.h |
      62  ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
      65  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);
      67  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      70  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      73  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;
      98  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
     148  explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
     149  ALWAYS_INLINE ~StackHandleScope();
     152  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) SHARED_REQUIRES(Locks::mutator_lock_);
     155  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
    [all …]
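Canonical usage, with the shape inferred from the declarations above (the helper calls around the scope are illustrative):

    // StackHandleScope<2> hs(self);                       // room for 2 handles
    // Handle<mirror::Class> h_class = hs.NewHandle(klass);
    // MutableHandle<mirror::Object> h_obj = hs.NewHandle(obj);
    // AllocateSomething(self);           // may suspend; a moving GC can run
    // Use(h_class.Get(), h_obj.Get());   // handles still resolve correctly

Raw mirror pointers held across a suspend point would be stale after a moving collection; the scope's slots are visited as GC roots for the current thread, and the constructor/destructor are forced inline because a scope is opened on nearly every runtime entry point.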
|
D | scoped_thread_state_change.h |
      37  REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      62  ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
     179  REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
     184  REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
     195  ~ScopedObjectAccessAlreadyRunnable() ALWAYS_INLINE {
     223  REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
     230  REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
     238  explicit ScopedObjectAccessUnchecked(JavaVM* vm) ALWAYS_INLINE
     254  SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
     260  SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
    [all …]
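These classes are RAII wrappers around a thread-state transition (e.g. kNative to kRunnable on JNI entry), which is why their constructors and destructors are forced inline. A minimal analogue of the pattern; the real classes also take the mutator lock and check the suspend count:

    enum class ThreadState { kNative, kRunnable };

    class ScopedStateChangeSketch {
     public:
      ScopedStateChangeSketch(ThreadState* state, ThreadState new_state)
          : state_(state), old_state_(*state) {
        *state_ = new_state;   // e.g. kNative -> kRunnable on entry
      }
      ~ScopedStateChangeSketch() {
        *state_ = old_state_;  // restored on scope exit, even on early return
      }
     private:
      ThreadState* state_;
      ThreadState old_state_;
    };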
|
D | memory_region.h |
      55  ALWAYS_INLINE T Load(uintptr_t offset) const {
      65  ALWAYS_INLINE void Store(uintptr_t offset, T value) const {
      74  ALWAYS_INLINE T LoadUnaligned(uintptr_t offset) const {
      89  ALWAYS_INLINE void StoreUnaligned(uintptr_t offset, T value) const {
     101  ALWAYS_INLINE T* PointerTo(uintptr_t offset) const {
     107  ALWAYS_INLINE bool LoadBit(uintptr_t bit_offset) const {
     113  ALWAYS_INLINE void StoreBit(uintptr_t bit_offset, bool value) const {
     165  ALWAYS_INLINE T* ComputeInternalPointer(size_t offset) const {
     173  ALWAYS_INLINE uint8_t* ComputeBitPointer(uintptr_t bit_offset, uint8_t* bit_mask) const {
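LoadUnaligned/StoreUnaligned must work on targets that fault on unaligned access, so the value is moved byte by byte; memcpy is the idiomatic, strict-aliasing-safe spelling, and a good compiler folds it into a single load or store where the target allows. A sketch, not ART's exact bodies:

    #include <cstdint>
    #include <cstring>

    template <typename T>
    T LoadUnalignedSketch(const uint8_t* base, uintptr_t offset) {
      T value;
      std::memcpy(&value, base + offset, sizeof(T));  // byte-wise, alignment-free
      return value;
    }

    template <typename T>
    void StoreUnalignedSketch(uint8_t* base, uintptr_t offset, T value) {
      std::memcpy(base + offset, &value, sizeof(T));
    }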
|
D | stack_map.h |
     663  ALWAYS_INLINE size_t BitSize() const { return end_offset_ - start_offset_; }
     665  ALWAYS_INLINE int32_t Load(const MemoryRegion& region) const {
     695  ALWAYS_INLINE void Store(MemoryRegion region, int32_t value) const {
     747  ALWAYS_INLINE FieldEncoding GetNativePcEncoding() const {
     750  ALWAYS_INLINE FieldEncoding GetDexPcEncoding() const {
     753  ALWAYS_INLINE FieldEncoding GetDexRegisterMapEncoding() const {
     756  ALWAYS_INLINE FieldEncoding GetInlineInfoEncoding() const {
     759  ALWAYS_INLINE FieldEncoding GetRegisterMaskEncoding() const {
     762  ALWAYS_INLINE size_t GetStackMaskBitOffset() const {
     796  ALWAYS_INLINE bool IsValid() const { return region_.pointer() != nullptr; }
    [all …]
|
D | gc_root.h |
      93  ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)
      99  ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)
     171  ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
     189  ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {
     193  ALWAYS_INLINE bool IsNull() const {
     198  ALWAYS_INLINE GcRoot() {}
     199  explicit ALWAYS_INLINE GcRoot(MirrorType* ref) SHARED_REQUIRES(Locks::mutator_lock_);
     225  ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)
     233  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)
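Read() is the root analogue of a field read barrier: it hands the root slot to ReadBarrier::BarrierForRoot (declared in read_barrier.h below) so a concurrent copying collector can return the to-space copy. A behavioural sketch only; the real code is additionally templated on a read-barrier option:

    // template <typename MirrorType>
    // MirrorType* GcRoot<MirrorType>::Read(GcRootSource* source) const {
    //   if (kUseReadBarrier) {
    //     // Let a concurrent copying collector forward the reference.
    //     return ReadBarrier::BarrierForRoot(&root_, source);
    //   }
    //   return root_.AsMirrorPtr();  // plain load when no barrier is in use
    // }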
|
D | art_method.h |
     233  ALWAYS_INLINE mirror::Class* GetDeclaringClass() SHARED_REQUIRES(Locks::mutator_lock_);
     236  ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
     252  ALWAYS_INLINE uint32_t GetAccessFlags();
     419  ALWAYS_INLINE uint32_t GetDexMethodIndex() SHARED_REQUIRES(Locks::mutator_lock_);
     426  ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(size_t pointer_size)
     428  ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index, size_t ptr_size)
     430  ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
     434  ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods, size_t ptr_size)
     478  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(size_t pointer_size) {
     487  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
    [all …]
|
D | read_barrier.h |
      50  ALWAYS_INLINE static MirrorType* Barrier(
      57  ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root,
      64  ALWAYS_INLINE static MirrorType* BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
      96  ALWAYS_INLINE static bool HasGrayReadBarrierPointer(mirror::Object* obj,
|
D | utf.h |
      61  ALWAYS_INLINE int CompareModifiedUtf8ToModifiedUtf8AsUtf16CodePointValues(const char* utf8_1,
     116  ALWAYS_INLINE uint16_t GetLeadingUtf16Char(uint32_t maybe_pair);
     122  ALWAYS_INLINE uint16_t GetTrailingUtf16Char(uint32_t maybe_pair);
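GetLeadingUtf16Char/GetTrailingUtf16Char unpack the uint32_t produced when a 4-byte UTF-8 sequence decodes to a UTF-16 surrogate pair. A sketch; the packing order shown (lead in the low half, trail in the high half) is an assumption for illustration:

    #include <cstdint>

    inline uint16_t GetLeadingUtf16CharSketch(uint32_t maybe_pair) {
      return static_cast<uint16_t>(maybe_pair & 0xFFFF);
    }

    inline uint16_t GetTrailingUtf16CharSketch(uint32_t maybe_pair) {
      return static_cast<uint16_t>(maybe_pair >> 16);
    }
    // A trailing value of 0 means maybe_pair held a single code unit, not a pair.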
|
D | verify_object.h |
      55  ALWAYS_INLINE void VerifyObject(mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;
      58  ALWAYS_INLINE bool VerifyClassClass(mirror::Class* c) NO_THREAD_SAFETY_ANALYSIS;
|
/art/compiler/debug/dwarf/ |
D | debug_frame_opcode_writer.h |
      47  void ALWAYS_INLINE AdvancePC(int absolute_pc) {
      73  void ALWAYS_INLINE RelOffset(Reg reg, int offset) {
      78  void ALWAYS_INLINE AdjustCFAOffset(int delta) {
      83  void ALWAYS_INLINE RelOffsetForMany(Reg reg_base, int offset,
      99  void ALWAYS_INLINE RestoreMany(Reg reg_base, uint32_t reg_mask) {
     111  void ALWAYS_INLINE Nop() {
     117  void ALWAYS_INLINE Offset(Reg reg, int offset) {
     139  void ALWAYS_INLINE Restore(Reg reg) {
     151  void ALWAYS_INLINE Undefined(Reg reg) {
     159  void ALWAYS_INLINE SameValue(Reg reg) {
    [all …]
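These methods emit DWARF call-frame instructions as raw bytes while the compiler generates code, so they are forced inline to keep per-instruction bookkeeping cheap. A simplified sketch of the flavour of output (the opcode value and ULEB128 encoding are standard DWARF; the class shape is illustrative):

    #include <cstdint>
    #include <vector>

    class CfiWriterSketch {
     public:
      // The CFA moved relative to the stack pointer (e.g. after a push/pop):
      // record the new offset with DW_CFA_def_cfa_offset.
      void AdjustCFAOffset(int delta) {
        cfa_offset_ += delta;
        buffer_.push_back(0x0e);  // DW_CFA_def_cfa_offset
        PushUleb128(static_cast<uint32_t>(cfa_offset_));
      }

     private:
      // Standard unsigned LEB128: 7 bits per byte, high bit = continuation.
      void PushUleb128(uint32_t value) {
        do {
          uint8_t byte = value & 0x7f;
          value >>= 7;
          if (value != 0) byte |= 0x80;
          buffer_.push_back(byte);
        } while (value != 0);
      }

      int cfa_offset_ = 0;
      std::vector<uint8_t> buffer_;
    };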
|
/art/runtime/mirror/ |
D | dex_cache.h |
     112  String* GetResolvedString(uint32_t string_idx) ALWAYS_INLINE
     115  void SetResolvedString(uint32_t string_idx, String* resolved) ALWAYS_INLINE
     122  ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, size_t ptr_size)
     125  ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx, ArtMethod* resolved, size_t ptr_size)
     129  ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, size_t ptr_size)
     133  ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, size_t ptr_size)
     136  GcRoot<String>* GetStrings() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
     140  void SetStrings(GcRoot<String>* strings) ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
     144  GcRoot<Class>* GetResolvedTypes() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
     149  ALWAYS_INLINE
    [all …]
|
D | object.h |
      90  ALWAYS_INLINE Class* GetClass() SHARED_REQUIRES(Locks::mutator_lock_);
     103  ALWAYS_INLINE bool AtomicSetReadBarrierPointer(Object* expected_rb_ptr, Object* rb_ptr)
     112  ALWAYS_INLINE bool InstanceOf(Class* klass) SHARED_REQUIRES(Locks::mutator_lock_);
     266  ALWAYS_INLINE T* GetFieldObject(MemberOffset field_offset)
     271  ALWAYS_INLINE T* GetFieldObjectVolatile(MemberOffset field_offset)
     276  ALWAYS_INLINE void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset, Object* new_value)
     281  ALWAYS_INLINE void SetFieldObject(MemberOffset field_offset, Object* new_value)
     286  ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value)
     328  ALWAYS_INLINE uint8_t GetFieldBoolean(MemberOffset field_offset)
     332  ALWAYS_INLINE int8_t GetFieldByte(MemberOffset field_offset)
    [all …]
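The GetField/SetField family boils down to "object base + MemberOffset" accesses, with read/write barriers layered on top in the real code. A barrier-free sketch with MemberOffset reduced to a plain size_t:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    template <typename T>
    T GetFieldSketch(const void* obj, size_t field_offset) {
      const uint8_t* raw = reinterpret_cast<const uint8_t*>(obj);
      T value;
      std::memcpy(&value, raw + field_offset, sizeof(T));  // aliasing-safe load
      return value;
    }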
|
D | class.h |
     195  ALWAYS_INLINE uint32_t GetAccessFlags() SHARED_REQUIRES(Locks::mutator_lock_);
     201  ALWAYS_INLINE uint32_t GetClassFlags() SHARED_REQUIRES(Locks::mutator_lock_) {
     209  ALWAYS_INLINE bool IsInterface() SHARED_REQUIRES(Locks::mutator_lock_) {
     214  ALWAYS_INLINE bool IsPublic() SHARED_REQUIRES(Locks::mutator_lock_) {
     219  ALWAYS_INLINE bool IsFinal() SHARED_REQUIRES(Locks::mutator_lock_) {
     223  ALWAYS_INLINE bool IsFinalizable() SHARED_REQUIRES(Locks::mutator_lock_) {
     227  ALWAYS_INLINE void SetRecursivelyInitialized() SHARED_REQUIRES(Locks::mutator_lock_) {
     233  ALWAYS_INLINE void SetHasDefaultMethods() SHARED_REQUIRES(Locks::mutator_lock_) {
     239  ALWAYS_INLINE void SetFinalizable() SHARED_REQUIRES(Locks::mutator_lock_) {
     244  ALWAYS_INLINE bool IsStringClass() SHARED_REQUIRES(Locks::mutator_lock_) {
    [all …]
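The IsInterface/IsPublic/IsFinal family are one-line flag tests over the access-flags word, which is why they are forced inline. The flag values below are the standard Java/dex access-flag constants; the struct stands in for the real field load:

    #include <cstdint>

    static constexpr uint32_t kAccPublic    = 0x0001;
    static constexpr uint32_t kAccFinal     = 0x0010;
    static constexpr uint32_t kAccInterface = 0x0200;

    struct ClassSketch {
      uint32_t access_flags;
      bool IsInterface() const { return (access_flags & kAccInterface) != 0; }
      bool IsPublic() const    { return (access_flags & kAccPublic) != 0; }
      bool IsFinal() const     { return (access_flags & kAccFinal) != 0; }
    };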
|
D | array.h |
      40  ALWAYS_INLINE static Array* Alloc(Thread* self, Class* array_class, int32_t component_count,
      52  ALWAYS_INLINE int32_t GetLength() SHARED_REQUIRES(Locks::mutator_lock_) {
      85  ALWAYS_INLINE bool CheckIsValidIndex(int32_t index) SHARED_REQUIRES(Locks::mutator_lock_);
     114  const T* GetData() const ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
     118  T* GetData() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
     122  T Get(int32_t i) ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_);
     124  T GetWithoutChecks(int32_t i) ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
     129  void Set(int32_t i, T value) ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_);
     134  void Set(int32_t i, T value) ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS;
     141  void SetWithoutChecks(int32_t i, T value) ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS;
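CheckIsValidIndex can be a single unsigned comparison: casting a negative index to uint32_t makes it enormous, so one compare rejects both index < 0 and index >= length. Whether ART uses exactly this form here is an assumption, but it is the idiomatic bounds check:

    #include <cstdint>

    inline bool CheckIsValidIndexSketch(int32_t index, int32_t length) {
      return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
    }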
|
D | object_array.h |
      42  ALWAYS_INLINE T* Get(int32_t i) SHARED_REQUIRES(Locks::mutator_lock_);
      50  ALWAYS_INLINE void Set(int32_t i, T* object) SHARED_REQUIRES(Locks::mutator_lock_);
      54  ALWAYS_INLINE void Set(int32_t i, T* object) NO_THREAD_SAFETY_ANALYSIS;
      62  ALWAYS_INLINE void SetWithoutChecks(int32_t i, T* object) NO_THREAD_SAFETY_ANALYSIS;
      67  ALWAYS_INLINE void SetWithoutChecksAndWriteBarrier(int32_t i, T* object)
      70  ALWAYS_INLINE T* GetWithoutChecks(int32_t i) SHARED_REQUIRES(Locks::mutator_lock_);
|
D | iftable.h |
      28  ALWAYS_INLINE Class* GetInterface(int32_t i) SHARED_REQUIRES(Locks::mutator_lock_) {
      34  ALWAYS_INLINE void SetInterface(int32_t i, Class* interface)
|
D | string.h |
      86  ALWAYS_INLINE static String* Alloc(Thread* self, int32_t utf16_length,
      92  ALWAYS_INLINE static String* AllocFromByteArray(Thread* self, int32_t byte_length,
      99  ALWAYS_INLINE static String* AllocFromCharArray(Thread* self, int32_t count,
     105  ALWAYS_INLINE static String* AllocFromString(Thread* self, int32_t string_length,
|
/art/runtime/entrypoints/ |
D | entrypoint_utils.h |
      47  ALWAYS_INLINE inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
      52  ALWAYS_INLINE inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
      62  ALWAYS_INLINE inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
      70  ALWAYS_INLINE inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
      77  ALWAYS_INLINE inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
      84  ALWAYS_INLINE inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
      95  ALWAYS_INLINE inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
     103  ALWAYS_INLINE inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
|
/art/runtime/base/ |
D | macros.h |
      74  NO_RETURN ALWAYS_INLINE void operator delete(void*, size_t) { UNREACHABLE(); } \
      75  ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; } \
      76  ALWAYS_INLINE void operator delete(void*, void*) noexcept { } \
     156  #define ALWAYS_INLINE
     158  #define ALWAYS_INLINE __attribute__ ((always_inline))
     165  #define ALWAYS_INLINE_LAMBDA ALWAYS_INLINE
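This is where ALWAYS_INLINE itself is defined: a GCC/Clang always_inline attribute in the normal case, and an empty macro on configurations where forced inlining is undesirable. A reconstruction of the two #define lines above; the guard condition is not shown in this excerpt, so the name here is hypothetical:

    #if defined(ART_DISABLE_ALWAYS_INLINE)  // hypothetical guard name
    #define ALWAYS_INLINE
    #else
    #define ALWAYS_INLINE __attribute__ ((always_inline))
    #endif

Note that always_inline is a promise to the compiler, not a hint: every caller in this listing is on a hot path (barriers, handle access, field loads) where a missed inline would cost a call on each object access.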
|
D | arena_object.h |
      45  ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; }
      46  ALWAYS_INLINE void operator delete(void*, void*) noexcept { }
|
/art/runtime/native/ |
D | scoped_fast_native_object_access.h |
      31  SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
      39  ~ScopedFastNativeObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
|
/art/runtime/gc/collector/ |
D | immune_spaces.h |
      52  ALWAYS_INLINE bool IsInImmuneRegion(const mirror::Object* obj) const {
      70  ALWAYS_INLINE bool ContainsObject(const mirror::Object* obj) const {
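IsInImmuneRegion is a two-comparison range check against a contiguous [begin, end) span of immune spaces, cheap enough for the GC to call per visited object. A sketch with raw pointers standing in for mirror::Object*:

    #include <cstdint>

    struct ImmuneRegionSketch {
      uintptr_t begin;
      uintptr_t end;
      bool IsInImmuneRegion(const void* obj) const {
        uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
        return addr >= begin && addr < end;  // half-open [begin, end)
      }
    };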
|