/* Copyright (C) 2017 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h. The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#ifndef ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_H_
#define ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_H_

#include <unordered_map>

#include "base/globals.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc/system_weak.h"
#include "gc_root-inl.h"
#include "jvmti.h"
#include "jvmti_allocator.h"
#include "mirror/object.h"
#include "thread-current-inl.h"

namespace openjdkjvmti {

class EventHandler;

// A system-weak container mapping objects to elements of the template type. This corresponds
// to a weak hash map. For historical reasons the stored value is called "tag."
template <typename T>
class JvmtiWeakTable : public art::gc::SystemWeakHolder {
 public:
  JvmtiWeakTable()
      : art::gc::SystemWeakHolder(art::kTaggingLockLevel),
        update_since_last_sweep_(false) {
  }

  // Remove the mapping for the given object, returning whether such a mapping existed (and the
  // old value).
  ALWAYS_INLINE bool Remove(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);
  ALWAYS_INLINE bool RemoveLocked(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  // Set the mapping for the given object. Returns true if this overwrites an already existing
  // mapping.
  ALWAYS_INLINE virtual bool Set(art::ObjPtr<art::mirror::Object> obj, T tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);
  ALWAYS_INLINE virtual bool SetLocked(art::ObjPtr<art::mirror::Object> obj, T tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);
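
  // Illustrative usage sketch; the caller code below is hypothetical and not part of this
  // header. It assumes the mutator lock is held as the annotations require, and uses jlong
  // tags as JVMTI does:
  //
  //   JvmtiWeakTable<jlong> table;
  //   table.Set(obj, 25);                    // Maps obj -> 25; returns false (no old mapping).
  //   jlong tag = 0;
  //   bool found = table.GetTag(obj, &tag);  // found == true, tag == 25.
  //   table.Remove(obj, &tag);               // Returns true and writes back the old value.
  //
  // For bulk updates the lock can be taken once and amortized across the *Locked variants
  // (see the locking functions below); NextTag() and 'objects' are hypothetical:
  //
  //   table.Lock();
  //   for (art::ObjPtr<art::mirror::Object> o : objects) {
  //     table.SetLocked(o, NextTag());
  //   }
  //   table.Unlock();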

  // Return the value associated with the given object. Returns true if the mapping exists,
  // false otherwise.
  bool GetTag(art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_) {
    art::Thread* self = art::Thread::Current();
    art::MutexLock mu(self, allow_disallow_lock_);
    Wait(self);

    return GetTagLocked(self, obj, result);
  }
  bool GetTagLocked(art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_) {
    art::Thread* self = art::Thread::Current();
    allow_disallow_lock_.AssertHeld(self);
    Wait(self);

    return GetTagLocked(self, obj, result);
  }

  // Sweep the container. DO NOT CALL MANUALLY.
  ALWAYS_INLINE void Sweep(art::IsMarkedVisitor* visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);

  // Return all objects that have a value mapping in tags.
  ALWAYS_INLINE
  jvmtiError GetTaggedObjects(jvmtiEnv* jvmti_env,
                              jint tag_count,
                              const T* tags,
                              /* out */ jint* count_ptr,
                              /* out */ jobject** object_result_ptr,
                              /* out */ T** tag_result_ptr)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);

  // Locking functions, to allow coarse-grained locking and amortization.
  ALWAYS_INLINE void Lock() ACQUIRE(allow_disallow_lock_);
  ALWAYS_INLINE void Unlock() RELEASE(allow_disallow_lock_);
  ALWAYS_INLINE void AssertLocked() ASSERT_CAPABILITY(allow_disallow_lock_);

  ALWAYS_INLINE art::ObjPtr<art::mirror::Object> Find(T tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);

 protected:
  // Should HandleNullSweep be called when Sweep detects the release of an object?
  virtual bool DoesHandleNullOnSweep() {
    return false;
  }
  // If DoesHandleNullOnSweep returns true, this function will be called.
  virtual void HandleNullSweep(T tag ATTRIBUTE_UNUSED) {}
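
  // A sketch of the sweep-notification hooks in use. The subclass below is hypothetical
  // (ART's ObjectTagTable uses this pattern to report freed tags, but the code here is
  // illustrative only):
  //
  //   class LoggingTable : public JvmtiWeakTable<jlong> {
  //    protected:
  //     bool DoesHandleNullOnSweep() override { return true; }
  //     void HandleNullSweep(jlong tag) override {
  //       LOG(INFO) << "GC released the object tagged " << tag;
  //     }
  //   };
  //
  // When DoesHandleNullOnSweep() returns true, Sweep() calls HandleNullSweep(tag) once for
  // each mapping whose object the GC has reclaimed.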

 private:
  ALWAYS_INLINE
  bool SetLocked(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, T tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  ALWAYS_INLINE
  bool RemoveLocked(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  bool GetTagLocked(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_) {
    auto it = tagged_objects_.find(art::GcRoot<art::mirror::Object>(obj));
    if (it != tagged_objects_.end()) {
      *result = it->second;
      return true;
    }

    // Performance optimization: To avoid multiple table updates, ensure that during GC we
    // only update once. See the comment on the implementation of GetTagSlowPath.
    if (art::kUseReadBarrier &&
        self != nullptr &&
        self->GetIsGcMarking() &&
        !update_since_last_sweep_) {
      return GetTagSlowPath(self, obj, result);
    }

    return false;
  }

  // Slow-path for GetTag. We didn't find the object, but we might be storing from-pointers and
  // are asked to retrieve with a to-pointer.
  ALWAYS_INLINE
  bool GetTagSlowPath(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  // Update the table by doing read barriers on each element, ensuring that to-space pointers
  // are stored.
  ALWAYS_INLINE
  void UpdateTableWithReadBarrier()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  template <bool kHandleNull>
  void SweepImpl(art::IsMarkedVisitor* visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);

  enum TableUpdateNullTarget {
    kIgnoreNull,
    kRemoveNull,
    kCallHandleNull
  };

  template <typename Updater, TableUpdateNullTarget kTargetNull>
  void UpdateTableWith(Updater& updater)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  template <typename Storage, class Allocator = JvmtiAllocator<T>>
  struct ReleasableContainer;

  struct HashGcRoot {
    size_t operator()(const art::GcRoot<art::mirror::Object>& r) const
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      return reinterpret_cast<uintptr_t>(r.Read<art::kWithoutReadBarrier>());
    }
  };

  struct EqGcRoot {
    bool operator()(const art::GcRoot<art::mirror::Object>& r1,
                    const art::GcRoot<art::mirror::Object>& r2) const
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      return r1.Read<art::kWithoutReadBarrier>() == r2.Read<art::kWithoutReadBarrier>();
    }
  };

  using TagAllocator = JvmtiAllocator<std::pair<const art::GcRoot<art::mirror::Object>, T>>;
  std::unordered_map<art::GcRoot<art::mirror::Object>,
                     T,
                     HashGcRoot,
                     EqGcRoot,
                     TagAllocator> tagged_objects_
      GUARDED_BY(allow_disallow_lock_)
      GUARDED_BY(art::Locks::mutator_lock_);
  // To avoid repeatedly scanning the whole table, remember if we did that since the last sweep.
  bool update_since_last_sweep_;
};

}  // namespace openjdkjvmti

#endif  // ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_H_