/* Copyright (C) 2017 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h.  The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#ifndef ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_INL_H_
#define ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_INL_H_

#include "jvmti_weak_table.h"

#include <cstring>  // memcpy, used by ReleasableContainer::Resize.
#include <limits>
#include <vector>   // std::vector, used by UpdateTableWith.

#include <android-base/logging.h>

#include "art_jvmti.h"
#include "gc/allocation_listener.h"
#include "instrumentation.h"
#include "jni/jni_env_ext-inl.h"
#include "jvmti_allocator.h"
#include "mirror/class.h"
#include "mirror/object.h"
#include "nativehelper/scoped_local_ref.h"
#include "runtime.h"

namespace openjdkjvmti {

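// Locking helpers. All table accesses are guarded by allow_disallow_lock_; Wait()
// additionally blocks mutators while the GC has disallowed access to system weaks.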
template <typename T>
void JvmtiWeakTable<T>::Lock() {
  allow_disallow_lock_.ExclusiveLock(art::Thread::Current());
}
template <typename T>
void JvmtiWeakTable<T>::Unlock() {
  allow_disallow_lock_.ExclusiveUnlock(art::Thread::Current());
}
template <typename T>
void JvmtiWeakTable<T>::AssertLocked() {
  allow_disallow_lock_.AssertHeld(art::Thread::Current());
}

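// Re-read every key through a read barrier so the table holds only to-space
// pointers, and record (via update_since_last_sweep_) that this has been done,
// so it happens at most once per marking phase.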
template <typename T>
void JvmtiWeakTable<T>::UpdateTableWithReadBarrier() {
  update_since_last_sweep_ = true;

  auto WithReadBarrierUpdater = [&](const art::GcRoot<art::mirror::Object>& original_root,
                                    art::mirror::Object* original_obj ATTRIBUTE_UNUSED)
     REQUIRES_SHARED(art::Locks::mutator_lock_) {
    return original_root.Read<art::kWithReadBarrier>();
  };

  UpdateTableWith<decltype(WithReadBarrierUpdater), kIgnoreNull>(WithReadBarrierUpdater);
}

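// Slow path for GetTag(), taken while a concurrent GC is marking and the table
// has not yet been refreshed this cycle: update the table, then retry the lookup.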
template <typename T>
bool JvmtiWeakTable<T>::GetTagSlowPath(art::Thread* self,
                                       art::ObjPtr<art::mirror::Object> obj,
                                       T* result) {
  // Under concurrent GC, there is a window between moving objects and sweeping of system
  // weaks in which mutators are active. We may receive a to-space object pointer in obj,
  // but still have from-space pointers in the table. Explicitly update the table once.
  // Note: this will keep *all* objects in the table live, but should be a rare occurrence.
  UpdateTableWithReadBarrier();
  return GetTagLocked(self, obj, result);
}

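// Remove the tag for obj, storing the old value in *tag if tag is non-null.
// Returns true iff the object was tagged. Remove() takes the lock itself;
// RemoveLocked() expects the caller to already hold allow_disallow_lock_.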
template <typename T>
bool JvmtiWeakTable<T>::Remove(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag) {
  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);
  Wait(self);

  return RemoveLocked(self, obj, tag);
}
template <typename T>
bool JvmtiWeakTable<T>::RemoveLocked(art::ObjPtr<art::mirror::Object> obj, T* tag) {
  art::Thread* self = art::Thread::Current();
  allow_disallow_lock_.AssertHeld(self);
  Wait(self);

  return RemoveLocked(self, obj, tag);
}

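// Locked worker shared by both entry points: erase the entry if present. If the
// lookup misses while a concurrent GC is marking, the key may still be a from-space
// pointer; refresh the table once and retry. The retry cannot recurse again,
// because update_since_last_sweep_ is set by the refresh.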
template <typename T>
bool JvmtiWeakTable<T>::RemoveLocked(art::Thread* self,
                                     art::ObjPtr<art::mirror::Object> obj,
                                     T* tag) {
  auto it = tagged_objects_.find(art::GcRoot<art::mirror::Object>(obj));
  if (it != tagged_objects_.end()) {
    if (tag != nullptr) {
      *tag = it->second;
    }
    tagged_objects_.erase(it);
    return true;
  }

  if (art::gUseReadBarrier && self->GetIsGcMarking() && !update_since_last_sweep_) {
    // Under concurrent GC, there is a window between moving objects and sweeping of system
    // weaks in which mutators are active. We may receive a to-space object pointer in obj,
    // but still have from-space pointers in the table. Explicitly update the table once.
    // Note: this will keep *all* objects in the table live, but should be a rare occurrence.

    // Update the table.
    UpdateTableWithReadBarrier();

    // And try again.
    return RemoveLocked(self, obj, tag);
  }

  // Not in here.
  return false;
}

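// Set (or update) the tag for obj. Returns true if an existing entry was updated,
// false if a new entry was inserted. Same locking split as Remove().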
template <typename T>
bool JvmtiWeakTable<T>::Set(art::ObjPtr<art::mirror::Object> obj, T new_tag) {
  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);
  Wait(self);

  return SetLocked(self, obj, new_tag);
}
template <typename T>
bool JvmtiWeakTable<T>::SetLocked(art::ObjPtr<art::mirror::Object> obj, T new_tag) {
  art::Thread* self = art::Thread::Current();
  allow_disallow_lock_.AssertHeld(self);
  Wait(self);

  return SetLocked(self, obj, new_tag);
}

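// Locked worker: overwrite the tag if the object is already present. On a miss
// during concurrent marking, refresh the table once and retry; otherwise insert
// a fresh entry.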
template <typename T>
bool JvmtiWeakTable<T>::SetLocked(art::Thread* self,
                                  art::ObjPtr<art::mirror::Object> obj,
                                  T new_tag) {
  auto it = tagged_objects_.find(art::GcRoot<art::mirror::Object>(obj));
  if (it != tagged_objects_.end()) {
    it->second = new_tag;
    return true;
  }

  if (art::gUseReadBarrier && self->GetIsGcMarking() && !update_since_last_sweep_) {
    // Under concurrent GC, there is a window between moving objects and sweeping of system
    // weaks in which mutators are active. We may receive a to-space object pointer in obj,
    // but still have from-space pointers in the table. Explicitly update the table once.
    // Note: this will keep *all* objects in the table live, but should be a rare occurrence.

    // Update the table.
    UpdateTableWithReadBarrier();

    // And try again.
    return SetLocked(self, obj, new_tag);
  }

  // New element.
  auto insert_it = tagged_objects_.emplace(art::GcRoot<art::mirror::Object>(obj), new_tag);
  DCHECK(insert_it.second);
  return false;
}

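// GC callback: sweep the table's system weaks. Each key is replaced by the marked
// (possibly moved) object; dead entries are dropped or reported, depending on
// whether the subclass handles null on sweep.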
template <typename T>
void JvmtiWeakTable<T>::Sweep(art::IsMarkedVisitor* visitor) {
  if (DoesHandleNullOnSweep()) {
    SweepImpl<true>(visitor);
  } else {
    SweepImpl<false>(visitor);
  }

  // Under concurrent GC, there is a window between moving objects and sweeping of system
  // weaks in which mutators are active. We may receive a to-space object pointer in obj,
  // but still have from-space pointers in the table. We explicitly update the table then
  // to ensure we compare against to-space pointers. But we want to do this only once. Once
  // sweeping is done, we know all objects are to-space pointers until the next GC cycle,
  // so we re-enable the explicit update for the next marking.
  update_since_last_sweep_ = false;
}

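// Sweep worker: maps each entry through visitor->IsMarked(), which returns the
// moved object for live entries and null for dead ones. Dead entries are either
// removed silently or passed to HandleNullSweep(), per kHandleNull.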
template <typename T>
template <bool kHandleNull>
void JvmtiWeakTable<T>::SweepImpl(art::IsMarkedVisitor* visitor) {
  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);

  auto IsMarkedUpdater = [&](const art::GcRoot<art::mirror::Object>& original_root ATTRIBUTE_UNUSED,
                             art::mirror::Object* original_obj) {
    return visitor->IsMarked(original_obj);
  };

  UpdateTableWith<decltype(IsMarkedUpdater),
                  kHandleNull ? kCallHandleNull : kRemoveNull>(IsMarkedUpdater);
}

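// Core update loop: run `updater` over every entry and re-key entries whose
// object has moved. A null result from the updater is ignored, removed, or
// forwarded to HandleNullSweep(), according to kTargetNull.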
template <typename T>
template <typename Updater, typename JvmtiWeakTable<T>::TableUpdateNullTarget kTargetNull>
ALWAYS_INLINE inline void JvmtiWeakTable<T>::UpdateTableWith(Updater& updater) {
  // We can't emplace within the map as a to-space reference could be the same as some
  // from-space object reference in the map, causing correctness issues. The problem
  // doesn't arise if all updated <K,V> pairs are inserted after the loop as by then such
  // from-space object references would also have been taken care of.

  // Side vector to hold node handles of entries which are updated.
  std::vector<typename TagMap::node_type> updated_node_handles;

  for (auto it = tagged_objects_.begin(); it != tagged_objects_.end();) {
    DCHECK(!it->first.IsNull());
    art::mirror::Object* original_obj = it->first.template Read<art::kWithoutReadBarrier>();
    art::mirror::Object* target_obj = updater(it->first, original_obj);
    if (original_obj != target_obj) {
      if (kTargetNull == kIgnoreNull && target_obj == nullptr) {
        // Ignore null target, don't do anything.
      } else {
        auto nh = tagged_objects_.extract(it++);
        DCHECK(!nh.empty());
        if (target_obj != nullptr) {
          nh.key() = art::GcRoot<art::mirror::Object>(target_obj);
          updated_node_handles.push_back(std::move(nh));
        } else if (kTargetNull == kCallHandleNull) {
          HandleNullSweep(nh.mapped());
        }
        continue;  // Iterator already updated above.
      }
    }
    it++;
  }
  while (!updated_node_handles.empty()) {
    auto ret = tagged_objects_.insert(std::move(updated_node_handles.back()));
    DCHECK(ret.inserted);
    updated_node_handles.pop_back();
  }
}

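// Minimal growable array backed by the JVMTI allocator, so the buffer can be
// handed out through JVMTI out-parameters: Release() transfers ownership of the
// raw storage to the caller instead of freeing it in the destructor.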
template <typename T>
template <typename Storage, class Allocator>
struct JvmtiWeakTable<T>::ReleasableContainer {
  using allocator_type = Allocator;

  explicit ReleasableContainer(const allocator_type& alloc, size_t reserve = 10)
      : allocator(alloc),
        data(reserve > 0 ? allocator.allocate(reserve) : nullptr),
        size(0),
        capacity(reserve) {
  }

  ~ReleasableContainer() {
    if (data != nullptr) {
      allocator.deallocate(data, capacity);
      capacity = 0;
      size = 0;
    }
  }

  Storage* Release() {
    Storage* tmp = data;

    data = nullptr;
    size = 0;
    capacity = 0;

    return tmp;
  }

  void Resize(size_t new_capacity) {
    CHECK_GT(new_capacity, capacity);

    Storage* tmp = allocator.allocate(new_capacity);
    DCHECK(tmp != nullptr);
    if (data != nullptr) {
      memcpy(tmp, data, sizeof(Storage) * size);
    }
    Storage* old = data;
    data = tmp;
    allocator.deallocate(old, capacity);
    capacity = new_capacity;
  }

  void Pushback(const Storage& elem) {
    if (size == capacity) {
      size_t new_capacity = 2 * capacity + 1;
      Resize(new_capacity);
    }
    data[size++] = elem;
  }

  Allocator allocator;
  Storage* data;
  size_t size;
  size_t capacity;
};

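// Backing implementation for JVMTI GetObjectsWithTags: collect every live object
// whose tag matches one of `tags` (or all tagged objects if tag_count is 0),
// returning local references and/or tags through the out-parameters.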
template <typename T>
jvmtiError JvmtiWeakTable<T>::GetTaggedObjects(jvmtiEnv* jvmti_env,
                                               jint tag_count,
                                               const T* tags,
                                               jint* count_ptr,
                                               jobject** object_result_ptr,
                                               T** tag_result_ptr) {
  if (tag_count < 0) {
    return ERR(ILLEGAL_ARGUMENT);
  }
  // Check tags for null before dereferencing it in the loop below.
  if (tags == nullptr) {
    return ERR(NULL_POINTER);
  }
  if (tag_count > 0) {
    for (size_t i = 0; i != static_cast<size_t>(tag_count); ++i) {
      if (tags[i] == 0) {
        return ERR(ILLEGAL_ARGUMENT);
      }
    }
  }
  if (count_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);
  Wait(self);

  art::JNIEnvExt* jni_env = self->GetJniEnv();

  constexpr size_t kDefaultSize = 10;
  size_t initial_object_size;
  size_t initial_tag_size;
  if (tag_count == 0) {
    initial_object_size = (object_result_ptr != nullptr) ? tagged_objects_.size() : 0;
    initial_tag_size = (tag_result_ptr != nullptr) ? tagged_objects_.size() : 0;
  } else {
    initial_object_size = initial_tag_size = kDefaultSize;
  }
  JvmtiAllocator<void> allocator(jvmti_env);
  ReleasableContainer<jobject, JvmtiAllocator<jobject>> selected_objects(allocator,
                                                                         initial_object_size);
  ReleasableContainer<T, JvmtiAllocator<T>> selected_tags(allocator, initial_tag_size);

  size_t count = 0;
  for (auto& pair : tagged_objects_) {
    bool select;
    if (tag_count > 0) {
      select = false;
      for (size_t i = 0; i != static_cast<size_t>(tag_count); ++i) {
        if (tags[i] == pair.second) {
          select = true;
          break;
        }
      }
    } else {
      select = true;
    }

    if (select) {
      art::ObjPtr<art::mirror::Object> obj = pair.first.template Read<art::kWithReadBarrier>();
      if (obj != nullptr) {
        count++;
        if (object_result_ptr != nullptr) {
          selected_objects.Pushback(jni_env->AddLocalReference<jobject>(obj));
        }
        if (tag_result_ptr != nullptr) {
          selected_tags.Pushback(pair.second);
        }
      }
    }
  }

  if (object_result_ptr != nullptr) {
    *object_result_ptr = selected_objects.Release();
  }
  if (tag_result_ptr != nullptr) {
    *tag_result_ptr = selected_tags.Release();
  }
  *count_ptr = static_cast<jint>(count);
  return ERR(NONE);
}

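// Return the first live object currently tagged with `tag`, or null if there is
// none. Linear scan; each candidate key is read through a read barrier.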
template <typename T>
art::ObjPtr<art::mirror::Object> JvmtiWeakTable<T>::Find(T tag) {
  art::Thread* self = art::Thread::Current();
  art::MutexLock mu(self, allow_disallow_lock_);
  Wait(self);

  for (auto& pair : tagged_objects_) {
    if (tag == pair.second) {
      art::ObjPtr<art::mirror::Object> obj = pair.first.template Read<art::kWithReadBarrier>();
      if (obj != nullptr) {
        return obj;
      }
    }
  }
  return nullptr;
}

}  // namespace openjdkjvmti

#endif  // ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_INL_H_