/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "indirect_reference_table-inl.h"

#include "base/systrace.h"
#include "jni_internal.h"
#include "nth_caller_visitor.h"
#include "reference_table.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "utils.h"
#include "verify_object-inl.h"

#include <cstdlib>

namespace art {

static constexpr bool kDumpStackOnNonLocalReference = false;

const char* GetIndirectRefKindString(const IndirectRefKind& kind) {
  switch (kind) {
    case kHandleScopeOrInvalid:
      return "HandleScopeOrInvalid";
    case kLocal:
      return "Local";
    case kGlobal:
      return "Global";
    case kWeakGlobal:
      return "WeakGlobal";
  }
  return "IndirectRefKind Error";
}

template<typename T>
class MutatorLockedDumpable {
 public:
  explicit MutatorLockedDumpable(T& value)
      SHARED_REQUIRES(Locks::mutator_lock_) : value_(value) {
  }

  void Dump(std::ostream& os) const SHARED_REQUIRES(Locks::mutator_lock_) {
    value_.Dump(os);
  }

 private:
  T& value_;

  DISALLOW_COPY_AND_ASSIGN(MutatorLockedDumpable);
};

template<typename T>
std::ostream& operator<<(std::ostream& os, const MutatorLockedDumpable<T>& rhs)
// TODO: should be SHARED_REQUIRES(Locks::mutator_lock_) however annotalysis
//       currently fails for this.
    NO_THREAD_SAFETY_ANALYSIS {
  rhs.Dump(os);
  return os;
}

void IndirectReferenceTable::AbortIfNoCheckJNI(const std::string& msg) {
  // If -Xcheck:jni is on, it'll give a more detailed error before aborting.
  JavaVMExt* vm = Runtime::Current()->GetJavaVM();
  if (!vm->IsCheckJniEnabled()) {
    // Otherwise, we want to abort rather than hand back a bad reference.
    LOG(FATAL) << msg;
  } else {
    LOG(ERROR) << msg;
  }
}

IndirectReferenceTable::IndirectReferenceTable(size_t initialCount,
                                               size_t maxCount, IndirectRefKind desiredKind,
                                               bool abort_on_error)
    : kind_(desiredKind),
      max_entries_(maxCount) {
  CHECK_GT(initialCount, 0U);
  CHECK_LE(initialCount, maxCount);
  CHECK_NE(desiredKind, kHandleScopeOrInvalid);

  std::string error_str;
  const size_t table_bytes = maxCount * sizeof(IrtEntry);
  table_mem_map_.reset(MemMap::MapAnonymous("indirect ref table", nullptr, table_bytes,
                                            PROT_READ | PROT_WRITE, false, false, &error_str));
  if (abort_on_error) {
    CHECK(table_mem_map_.get() != nullptr) << error_str;
    CHECK_EQ(table_mem_map_->Size(), table_bytes);
    CHECK(table_mem_map_->Begin() != nullptr);
  } else if (table_mem_map_.get() == nullptr ||
             table_mem_map_->Size() != table_bytes ||
             table_mem_map_->Begin() == nullptr) {
    table_mem_map_.reset();
    LOG(ERROR) << error_str;
    return;
  }
  table_ = reinterpret_cast<IrtEntry*>(table_mem_map_->Begin());
  segment_state_.all = IRT_FIRST_SEGMENT;
}
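
// Illustrative construction (a sketch, not code from this file; the counts
// below are hypothetical -- real values come from the runtime's JNI
// configuration):
//
//   IndirectReferenceTable globals(/*initialCount=*/512, /*maxCount=*/51200,
//                                  kGlobal, /*abort_on_error=*/true);
//   CHECK(globals.IsValid());
//
// With abort_on_error == false the constructor instead logs the mmap failure
// and leaves the table invalid, which callers must detect via IsValid().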

IndirectReferenceTable::~IndirectReferenceTable() {
}

bool IndirectReferenceTable::IsValid() const {
  return table_mem_map_.get() != nullptr;
}

IndirectRef IndirectReferenceTable::Add(uint32_t cookie, mirror::Object* obj) {
  IRTSegmentState prevState;
  prevState.all = cookie;
  size_t topIndex = segment_state_.parts.topIndex;

  CHECK(obj != nullptr);
  VerifyObject(obj);
  DCHECK(table_ != nullptr);
  DCHECK_GE(segment_state_.parts.numHoles, prevState.parts.numHoles);

  if (topIndex == max_entries_) {
    LOG(FATAL) << "JNI ERROR (app bug): " << kind_ << " table overflow "
               << "(max=" << max_entries_ << ")\n"
               << MutatorLockedDumpable<IndirectReferenceTable>(*this);
  }

  // We know there's enough room in the table.  Now we just need to find
  // the right spot.  If there's a hole, find it and fill it; otherwise,
  // add to the end of the list.
  IndirectRef result;
  int numHoles = segment_state_.parts.numHoles - prevState.parts.numHoles;
  size_t index;
  if (numHoles > 0) {
    DCHECK_GT(topIndex, 1U);
    // Find the first hole; likely to be near the end of the list.
    IrtEntry* pScan = &table_[topIndex - 1];
    DCHECK(!pScan->GetReference()->IsNull());
    --pScan;
    while (!pScan->GetReference()->IsNull()) {
      DCHECK_GE(pScan, table_ + prevState.parts.topIndex);
      --pScan;
    }
    index = pScan - table_;
    segment_state_.parts.numHoles--;
  } else {
    // Add to the end.
    index = topIndex++;
    segment_state_.parts.topIndex = topIndex;
  }
  table_[index].Add(obj);
  result = ToIndirectRef(index);
  if ((false)) {
    LOG(INFO) << "+++ added at " << ExtractIndex(result) << " top=" << segment_state_.parts.topIndex
              << " holes=" << segment_state_.parts.numHoles;
  }

  DCHECK(result != nullptr);
  return result;
}
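
// Illustrative use of Add() (a sketch; obtaining the cookie this way is an
// assumption based on how the local reference table is used elsewhere in the
// runtime, e.g. via JNIEnvExt::local_ref_cookie and JNIEnvExt::locals):
//
//   const uint32_t cookie = env->local_ref_cookie;  // segment state at frame push
//   IndirectRef ref = env->locals.Add(cookie, obj);
//   // ... hand ref out as a jobject ...
//   env->locals.Remove(cookie, ref);
//
// Add() fills an existing hole in the current segment before growing
// topIndex, so heavy Add/Remove churn does not leak table slots.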

void IndirectReferenceTable::AssertEmpty() {
  for (size_t i = 0; i < Capacity(); ++i) {
    if (!table_[i].GetReference()->IsNull()) {
      ScopedObjectAccess soa(Thread::Current());
      LOG(FATAL) << "Internal Error: non-empty local reference table\n"
                 << MutatorLockedDumpable<IndirectReferenceTable>(*this);
    }
  }
}

// Removes an object. We extract the table offset bits from "iref"
// and zap the corresponding entry, leaving a hole if it's not at the top.
// If the entry is not between the current top index and the bottom index
// specified by the cookie, we don't remove anything. This is the behavior
// required by JNI's DeleteLocalRef function.
// This method is not called when a local frame is popped; this is only used
// for explicit single removals.
// Returns "false" if nothing was removed.
bool IndirectReferenceTable::Remove(uint32_t cookie, IndirectRef iref) {
  IRTSegmentState prevState;
  prevState.all = cookie;
  int topIndex = segment_state_.parts.topIndex;
  int bottomIndex = prevState.parts.topIndex;

  DCHECK(table_ != nullptr);
  DCHECK_GE(segment_state_.parts.numHoles, prevState.parts.numHoles);

  if (GetIndirectRefKind(iref) == kHandleScopeOrInvalid) {
    auto* self = Thread::Current();
    if (self->HandleScopeContains(reinterpret_cast<jobject>(iref))) {
      auto* env = self->GetJniEnv();
      DCHECK(env != nullptr);
      if (env->check_jni) {
        ScopedObjectAccess soa(self);
        LOG(WARNING) << "Attempt to remove non-JNI local reference, dumping thread";
        if (kDumpStackOnNonLocalReference) {
          self->Dump(LOG(WARNING));
        }
      }
      return true;
    }
  }
  const int idx = ExtractIndex(iref);
  if (idx < bottomIndex) {
    // Wrong segment.
    LOG(WARNING) << "Attempt to remove index outside index area (" << idx
                 << " vs " << bottomIndex << "-" << topIndex << ")";
    return false;
  }
  if (idx >= topIndex) {
    // Bad --- stale reference?
    LOG(WARNING) << "Attempt to remove invalid index " << idx
                 << " (bottom=" << bottomIndex << " top=" << topIndex << ")";
    return false;
  }

  if (idx == topIndex - 1) {
    // Top-most entry.  Scan up and consume holes.

    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    int numHoles = segment_state_.parts.numHoles - prevState.parts.numHoles;
    if (numHoles != 0) {
      while (--topIndex > bottomIndex && numHoles != 0) {
        if ((false)) {
          LOG(INFO) << "+++ checking for hole at " << topIndex - 1
                    << " (cookie=" << cookie << ") val="
                    << table_[topIndex - 1].GetReference()->Read<kWithoutReadBarrier>();
        }
        if (!table_[topIndex - 1].GetReference()->IsNull()) {
          break;
        }
        if ((false)) {
          LOG(INFO) << "+++ ate hole at " << (topIndex - 1);
        }
        numHoles--;
      }
      segment_state_.parts.numHoles = numHoles + prevState.parts.numHoles;
      segment_state_.parts.topIndex = topIndex;
    } else {
      segment_state_.parts.topIndex = topIndex - 1;
      if ((false)) {
        LOG(INFO) << "+++ ate last entry " << topIndex - 1;
      }
    }
  } else {
    // Not the top-most entry.  This creates a hole.  We null out the entry to prevent somebody
    // from deleting it twice and screwing up the hole count.
    if (table_[idx].GetReference()->IsNull()) {
      LOG(INFO) << "--- WEIRD: removing null entry " << idx;
      return false;
    }
    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    segment_state_.parts.numHoles++;
    if ((false)) {
      LOG(INFO) << "+++ left hole at " << idx << ", holes=" << segment_state_.parts.numHoles;
    }
  }

  return true;
}
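
// Illustrative segment discipline (a sketch; the save/restore pattern is an
// assumption about callers such as JNI PushLocalFrame/PopLocalFrame, but the
// fields are the ones used above):
//
//   uint32_t cookie = segment_state_.all;  // segment state at frame push
//   // ... Add()/Remove() against the new segment ...
//   segment_state_.all = cookie;           // frame pop discards the segment
//
// Remove() therefore only accepts indices between the cookie's topIndex (the
// segment bottom) and the current topIndex: exactly the entries a matching
// pop would discard.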

void IndirectReferenceTable::Trim() {
  ScopedTrace trace(__PRETTY_FUNCTION__);
  const size_t top_index = Capacity();
  auto* release_start = AlignUp(reinterpret_cast<uint8_t*>(&table_[top_index]), kPageSize);
  uint8_t* release_end = table_mem_map_->End();
  madvise(release_start, release_end - release_start, MADV_DONTNEED);
}
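
// Trim() releases the physical pages backing the unused tail of the table:
// AlignUp finds the first page boundary at or above the live entries, and
// madvise(MADV_DONTNEED) tells the kernel it may reclaim everything from
// there to the end of the mapping. The virtual mapping stays intact, so a
// later Add() can fault the pages back in on demand.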

void IndirectReferenceTable::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) {
  BufferedRootVisitor<kDefaultBufferedRootCount> root_visitor(visitor, root_info);
  for (auto ref : *this) {
    if (!ref->IsNull()) {
      root_visitor.VisitRoot(*ref);
      DCHECK(!ref->IsNull());
    }
  }
}

void IndirectReferenceTable::Dump(std::ostream& os) const {
  os << kind_ << " table dump:\n";
  ReferenceTable::Table entries;
  for (size_t i = 0; i < Capacity(); ++i) {
    mirror::Object* obj = table_[i].GetReference()->Read<kWithoutReadBarrier>();
    if (obj != nullptr) {
      obj = table_[i].GetReference()->Read();
      entries.push_back(GcRoot<mirror::Object>(obj));
    }
  }
  ReferenceTable::Dump(os, entries);
}

}  // namespace art