/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reg_type_cache-inl.h"

#include "base/arena_bit_vector.h"
#include "base/bit_vector-inl.h"
#include "base/casts.h"
#include "base/scoped_arena_allocator.h"
#include "base/stl_util.h"
#include "class_linker-inl.h"
#include "dex_file-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "reg_type-inl.h"

namespace art {
namespace verifier {

bool RegTypeCache::primitive_initialized_ = false;
uint16_t RegTypeCache::primitive_count_ = 0;
const PreciseConstType* RegTypeCache::small_precise_constants_[kMaxSmallConstant -
                                                               kMinSmallConstant + 1];

ALWAYS_INLINE static inline bool MatchingPrecisionForClass(const RegType* entry, bool precise)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (entry->IsPreciseReference() == precise) {
    // We were or weren't looking for a precise reference and we found what we need.
    return true;
  } else {
    if (!precise && entry->GetClass()->CannotBeAssignedFromOtherTypes()) {
      // We weren't looking for a precise reference, as we're looking up based on a descriptor, but
      // we found a matching entry based on the descriptor. Return the precise entry in that case.
      return true;
    }
    return false;
  }
}

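// Seed the cache with the shared primitive singletons and the preallocated small constants.
// The insertion order must match the ids handed out in CreatePrimitiveAndSmallConstantTypes(),
// which the DCHECKs below check.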
void RegTypeCache::FillPrimitiveAndSmallConstantTypes() {
  entries_.push_back(UndefinedType::GetInstance());
  entries_.push_back(ConflictType::GetInstance());
  entries_.push_back(BooleanType::GetInstance());
  entries_.push_back(ByteType::GetInstance());
  entries_.push_back(ShortType::GetInstance());
  entries_.push_back(CharType::GetInstance());
  entries_.push_back(IntegerType::GetInstance());
  entries_.push_back(LongLoType::GetInstance());
  entries_.push_back(LongHiType::GetInstance());
  entries_.push_back(FloatType::GetInstance());
  entries_.push_back(DoubleLoType::GetInstance());
  entries_.push_back(DoubleHiType::GetInstance());
  for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
    int32_t i = value - kMinSmallConstant;
    DCHECK_EQ(entries_.size(), small_precise_constants_[i]->GetId());
    entries_.push_back(small_precise_constants_[i]);
  }
  DCHECK_EQ(entries_.size(), primitive_count_);
}

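// Single-character descriptors map directly to the primitive singletons; class ('L') and array
// ('[') descriptors go through the reference cache, and anything else resolves to Conflict.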
const RegType& RegTypeCache::FromDescriptor(mirror::ClassLoader* loader,
                                            const char* descriptor,
                                            bool precise) {
  DCHECK(RegTypeCache::primitive_initialized_);
  if (descriptor[1] == '\0') {
    switch (descriptor[0]) {
      case 'Z':
        return Boolean();
      case 'B':
        return Byte();
      case 'S':
        return Short();
      case 'C':
        return Char();
      case 'I':
        return Integer();
      case 'J':
        return LongLo();
      case 'F':
        return Float();
      case 'D':
        return DoubleLo();
      case 'V':  // For void types, conflict types.
      default:
        return Conflict();
    }
  } else if (descriptor[0] == 'L' || descriptor[0] == '[') {
    return From(loader, descriptor, precise);
  } else {
    return Conflict();
  }
}

const RegType& RegTypeCache::RegTypeFromPrimitiveType(Primitive::Type prim_type) const {
  DCHECK(RegTypeCache::primitive_initialized_);
  switch (prim_type) {
    case Primitive::kPrimBoolean:
      return *BooleanType::GetInstance();
    case Primitive::kPrimByte:
      return *ByteType::GetInstance();
    case Primitive::kPrimShort:
      return *ShortType::GetInstance();
    case Primitive::kPrimChar:
      return *CharType::GetInstance();
    case Primitive::kPrimInt:
      return *IntegerType::GetInstance();
    case Primitive::kPrimLong:
      return *LongLoType::GetInstance();
    case Primitive::kPrimFloat:
      return *FloatType::GetInstance();
    case Primitive::kPrimDouble:
      return *DoubleLoType::GetInstance();
    case Primitive::kPrimVoid:
    default:
      return *ConflictType::GetInstance();
  }
}

bool RegTypeCache::MatchDescriptor(size_t idx, const StringPiece& descriptor, bool precise) {
  const RegType* entry = entries_[idx];
  if (descriptor != entry->descriptor_) {
    return false;
  }
  if (entry->HasClass()) {
    return MatchingPrecisionForClass(entry, precise);
  }
  // There is no notion of precise unresolved references; the precise information is just dropped
  // on the floor.
  DCHECK(entry->IsUnresolvedReference());
  return true;
}

mirror::Class* RegTypeCache::ResolveClass(const char* descriptor, mirror::ClassLoader* loader) {
  // Class was not found, must create new type.
  // Try resolving the class.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(loader));
  mirror::Class* klass = nullptr;
  if (can_load_classes_) {
    klass = class_linker->FindClass(self, descriptor, class_loader);
  } else {
    klass = class_linker->LookupClass(self, descriptor, loader);
    if (klass != nullptr && !klass->IsResolved()) {
      // We found the class but without it being loaded it's not safe for use.
      klass = nullptr;
    }
  }
  return klass;
}

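// Copy the string data into the verifier arena so the returned StringPiece does not depend on
// the lifetime of the caller's buffer.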
StringPiece RegTypeCache::AddString(const StringPiece& string_piece) {
  char* ptr = arena_.AllocArray<char>(string_piece.length());
  memcpy(ptr, string_piece.data(), string_piece.length());
  return StringPiece(ptr, string_piece.length());
}

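// Look the descriptor up in the non-primitive part of the cache; on a miss, resolve the class
// and add a new precise, imprecise, or unresolved reference entry.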
const RegType& RegTypeCache::From(mirror::ClassLoader* loader,
                                  const char* descriptor,
                                  bool precise) {
  StringPiece sp_descriptor(descriptor);
  // Try looking up the class in the cache first. We use a StringPiece to avoid continual strlen
  // operations on the descriptor.
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    if (MatchDescriptor(i, sp_descriptor, precise)) {
      return *(entries_[i]);
    }
  }
  // Class not found in the cache, will create a new type for that.
  // Try resolving the class.
  mirror::Class* klass = ResolveClass(descriptor, loader);
  if (klass != nullptr) {
    // Class resolved, must create a new entry for it.
    // To pass verification, the type should be imprecise, instantiable, or an interface with the
    // precise flag set to false.
    DCHECK(!precise || klass->IsInstantiable());
    // Create a precise type if:
    // 1- the class is final and NOT an interface (a precise interface is meaningless), or
    // 2- the precise flag was passed as true.
    RegType* entry;
    // Create an imprecise type if we can't tell for a fact that it is precise.
    if (klass->CannotBeAssignedFromOtherTypes() || precise) {
      DCHECK(!(klass->IsAbstract()) || klass->IsArrayClass());
      DCHECK(!klass->IsInterface());
      entry = new (&arena_) PreciseReferenceType(klass, AddString(sp_descriptor), entries_.size());
    } else {
      entry = new (&arena_) ReferenceType(klass, AddString(sp_descriptor), entries_.size());
    }
    return AddEntry(entry);
  } else {  // Class not resolved.
    // We tried loading the class and failed; this may have raised an exception, so clear it
    // before we go on.
    if (can_load_classes_) {
      DCHECK(Thread::Current()->IsExceptionPending());
      Thread::Current()->ClearException();
    } else {
      DCHECK(!Thread::Current()->IsExceptionPending());
    }
    if (IsValidDescriptor(descriptor)) {
      return AddEntry(
          new (&arena_) UnresolvedReferenceType(AddString(sp_descriptor), entries_.size()));
    } else {
      // The descriptor is broken; return the unknown type as there's nothing sensible that
      // could be done at runtime.
      return Conflict();
    }
  }
}

const RegType& RegTypeCache::MakeUnresolvedReference() {
  // The descriptor is intentionally invalid so nothing else will match this type.
  return AddEntry(new (&arena_) UnresolvedReferenceType(AddString("a"), entries_.size()));
}

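// Look up a cached type for the given class: primitive classes map to their type singletons;
// otherwise klass_entries_ is scanned for an entry with a matching class and precision, and
// null is returned on a miss.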
const RegType* RegTypeCache::FindClass(mirror::Class* klass, bool precise) const {
  DCHECK(klass != nullptr);
  if (klass->IsPrimitive()) {
    // Note: precise isn't used for primitive classes. A char is assignable to an int. All
    // primitive classes are final.
    return &RegTypeFromPrimitiveType(klass->GetPrimitiveType());
  }
  for (auto& pair : klass_entries_) {
    mirror::Class* const reg_klass = pair.first.Read();
    if (reg_klass == klass) {
      const RegType* reg_type = pair.second;
      if (MatchingPrecisionForClass(reg_type, precise)) {
        return reg_type;
      }
    }
  }
  return nullptr;
}

const RegType* RegTypeCache::InsertClass(const StringPiece& descriptor,
                                         mirror::Class* klass,
                                         bool precise) {
  // No reference to the class was found, create new reference.
  DCHECK(FindClass(klass, precise) == nullptr);
  RegType* const reg_type = precise
      ? static_cast<RegType*>(
          new (&arena_) PreciseReferenceType(klass, descriptor, entries_.size()))
      : new (&arena_) ReferenceType(klass, descriptor, entries_.size());
  return &AddEntry(reg_type);
}

const RegType& RegTypeCache::FromClass(const char* descriptor, mirror::Class* klass, bool precise) {
  DCHECK(klass != nullptr);
  const RegType* reg_type = FindClass(klass, precise);
  if (reg_type == nullptr) {
    reg_type = InsertClass(AddString(StringPiece(descriptor)), klass, precise);
  }
  return *reg_type;
}

RegTypeCache::RegTypeCache(bool can_load_classes, ScopedArenaAllocator& arena)
    : entries_(arena.Adapter(kArenaAllocVerifier)),
      klass_entries_(arena.Adapter(kArenaAllocVerifier)),
      can_load_classes_(can_load_classes),
      arena_(arena) {
  if (kIsDebugBuild) {
    Thread::Current()->AssertThreadSuspensionIsAllowable(gAborting == 0);
  }
  // The klass_entries_ array does not have primitives or small constants.
  static constexpr size_t kNumReserveEntries = 32;
  klass_entries_.reserve(kNumReserveEntries);
  // We want to have room for additional entries after inserting primitives and small
  // constants.
  entries_.reserve(kNumReserveEntries + kNumPrimitivesAndSmallConstants);
  FillPrimitiveAndSmallConstantTypes();
}

RegTypeCache::~RegTypeCache() {
  DCHECK_LE(primitive_count_, entries_.size());
}

void RegTypeCache::ShutDown() {
  if (RegTypeCache::primitive_initialized_) {
    UndefinedType::Destroy();
    ConflictType::Destroy();
    BooleanType::Destroy();
    ByteType::Destroy();
    ShortType::Destroy();
    CharType::Destroy();
    IntegerType::Destroy();
    LongLoType::Destroy();
    LongHiType::Destroy();
    FloatType::Destroy();
    DoubleLoType::Destroy();
    DoubleHiType::Destroy();
    for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
      const PreciseConstType* type = small_precise_constants_[value - kMinSmallConstant];
      delete type;
      small_precise_constants_[value - kMinSmallConstant] = nullptr;
    }
    RegTypeCache::primitive_initialized_ = false;
    RegTypeCache::primitive_count_ = 0;
  }
}

template <class Type>
const Type* RegTypeCache::CreatePrimitiveTypeInstance(const std::string& descriptor) {
  mirror::Class* klass = nullptr;
  // Try loading the class from linker.
  if (!descriptor.empty()) {
    klass = art::Runtime::Current()->GetClassLinker()->FindSystemClass(Thread::Current(),
                                                                       descriptor.c_str());
    DCHECK(klass != nullptr);
  }
  const Type* entry = Type::CreateInstance(klass, descriptor, RegTypeCache::primitive_count_);
  RegTypeCache::primitive_count_++;
  return entry;
}

void RegTypeCache::CreatePrimitiveAndSmallConstantTypes() {
  CreatePrimitiveTypeInstance<UndefinedType>("");
  CreatePrimitiveTypeInstance<ConflictType>("");
  CreatePrimitiveTypeInstance<BooleanType>("Z");
  CreatePrimitiveTypeInstance<ByteType>("B");
  CreatePrimitiveTypeInstance<ShortType>("S");
  CreatePrimitiveTypeInstance<CharType>("C");
  CreatePrimitiveTypeInstance<IntegerType>("I");
  CreatePrimitiveTypeInstance<LongLoType>("J");
  CreatePrimitiveTypeInstance<LongHiType>("J");
  CreatePrimitiveTypeInstance<FloatType>("F");
  CreatePrimitiveTypeInstance<DoubleLoType>("D");
  CreatePrimitiveTypeInstance<DoubleHiType>("D");
  for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
    PreciseConstType* type = new PreciseConstType(value, primitive_count_);
    small_precise_constants_[value - kMinSmallConstant] = type;
    primitive_count_++;
  }
}

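// Compute the merge of two register types when at least one side is unresolved. The unresolved
// contributors are collected as a bit vector of type ids and the resolved contributors are merged
// separately; the result is either a cached or new UnresolvedMergedType, java.lang.Object when
// array and non-array (or primitive-array) parts have to be merged, or Conflict.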
const RegType& RegTypeCache::FromUnresolvedMerge(const RegType& left,
                                                 const RegType& right,
                                                 MethodVerifier* verifier) {
  ArenaBitVector types(&arena_,
                       kDefaultArenaBitVectorBytes * kBitsPerByte,  // Allocate at least 8 bytes.
                       true);                                       // Is expandable.
  const RegType* left_resolved;
  bool left_unresolved_is_array;
  if (left.IsUnresolvedMergedReference()) {
    const UnresolvedMergedType& left_merge = *down_cast<const UnresolvedMergedType*>(&left);

    types.Copy(&left_merge.GetUnresolvedTypes());
    left_resolved = &left_merge.GetResolvedPart();
    left_unresolved_is_array = left.IsArrayTypes();
  } else if (left.IsUnresolvedTypes()) {
    types.ClearAllBits();
    types.SetBit(left.GetId());
    left_resolved = &Zero();
    left_unresolved_is_array = left.IsArrayTypes();
  } else {
    types.ClearAllBits();
    left_resolved = &left;
    left_unresolved_is_array = false;
  }

  const RegType* right_resolved;
  bool right_unresolved_is_array;
  if (right.IsUnresolvedMergedReference()) {
    const UnresolvedMergedType& right_merge = *down_cast<const UnresolvedMergedType*>(&right);

    types.Union(&right_merge.GetUnresolvedTypes());
    right_resolved = &right_merge.GetResolvedPart();
    right_unresolved_is_array = right.IsArrayTypes();
  } else if (right.IsUnresolvedTypes()) {
    types.SetBit(right.GetId());
    right_resolved = &Zero();
    right_unresolved_is_array = right.IsArrayTypes();
  } else {
    right_resolved = &right;
    right_unresolved_is_array = false;
  }

  // Merge the resolved parts. Left and right might be equal, so use SafeMerge.
  const RegType& resolved_parts_merged = left_resolved->SafeMerge(*right_resolved, this, verifier);
  // If we get a conflict here, the merge result is a conflict, not an unresolved merge type.
  if (resolved_parts_merged.IsConflict()) {
    return Conflict();
  }

  bool resolved_merged_is_array = resolved_parts_merged.IsArrayTypes();
  if (left_unresolved_is_array || right_unresolved_is_array || resolved_merged_is_array) {
    // Arrays involved, see if we need to merge to Object.

    // Is the resolved part a primitive array?
    if (resolved_merged_is_array && !resolved_parts_merged.IsObjectArrayTypes()) {
      return JavaLangObject(false /* precise */);
    }

    // Is any part not an array (but exists)?
    if ((!left_unresolved_is_array && left_resolved != &left) ||
        (!right_unresolved_is_array && right_resolved != &right) ||
        !resolved_merged_is_array) {
      return JavaLangObject(false /* precise */);
    }
  }

  // Check if entry already exists.
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsUnresolvedMergedReference()) {
      const UnresolvedMergedType* cmp_type = down_cast<const UnresolvedMergedType*>(cur_entry);
      const RegType& resolved_part = cmp_type->GetResolvedPart();
      const BitVector& unresolved_part = cmp_type->GetUnresolvedTypes();
      // Use SameBitsSet. "types" is expandable to allow merging in the components, but the
      // BitVector in the final RegType will be made non-expandable.
      if (&resolved_part == &resolved_parts_merged && types.SameBitsSet(&unresolved_part)) {
        return *cur_entry;
      }
    }
  }
  return AddEntry(new (&arena_) UnresolvedMergedType(resolved_parts_merged,
                                                     types,
                                                     this,
                                                     entries_.size()));
}

const RegType& RegTypeCache::FromUnresolvedSuperClass(const RegType& child) {
  // Check if entry already exists.
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsUnresolvedSuperClass()) {
      const UnresolvedSuperClass* tmp_entry =
          down_cast<const UnresolvedSuperClass*>(cur_entry);
      uint16_t unresolved_super_child_id =
          tmp_entry->GetUnresolvedSuperClassChildId();
      if (unresolved_super_child_id == child.GetId()) {
        return *cur_entry;
      }
    }
  }
  return AddEntry(new (&arena_) UnresolvedSuperClass(child.GetId(), this, entries_.size()));
}

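// Return the uninitialized type produced by an allocation at the given allocation pc, reusing an
// existing entry when the descriptor (or class) and allocation pc both match.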
const UninitializedType& RegTypeCache::Uninitialized(const RegType& type, uint32_t allocation_pc) {
  UninitializedType* entry = nullptr;
  const StringPiece& descriptor(type.GetDescriptor());
  if (type.IsUnresolvedTypes()) {
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedAndUninitializedReference() &&
          down_cast<const UnresolvedUninitializedRefType*>(cur_entry)->GetAllocationPc()
              == allocation_pc &&
          (cur_entry->GetDescriptor() == descriptor)) {
        return *down_cast<const UnresolvedUninitializedRefType*>(cur_entry);
      }
    }
    entry = new (&arena_) UnresolvedUninitializedRefType(descriptor,
                                                         allocation_pc,
                                                         entries_.size());
  } else {
    mirror::Class* klass = type.GetClass();
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUninitializedReference() &&
          down_cast<const UninitializedReferenceType*>(cur_entry)
              ->GetAllocationPc() == allocation_pc &&
          cur_entry->GetClass() == klass) {
        return *down_cast<const UninitializedReferenceType*>(cur_entry);
      }
    }
    entry = new (&arena_) UninitializedReferenceType(klass,
                                                     descriptor,
                                                     allocation_pc,
                                                     entries_.size());
  }
  return AddEntry(entry);
}

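// Map an uninitialized type to the corresponding initialized type: unresolved types stay
// unresolved, an uninitialized "this" of a non-final class becomes an imprecise reference, and
// allocation results become precise references.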
const RegType& RegTypeCache::FromUninitialized(const RegType& uninit_type) {
  RegType* entry;

  if (uninit_type.IsUnresolvedTypes()) {
    const StringPiece& descriptor(uninit_type.GetDescriptor());
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedReference() &&
          cur_entry->GetDescriptor() == descriptor) {
        return *cur_entry;
      }
    }
    entry = new (&arena_) UnresolvedReferenceType(descriptor, entries_.size());
  } else {
    mirror::Class* klass = uninit_type.GetClass();
    if (uninit_type.IsUninitializedThisReference() && !klass->IsFinal()) {
      // For an uninitialized "this" reference, look for reference types that are not precise.
      for (size_t i = primitive_count_; i < entries_.size(); i++) {
        const RegType* cur_entry = entries_[i];
        if (cur_entry->IsReference() && cur_entry->GetClass() == klass) {
          return *cur_entry;
        }
      }
      entry = new (&arena_) ReferenceType(klass, "", entries_.size());
    } else if (!klass->IsPrimitive()) {
      // We're uninitialized because of allocation, look for or create a precise type, as
      // allocations may only create objects of that type.
      // Note: we do not check whether the given klass is actually instantiable (besides being
      //       primitive), that is, we allow interfaces and abstract classes here. The reasoning is
      //       twofold:
      //       1) The "new-instance" instruction to generate the uninitialized type will already
      //          queue an instantiation error. This is a soft error that must be thrown at runtime,
      //          and could potentially change if the class is resolved differently at runtime.
      //       2) Checking whether the klass is instantiable and using conflict may produce a hard
      //          error when the value is used, which leads to a VerifyError, which is not the
      //          correct semantics.
      for (size_t i = primitive_count_; i < entries_.size(); i++) {
        const RegType* cur_entry = entries_[i];
        if (cur_entry->IsPreciseReference() && cur_entry->GetClass() == klass) {
          return *cur_entry;
        }
      }
      entry = new (&arena_) PreciseReferenceType(klass,
                                                 uninit_type.GetDescriptor(),
                                                 entries_.size());
    } else {
      return Conflict();
    }
  }
  return AddEntry(entry);
}

const UninitializedType& RegTypeCache::UninitializedThisArgument(const RegType& type) {
  UninitializedType* entry;
  const StringPiece& descriptor(type.GetDescriptor());
  if (type.IsUnresolvedTypes()) {
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedAndUninitializedThisReference() &&
          cur_entry->GetDescriptor() == descriptor) {
        return *down_cast<const UninitializedType*>(cur_entry);
      }
    }
    entry = new (&arena_) UnresolvedUninitializedThisRefType(descriptor, entries_.size());
  } else {
    mirror::Class* klass = type.GetClass();
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUninitializedThisReference() && cur_entry->GetClass() == klass) {
        return *down_cast<const UninitializedType*>(cur_entry);
      }
    }
    entry = new (&arena_) UninitializedThisReferenceType(klass, descriptor, entries_.size());
  }
  return AddEntry(entry);
}

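// Category-1 constants outside the preallocated small range are cached on demand, keyed by value
// and precision.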
const ConstantType& RegTypeCache::FromCat1NonSmallConstant(int32_t value, bool precise) {
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->klass_.IsNull() && cur_entry->IsConstant() &&
        cur_entry->IsPreciseConstant() == precise &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValue() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&arena_) PreciseConstType(value, entries_.size());
  } else {
    entry = new (&arena_) ImpreciseConstType(value, entries_.size());
  }
  return AddEntry(entry);
}

const ConstantType& RegTypeCache::FromCat2ConstLo(int32_t value, bool precise) {
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsConstantLo() && (cur_entry->IsPrecise() == precise) &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValueLo() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&arena_) PreciseConstLoType(value, entries_.size());
  } else {
    entry = new (&arena_) ImpreciseConstLoType(value, entries_.size());
  }
  return AddEntry(entry);
}

const ConstantType& RegTypeCache::FromCat2ConstHi(int32_t value, bool precise) {
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsConstantHi() && (cur_entry->IsPrecise() == precise) &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValueHi() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&arena_) PreciseConstHiType(value, entries_.size());
  } else {
    entry = new (&arena_) ImpreciseConstHiType(value, entries_.size());
  }
  return AddEntry(entry);
}

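// Return the register type of an array's component, falling back to a descriptor-based lookup
// when the array type is unresolved or its component class is erroneous.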
const RegType& RegTypeCache::GetComponentType(const RegType& array, mirror::ClassLoader* loader) {
  if (!array.IsArrayTypes()) {
    return Conflict();
  } else if (array.IsUnresolvedTypes()) {
    DCHECK(!array.IsUnresolvedMergedReference());  // Caller must make sure not to ask for this.
    const std::string descriptor(array.GetDescriptor().as_string());
    return FromDescriptor(loader, descriptor.c_str() + 1, false);
  } else {
    mirror::Class* klass = array.GetClass()->GetComponentType();
    std::string temp;
    const char* descriptor = klass->GetDescriptor(&temp);
    if (klass->IsErroneous()) {
      // Arrays may have erroneous component types; use unresolved in that case.
      // We assume that the primitive classes are not erroneous, so we know it is a
      // reference type.
      return FromDescriptor(loader, descriptor, false);
    } else {
      return FromClass(descriptor, klass, klass->CannotBeAssignedFromOtherTypes());
    }
  }
}

void RegTypeCache::Dump(std::ostream& os) {
  for (size_t i = 0; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry != nullptr) {
      os << i << ": " << cur_entry->Dump() << "\n";
    }
  }
}

void RegTypeCache::VisitStaticRoots(RootVisitor* visitor) {
  // Visit the primitive types. This is required since, if there are no active verifiers, they
  // won't be in the entries array and therefore won't be visited as roots.
  if (primitive_initialized_) {
    RootInfo ri(kRootUnknown);
    UndefinedType::GetInstance()->VisitRoots(visitor, ri);
    ConflictType::GetInstance()->VisitRoots(visitor, ri);
    BooleanType::GetInstance()->VisitRoots(visitor, ri);
    ByteType::GetInstance()->VisitRoots(visitor, ri);
    ShortType::GetInstance()->VisitRoots(visitor, ri);
    CharType::GetInstance()->VisitRoots(visitor, ri);
    IntegerType::GetInstance()->VisitRoots(visitor, ri);
    LongLoType::GetInstance()->VisitRoots(visitor, ri);
    LongHiType::GetInstance()->VisitRoots(visitor, ri);
    FloatType::GetInstance()->VisitRoots(visitor, ri);
    DoubleLoType::GetInstance()->VisitRoots(visitor, ri);
    DoubleHiType::GetInstance()->VisitRoots(visitor, ri);
    for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
      small_precise_constants_[value - kMinSmallConstant]->VisitRoots(visitor, ri);
    }
  }
}

void RegTypeCache::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) {
  // Exclude the static roots that are visited by VisitStaticRoots().
  for (size_t i = primitive_count_; i < entries_.size(); ++i) {
    entries_[i]->VisitRoots(visitor, root_info);
  }
  for (auto& pair : klass_entries_) {
    GcRoot<mirror::Class>& root = pair.first;
    root.VisitRoot(visitor, root_info);
  }
}

}  // namespace verifier
}  // namespace art