/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reg_type.h"

#include <queue>
#include <set>
#include <string>
#include <unordered_set>
#include <vector>

#include "base/bit_vector.h"
#include "base/casts.h"
#include "base/scoped_arena_allocator.h"
#include "common_runtime_test.h"
#include "compiler_callbacks.h"
#include "reg_type-inl.h"
#include "reg_type_cache-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"

namespace art {
namespace verifier {

class RegTypeTest : public CommonRuntimeTest {
 public:
  RegTypeTest() {
    use_boot_image_ = true;  // Make the Runtime creation cheaper.
  }
};

TEST_F(RegTypeTest, ConstLoHi) {
  // Tests creating primitive constant types.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& ref_type_const_0 = cache.FromCat1Const(10, true);
  const RegType& ref_type_const_1 = cache.FromCat1Const(10, true);
  const RegType& ref_type_const_2 = cache.FromCat1Const(30, true);
  const RegType& ref_type_const_3 = cache.FromCat1Const(30, false);
  EXPECT_TRUE(ref_type_const_0.Equals(ref_type_const_1));
  EXPECT_FALSE(ref_type_const_0.Equals(ref_type_const_2));
  EXPECT_FALSE(ref_type_const_0.Equals(ref_type_const_3));

  const RegType& ref_type_const_wide_0 = cache.FromCat2ConstHi(50, true);
  const RegType& ref_type_const_wide_1 = cache.FromCat2ConstHi(50, true);
  EXPECT_TRUE(ref_type_const_wide_0.Equals(ref_type_const_wide_1));

  const RegType& ref_type_const_wide_2 = cache.FromCat2ConstLo(50, true);
  const RegType& ref_type_const_wide_3 = cache.FromCat2ConstLo(50, true);
  const RegType& ref_type_const_wide_4 = cache.FromCat2ConstLo(55, true);
  EXPECT_TRUE(ref_type_const_wide_2.Equals(ref_type_const_wide_3));
  EXPECT_FALSE(ref_type_const_wide_2.Equals(ref_type_const_wide_4));
}

TEST_F(RegTypeTest, Pairs) {
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  int64_t val = static_cast<int32_t>(1234);
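  // Split the 64-bit value into category-2 low/high halves; for val == 1234 the high 32 bits
  // are zero, so precise_hi and precise_const below both wrap the constant 0.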
  const RegType& precise_lo = cache.FromCat2ConstLo(static_cast<int32_t>(val), true);
  const RegType& precise_hi = cache.FromCat2ConstHi(static_cast<int32_t>(val >> 32), true);
  const RegType& precise_const = cache.FromCat1Const(static_cast<int32_t>(val >> 32), true);
  const RegType& long_lo = cache.LongLo();
  const RegType& long_hi = cache.LongHi();
  // Check the expectations for types.
  EXPECT_TRUE(precise_lo.IsLowHalf());
  EXPECT_FALSE(precise_hi.IsLowHalf());
  EXPECT_FALSE(precise_lo.IsHighHalf());
  EXPECT_TRUE(precise_hi.IsHighHalf());
  EXPECT_TRUE(long_hi.IsLongHighTypes());
  EXPECT_TRUE(precise_hi.IsLongHighTypes());
  // Check Pairing.
  EXPECT_FALSE(precise_lo.CheckWidePair(precise_const));
  EXPECT_TRUE(precise_lo.CheckWidePair(precise_hi));
  // Test Merging.
  EXPECT_TRUE((long_lo.Merge(precise_lo, &cache, /* verifier= */ nullptr)).IsLongTypes());
  EXPECT_TRUE((long_hi.Merge(precise_hi, &cache, /* verifier= */ nullptr)).IsLongHighTypes());
}

TEST_F(RegTypeTest, Primitives) {
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);

  const RegType& bool_reg_type = cache.Boolean();
  EXPECT_FALSE(bool_reg_type.IsUndefined());
  EXPECT_FALSE(bool_reg_type.IsConflict());
  EXPECT_FALSE(bool_reg_type.IsZero());
  EXPECT_FALSE(bool_reg_type.IsOne());
  EXPECT_FALSE(bool_reg_type.IsLongConstant());
  EXPECT_TRUE(bool_reg_type.IsBoolean());
  EXPECT_FALSE(bool_reg_type.IsByte());
  EXPECT_FALSE(bool_reg_type.IsChar());
  EXPECT_FALSE(bool_reg_type.IsShort());
  EXPECT_FALSE(bool_reg_type.IsInteger());
  EXPECT_FALSE(bool_reg_type.IsLong());
  EXPECT_FALSE(bool_reg_type.IsFloat());
  EXPECT_FALSE(bool_reg_type.IsDouble());
  EXPECT_FALSE(bool_reg_type.IsReference());
  EXPECT_FALSE(bool_reg_type.IsLowHalf());
  EXPECT_FALSE(bool_reg_type.IsHighHalf());
  EXPECT_FALSE(bool_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(bool_reg_type.IsReferenceTypes());
  EXPECT_TRUE(bool_reg_type.IsCategory1Types());
  EXPECT_FALSE(bool_reg_type.IsCategory2Types());
  EXPECT_TRUE(bool_reg_type.IsBooleanTypes());
  EXPECT_TRUE(bool_reg_type.IsByteTypes());
  EXPECT_TRUE(bool_reg_type.IsShortTypes());
  EXPECT_TRUE(bool_reg_type.IsCharTypes());
  EXPECT_TRUE(bool_reg_type.IsIntegralTypes());
  EXPECT_FALSE(bool_reg_type.IsFloatTypes());
  EXPECT_FALSE(bool_reg_type.IsLongTypes());
  EXPECT_FALSE(bool_reg_type.IsDoubleTypes());
  EXPECT_TRUE(bool_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(bool_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(bool_reg_type.HasClass());

  const RegType& byte_reg_type = cache.Byte();
  EXPECT_FALSE(byte_reg_type.IsUndefined());
  EXPECT_FALSE(byte_reg_type.IsConflict());
  EXPECT_FALSE(byte_reg_type.IsZero());
  EXPECT_FALSE(byte_reg_type.IsOne());
  EXPECT_FALSE(byte_reg_type.IsLongConstant());
  EXPECT_FALSE(byte_reg_type.IsBoolean());
  EXPECT_TRUE(byte_reg_type.IsByte());
  EXPECT_FALSE(byte_reg_type.IsChar());
  EXPECT_FALSE(byte_reg_type.IsShort());
  EXPECT_FALSE(byte_reg_type.IsInteger());
  EXPECT_FALSE(byte_reg_type.IsLong());
  EXPECT_FALSE(byte_reg_type.IsFloat());
  EXPECT_FALSE(byte_reg_type.IsDouble());
  EXPECT_FALSE(byte_reg_type.IsReference());
  EXPECT_FALSE(byte_reg_type.IsLowHalf());
  EXPECT_FALSE(byte_reg_type.IsHighHalf());
  EXPECT_FALSE(byte_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(byte_reg_type.IsReferenceTypes());
  EXPECT_TRUE(byte_reg_type.IsCategory1Types());
  EXPECT_FALSE(byte_reg_type.IsCategory2Types());
  EXPECT_FALSE(byte_reg_type.IsBooleanTypes());
  EXPECT_TRUE(byte_reg_type.IsByteTypes());
  EXPECT_TRUE(byte_reg_type.IsShortTypes());
  EXPECT_FALSE(byte_reg_type.IsCharTypes());
  EXPECT_TRUE(byte_reg_type.IsIntegralTypes());
  EXPECT_FALSE(byte_reg_type.IsFloatTypes());
  EXPECT_FALSE(byte_reg_type.IsLongTypes());
  EXPECT_FALSE(byte_reg_type.IsDoubleTypes());
  EXPECT_TRUE(byte_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(byte_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(byte_reg_type.HasClass());

  const RegType& char_reg_type = cache.Char();
  EXPECT_FALSE(char_reg_type.IsUndefined());
  EXPECT_FALSE(char_reg_type.IsConflict());
  EXPECT_FALSE(char_reg_type.IsZero());
  EXPECT_FALSE(char_reg_type.IsOne());
  EXPECT_FALSE(char_reg_type.IsLongConstant());
  EXPECT_FALSE(char_reg_type.IsBoolean());
  EXPECT_FALSE(char_reg_type.IsByte());
  EXPECT_TRUE(char_reg_type.IsChar());
  EXPECT_FALSE(char_reg_type.IsShort());
  EXPECT_FALSE(char_reg_type.IsInteger());
  EXPECT_FALSE(char_reg_type.IsLong());
  EXPECT_FALSE(char_reg_type.IsFloat());
  EXPECT_FALSE(char_reg_type.IsDouble());
  EXPECT_FALSE(char_reg_type.IsReference());
  EXPECT_FALSE(char_reg_type.IsLowHalf());
  EXPECT_FALSE(char_reg_type.IsHighHalf());
  EXPECT_FALSE(char_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(char_reg_type.IsReferenceTypes());
  EXPECT_TRUE(char_reg_type.IsCategory1Types());
  EXPECT_FALSE(char_reg_type.IsCategory2Types());
  EXPECT_FALSE(char_reg_type.IsBooleanTypes());
  EXPECT_FALSE(char_reg_type.IsByteTypes());
  EXPECT_FALSE(char_reg_type.IsShortTypes());
  EXPECT_TRUE(char_reg_type.IsCharTypes());
  EXPECT_TRUE(char_reg_type.IsIntegralTypes());
  EXPECT_FALSE(char_reg_type.IsFloatTypes());
  EXPECT_FALSE(char_reg_type.IsLongTypes());
  EXPECT_FALSE(char_reg_type.IsDoubleTypes());
  EXPECT_TRUE(char_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(char_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(char_reg_type.HasClass());

  const RegType& short_reg_type = cache.Short();
  EXPECT_FALSE(short_reg_type.IsUndefined());
  EXPECT_FALSE(short_reg_type.IsConflict());
  EXPECT_FALSE(short_reg_type.IsZero());
  EXPECT_FALSE(short_reg_type.IsOne());
  EXPECT_FALSE(short_reg_type.IsLongConstant());
  EXPECT_FALSE(short_reg_type.IsBoolean());
  EXPECT_FALSE(short_reg_type.IsByte());
  EXPECT_FALSE(short_reg_type.IsChar());
  EXPECT_TRUE(short_reg_type.IsShort());
  EXPECT_FALSE(short_reg_type.IsInteger());
  EXPECT_FALSE(short_reg_type.IsLong());
  EXPECT_FALSE(short_reg_type.IsFloat());
  EXPECT_FALSE(short_reg_type.IsDouble());
  EXPECT_FALSE(short_reg_type.IsReference());
  EXPECT_FALSE(short_reg_type.IsLowHalf());
  EXPECT_FALSE(short_reg_type.IsHighHalf());
  EXPECT_FALSE(short_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(short_reg_type.IsReferenceTypes());
  EXPECT_TRUE(short_reg_type.IsCategory1Types());
  EXPECT_FALSE(short_reg_type.IsCategory2Types());
  EXPECT_FALSE(short_reg_type.IsBooleanTypes());
  EXPECT_FALSE(short_reg_type.IsByteTypes());
  EXPECT_TRUE(short_reg_type.IsShortTypes());
  EXPECT_FALSE(short_reg_type.IsCharTypes());
  EXPECT_TRUE(short_reg_type.IsIntegralTypes());
  EXPECT_FALSE(short_reg_type.IsFloatTypes());
  EXPECT_FALSE(short_reg_type.IsLongTypes());
  EXPECT_FALSE(short_reg_type.IsDoubleTypes());
  EXPECT_TRUE(short_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(short_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(short_reg_type.HasClass());

  const RegType& int_reg_type = cache.Integer();
  EXPECT_FALSE(int_reg_type.IsUndefined());
  EXPECT_FALSE(int_reg_type.IsConflict());
  EXPECT_FALSE(int_reg_type.IsZero());
  EXPECT_FALSE(int_reg_type.IsOne());
  EXPECT_FALSE(int_reg_type.IsLongConstant());
  EXPECT_FALSE(int_reg_type.IsBoolean());
  EXPECT_FALSE(int_reg_type.IsByte());
  EXPECT_FALSE(int_reg_type.IsChar());
  EXPECT_FALSE(int_reg_type.IsShort());
  EXPECT_TRUE(int_reg_type.IsInteger());
  EXPECT_FALSE(int_reg_type.IsLong());
  EXPECT_FALSE(int_reg_type.IsFloat());
  EXPECT_FALSE(int_reg_type.IsDouble());
  EXPECT_FALSE(int_reg_type.IsReference());
  EXPECT_FALSE(int_reg_type.IsLowHalf());
  EXPECT_FALSE(int_reg_type.IsHighHalf());
  EXPECT_FALSE(int_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(int_reg_type.IsReferenceTypes());
  EXPECT_TRUE(int_reg_type.IsCategory1Types());
  EXPECT_FALSE(int_reg_type.IsCategory2Types());
  EXPECT_FALSE(int_reg_type.IsBooleanTypes());
  EXPECT_FALSE(int_reg_type.IsByteTypes());
  EXPECT_FALSE(int_reg_type.IsShortTypes());
  EXPECT_FALSE(int_reg_type.IsCharTypes());
  EXPECT_TRUE(int_reg_type.IsIntegralTypes());
  EXPECT_FALSE(int_reg_type.IsFloatTypes());
  EXPECT_FALSE(int_reg_type.IsLongTypes());
  EXPECT_FALSE(int_reg_type.IsDoubleTypes());
  EXPECT_TRUE(int_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(int_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(int_reg_type.HasClass());

  const RegType& long_reg_type = cache.LongLo();
  EXPECT_FALSE(long_reg_type.IsUndefined());
  EXPECT_FALSE(long_reg_type.IsConflict());
  EXPECT_FALSE(long_reg_type.IsZero());
  EXPECT_FALSE(long_reg_type.IsOne());
  EXPECT_FALSE(long_reg_type.IsLongConstant());
  EXPECT_FALSE(long_reg_type.IsBoolean());
  EXPECT_FALSE(long_reg_type.IsByte());
  EXPECT_FALSE(long_reg_type.IsChar());
  EXPECT_FALSE(long_reg_type.IsShort());
  EXPECT_FALSE(long_reg_type.IsInteger());
  EXPECT_TRUE(long_reg_type.IsLong());
  EXPECT_FALSE(long_reg_type.IsFloat());
  EXPECT_FALSE(long_reg_type.IsDouble());
  EXPECT_FALSE(long_reg_type.IsReference());
  EXPECT_TRUE(long_reg_type.IsLowHalf());
  EXPECT_FALSE(long_reg_type.IsHighHalf());
  EXPECT_TRUE(long_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(long_reg_type.IsReferenceTypes());
  EXPECT_FALSE(long_reg_type.IsCategory1Types());
  EXPECT_TRUE(long_reg_type.IsCategory2Types());
  EXPECT_FALSE(long_reg_type.IsBooleanTypes());
  EXPECT_FALSE(long_reg_type.IsByteTypes());
  EXPECT_FALSE(long_reg_type.IsShortTypes());
  EXPECT_FALSE(long_reg_type.IsCharTypes());
  EXPECT_FALSE(long_reg_type.IsIntegralTypes());
  EXPECT_FALSE(long_reg_type.IsFloatTypes());
  EXPECT_TRUE(long_reg_type.IsLongTypes());
  EXPECT_FALSE(long_reg_type.IsDoubleTypes());
  EXPECT_FALSE(long_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(long_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(long_reg_type.HasClass());

  const RegType& float_reg_type = cache.Float();
  EXPECT_FALSE(float_reg_type.IsUndefined());
  EXPECT_FALSE(float_reg_type.IsConflict());
  EXPECT_FALSE(float_reg_type.IsZero());
  EXPECT_FALSE(float_reg_type.IsOne());
  EXPECT_FALSE(float_reg_type.IsLongConstant());
  EXPECT_FALSE(float_reg_type.IsBoolean());
  EXPECT_FALSE(float_reg_type.IsByte());
  EXPECT_FALSE(float_reg_type.IsChar());
  EXPECT_FALSE(float_reg_type.IsShort());
  EXPECT_FALSE(float_reg_type.IsInteger());
  EXPECT_FALSE(float_reg_type.IsLong());
  EXPECT_TRUE(float_reg_type.IsFloat());
  EXPECT_FALSE(float_reg_type.IsDouble());
  EXPECT_FALSE(float_reg_type.IsReference());
  EXPECT_FALSE(float_reg_type.IsLowHalf());
  EXPECT_FALSE(float_reg_type.IsHighHalf());
  EXPECT_FALSE(float_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(float_reg_type.IsReferenceTypes());
  EXPECT_TRUE(float_reg_type.IsCategory1Types());
  EXPECT_FALSE(float_reg_type.IsCategory2Types());
  EXPECT_FALSE(float_reg_type.IsBooleanTypes());
  EXPECT_FALSE(float_reg_type.IsByteTypes());
  EXPECT_FALSE(float_reg_type.IsShortTypes());
  EXPECT_FALSE(float_reg_type.IsCharTypes());
  EXPECT_FALSE(float_reg_type.IsIntegralTypes());
  EXPECT_TRUE(float_reg_type.IsFloatTypes());
  EXPECT_FALSE(float_reg_type.IsLongTypes());
  EXPECT_FALSE(float_reg_type.IsDoubleTypes());
  EXPECT_FALSE(float_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(float_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(float_reg_type.HasClass());

  const RegType& double_reg_type = cache.DoubleLo();
  EXPECT_FALSE(double_reg_type.IsUndefined());
  EXPECT_FALSE(double_reg_type.IsConflict());
  EXPECT_FALSE(double_reg_type.IsZero());
  EXPECT_FALSE(double_reg_type.IsOne());
  EXPECT_FALSE(double_reg_type.IsLongConstant());
  EXPECT_FALSE(double_reg_type.IsBoolean());
  EXPECT_FALSE(double_reg_type.IsByte());
  EXPECT_FALSE(double_reg_type.IsChar());
  EXPECT_FALSE(double_reg_type.IsShort());
  EXPECT_FALSE(double_reg_type.IsInteger());
  EXPECT_FALSE(double_reg_type.IsLong());
  EXPECT_FALSE(double_reg_type.IsFloat());
  EXPECT_TRUE(double_reg_type.IsDouble());
  EXPECT_FALSE(double_reg_type.IsReference());
  EXPECT_TRUE(double_reg_type.IsLowHalf());
  EXPECT_FALSE(double_reg_type.IsHighHalf());
  EXPECT_TRUE(double_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(double_reg_type.IsReferenceTypes());
  EXPECT_FALSE(double_reg_type.IsCategory1Types());
  EXPECT_TRUE(double_reg_type.IsCategory2Types());
  EXPECT_FALSE(double_reg_type.IsBooleanTypes());
  EXPECT_FALSE(double_reg_type.IsByteTypes());
  EXPECT_FALSE(double_reg_type.IsShortTypes());
  EXPECT_FALSE(double_reg_type.IsCharTypes());
  EXPECT_FALSE(double_reg_type.IsIntegralTypes());
  EXPECT_FALSE(double_reg_type.IsFloatTypes());
  EXPECT_FALSE(double_reg_type.IsLongTypes());
  EXPECT_TRUE(double_reg_type.IsDoubleTypes());
  EXPECT_FALSE(double_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(double_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(double_reg_type.HasClass());
}

class RegTypeReferenceTest : public CommonRuntimeTest {};

TEST_F(RegTypeReferenceTest, JavalangObjectImprecise) {
  // Tests matching precisions. A reference type that was created precise doesn't
  // match the one that is imprecise.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& imprecise_obj = cache.JavaLangObject(false);
  const RegType& precise_obj = cache.JavaLangObject(true);
  const RegType& precise_obj_2 = cache.FromDescriptor(nullptr, "Ljava/lang/Object;", true);

  EXPECT_TRUE(precise_obj.Equals(precise_obj_2));
  EXPECT_FALSE(imprecise_obj.Equals(precise_obj));
  EXPECT_FALSE(imprecise_obj.Equals(precise_obj));
  EXPECT_FALSE(imprecise_obj.Equals(precise_obj_2));
}

TEST_F(RegTypeReferenceTest, UnresolvedType) {
  // Tests creating unresolved types. The first request misses in the cache;
  // the second one hits.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& ref_type_0 = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.IsUnresolvedReference());
  EXPECT_TRUE(ref_type_0.IsNonZeroReferenceTypes());

  const RegType& ref_type_1 = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.Equals(ref_type_1));

  const RegType& unresolved_super_class = cache.FromUnresolvedSuperClass(ref_type_0);
  EXPECT_TRUE(unresolved_super_class.IsUnresolvedSuperClass());
  EXPECT_TRUE(unresolved_super_class.IsNonZeroReferenceTypes());
}

TEST_F(RegTypeReferenceTest, UnresolvedUnintializedType) {
  // Tests creating uninitialized types from unresolved types.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& ref_type_0 = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.IsUnresolvedReference());
  const RegType& ref_type = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.Equals(ref_type));
  // Create an uninitialized type from this unresolved type.
  const RegType& unresolved_unintialised = cache.Uninitialized(ref_type, 1101ull);
  EXPECT_TRUE(unresolved_unintialised.IsUnresolvedAndUninitializedReference());
  EXPECT_TRUE(unresolved_unintialised.IsUninitializedTypes());
  EXPECT_TRUE(unresolved_unintialised.IsNonZeroReferenceTypes());
  // Create an uninitialized type from this unresolved type with a different allocation PC.
  const RegType& ref_type_unresolved_unintialised_1 = cache.Uninitialized(ref_type, 1102ull);
  EXPECT_TRUE(unresolved_unintialised.IsUnresolvedAndUninitializedReference());
  EXPECT_FALSE(unresolved_unintialised.Equals(ref_type_unresolved_unintialised_1));
  // Create an uninitialized type from this unresolved type with the same allocation PC.
  const RegType& unresolved_unintialised_2 = cache.Uninitialized(ref_type, 1101ull);
  EXPECT_TRUE(unresolved_unintialised.Equals(unresolved_unintialised_2));
}

TEST_F(RegTypeReferenceTest, Dump) {
  // Tests types for proper Dump messages.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& unresolved_ref = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  const RegType& unresolved_ref_another = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExistEither;", true);
  const RegType& resolved_ref = cache.JavaLangString();
  const RegType& resolved_unintialiesd = cache.Uninitialized(resolved_ref, 10);
  const RegType& unresolved_unintialized = cache.Uninitialized(unresolved_ref, 12);
  const RegType& unresolved_merged = cache.FromUnresolvedMerge(
      unresolved_ref, unresolved_ref_another, /* verifier= */ nullptr);

  std::string expected = "Unresolved Reference: java.lang.DoesNotExist";
  EXPECT_EQ(expected, unresolved_ref.Dump());
  expected = "Precise Reference: java.lang.String";
  EXPECT_EQ(expected, resolved_ref.Dump());
  expected = "Uninitialized Reference: java.lang.String Allocation PC: 10";
  EXPECT_EQ(expected, resolved_unintialiesd.Dump());
  expected = "Unresolved And Uninitialized Reference: java.lang.DoesNotExist Allocation PC: 12";
  EXPECT_EQ(expected, unresolved_unintialized.Dump());
  expected = "UnresolvedMergedReferences(Zero/null | Unresolved Reference: java.lang.DoesNotExist, Unresolved Reference: java.lang.DoesNotExistEither)";
  EXPECT_EQ(expected, unresolved_merged.Dump());
}

TEST_F(RegTypeReferenceTest, JavalangString) {
  // Add a class to the cache, then look up the same class and make sure it is a
  // hit the second time. Then check for the same effect when using
  // the JavaLangString method instead of FromDescriptor. The String class is final.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& ref_type = cache.JavaLangString();
  const RegType& ref_type_2 = cache.JavaLangString();
  const RegType& ref_type_3 = cache.FromDescriptor(nullptr, "Ljava/lang/String;", true);

  EXPECT_TRUE(ref_type.Equals(ref_type_2));
  EXPECT_TRUE(ref_type_2.Equals(ref_type_3));
  EXPECT_TRUE(ref_type.IsPreciseReference());

  // Create an uninitialized type out of this:
  const RegType& ref_type_unintialized = cache.Uninitialized(ref_type, 0110ull);
  EXPECT_TRUE(ref_type_unintialized.IsUninitializedReference());
  EXPECT_FALSE(ref_type_unintialized.IsUnresolvedAndUninitializedReference());
}

TEST_F(RegTypeReferenceTest, JavalangObject) {
  // Add a class to the cache, then look up the same class and make sure it is a
  // hit the second time. Then check for the same effect when using
  // the JavaLangObject method instead of FromDescriptor. The Object class is not final.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& ref_type = cache.JavaLangObject(true);
  const RegType& ref_type_2 = cache.JavaLangObject(true);
  const RegType& ref_type_3 = cache.FromDescriptor(nullptr, "Ljava/lang/Object;", true);

  EXPECT_TRUE(ref_type.Equals(ref_type_2));
  EXPECT_TRUE(ref_type_3.Equals(ref_type_2));
  EXPECT_EQ(ref_type.GetId(), ref_type_3.GetId());
}

TEST_F(RegTypeReferenceTest, Merging) {
  // Tests merging logic.
  // String and Object: the LUB is Object.
  ScopedObjectAccess soa(Thread::Current());
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& string = cache_new.JavaLangString();
  const RegType& Object = cache_new.JavaLangObject(true);
  EXPECT_TRUE(string.Merge(Object, &cache_new, /* verifier= */ nullptr).IsJavaLangObject());
  // Merge two unresolved types.
  const RegType& ref_type_0 = cache_new.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.IsUnresolvedReference());
  const RegType& ref_type_1 = cache_new.FromDescriptor(nullptr, "Ljava/lang/DoesNotExistToo;", true);
  EXPECT_FALSE(ref_type_0.Equals(ref_type_1));

  const RegType& merged = ref_type_1.Merge(ref_type_0, &cache_new, /* verifier= */ nullptr);
  EXPECT_TRUE(merged.IsUnresolvedMergedReference());
  RegType& merged_nonconst = const_cast<RegType&>(merged);

  const BitVector& unresolved_parts =
      down_cast<UnresolvedMergedType*>(&merged_nonconst)->GetUnresolvedTypes();
  EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_0.GetId()));
  EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_1.GetId()));
}

TEST_F(RegTypeTest, MergingFloat) {
  // Testing merging logic with float and float constants.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);

  constexpr int32_t kTestConstantValue = 10;
  const RegType& float_type = cache_new.Float();
  const RegType& precise_cst = cache_new.FromCat1Const(kTestConstantValue, true);
  const RegType& imprecise_cst = cache_new.FromCat1Const(kTestConstantValue, false);
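  // Both the precise and the imprecise view of the constant should merge with Float to Float,
  // in either argument order, as checked by the four blocks below.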
  {
    // float MERGE precise cst => float.
    const RegType& merged = float_type.Merge(precise_cst, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
  {
    // precise cst MERGE float => float.
    const RegType& merged = precise_cst.Merge(float_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
  {
    // float MERGE imprecise cst => float.
    const RegType& merged = float_type.Merge(imprecise_cst, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
  {
    // imprecise cst MERGE float => float.
    const RegType& merged = imprecise_cst.Merge(float_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
}

TEST_F(RegTypeTest, MergingLong) {
  // Testing merging logic with long and long constants.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);

  constexpr int32_t kTestConstantValue = 10;
  const RegType& long_lo_type = cache_new.LongLo();
  const RegType& long_hi_type = cache_new.LongHi();
  const RegType& precise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, true);
  const RegType& imprecise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, false);
  const RegType& precise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, true);
  const RegType& imprecise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, false);
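  // As with floats, wide constants (precise or imprecise) merge with the matching long half
  // to give that long half, regardless of argument order.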
  {
    // lo MERGE precise cst lo => lo.
    const RegType& merged = long_lo_type.Merge(precise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // precise cst lo MERGE lo => lo.
    const RegType& merged = precise_cst_lo.Merge(long_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // lo MERGE imprecise cst lo => lo.
    const RegType& merged = long_lo_type.Merge(
        imprecise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // imprecise cst lo MERGE lo => lo.
    const RegType& merged = imprecise_cst_lo.Merge(
        long_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // hi MERGE precise cst hi => hi.
    const RegType& merged = long_hi_type.Merge(precise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
  {
    // precise cst hi MERGE hi => hi.
    const RegType& merged = precise_cst_hi.Merge(long_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
  {
    // hi MERGE imprecise cst hi => hi.
    const RegType& merged = long_hi_type.Merge(
        imprecise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
  {
    // imprecise cst hi MERGE hi => hi.
    const RegType& merged = imprecise_cst_hi.Merge(
        long_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
}

TEST_F(RegTypeTest, MergingDouble) {
  // Testing merging logic with double and double constants.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);

  constexpr int32_t kTestConstantValue = 10;
  const RegType& double_lo_type = cache_new.DoubleLo();
  const RegType& double_hi_type = cache_new.DoubleHi();
  const RegType& precise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, true);
  const RegType& imprecise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, false);
  const RegType& precise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, true);
  const RegType& imprecise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, false);
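  // Same pattern as the long test: constants merge into the double halves irrespective of
  // precision and argument order.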
  {
    // lo MERGE precise cst lo => lo.
    const RegType& merged = double_lo_type.Merge(
        precise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // precise cst lo MERGE lo => lo.
    const RegType& merged = precise_cst_lo.Merge(
        double_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // lo MERGE imprecise cst lo => lo.
    const RegType& merged = double_lo_type.Merge(
        imprecise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // imprecise cst lo MERGE lo => lo.
    const RegType& merged = imprecise_cst_lo.Merge(
        double_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // hi MERGE precise cst hi => hi.
    const RegType& merged = double_hi_type.Merge(
        precise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
  {
    // precise cst hi MERGE hi => hi.
    const RegType& merged = precise_cst_hi.Merge(
        double_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
  {
    // hi MERGE imprecise cst hi => hi.
    const RegType& merged = double_hi_type.Merge(
        imprecise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
  {
    // imprecise cst hi MERGE hi => hi.
    const RegType& merged = imprecise_cst_hi.Merge(
        double_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
}

// Without a running MethodVerifier, the class-bearing register types may become stale as the GC
// will not visit them. It is easiest to disable moving GC.
//
// For some of the tests we need (or want) a working RegTypeCache that can load classes. So it is
// not generally possible to disable GC using ScopedGCCriticalSection (as it blocks GC and
// suspension completely).
struct ScopedDisableMovingGC {
  explicit ScopedDisableMovingGC(Thread* t) : self(t) {
    Runtime::Current()->GetHeap()->IncrementDisableMovingGC(self);
  }
  ~ScopedDisableMovingGC() {
    Runtime::Current()->GetHeap()->DecrementDisableMovingGC(self);
  }

  Thread* self;
};

TEST_F(RegTypeTest, MergeSemiLatticeRef) {
  //  (Incomplete) semilattice:
  //
  //  Excluded for now: * category-2 types
  //                    * interfaces
  //                    * all of category-1 primitive types, including constants.
  //  This is to demonstrate/codify the reference side, mostly.
  //
  //  Note: It is not a real semilattice because int = float makes this wonky. :-(
  //
  //                                       Conflict
  //                                           |
  //      #---------#--------------------------#-----------------------------#
  //      |         |                                                        |
  //      |         |                                                      Object
  //      |         |                                                        |
  //     int   uninit types              #---------------#--------#------------------#---------#
  //      |                              |               |        |                  |         |
  //      |                  unresolved-merge-types      |      Object[]           char[]   byte[]
  //      |                              |    |  |       |        |                  |         |
  //      |                  unresolved-types |  #------Number    #---------#        |         |
  //      |                              |    |          |        |         |        |         |
  //      |                              |    #--------Integer  Number[] Number[][]  |         |
  //      |                              |               |        |         |        |         |
  //      |                              #---------------#--------#---------#--------#---------#
  //      |                                                       |
  //      |                                                     null
  //      |                                                       |
  //      #--------------------------#----------------------------#
  //                                 |
  //                                 0

  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());

  ScopedDisableMovingGC no_gc(soa.Self());

  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);

  const RegType& conflict = cache.Conflict();
  const RegType& zero = cache.Zero();
  const RegType& null = cache.Null();
  const RegType& int_type = cache.Integer();

  const RegType& obj = cache.JavaLangObject(false);
  const RegType& obj_arr = cache.From(nullptr, "[Ljava/lang/Object;", false);
  ASSERT_FALSE(obj_arr.IsUnresolvedReference());

  const RegType& unresolved_a = cache.From(nullptr, "Ldoes/not/resolve/A;", false);
  ASSERT_TRUE(unresolved_a.IsUnresolvedReference());
  const RegType& unresolved_b = cache.From(nullptr, "Ldoes/not/resolve/B;", false);
  ASSERT_TRUE(unresolved_b.IsUnresolvedReference());
  const RegType& unresolved_ab = cache.FromUnresolvedMerge(unresolved_a, unresolved_b, nullptr);
  ASSERT_TRUE(unresolved_ab.IsUnresolvedMergedReference());

  const RegType& uninit_this = cache.UninitializedThisArgument(obj);
  const RegType& uninit_obj_0 = cache.Uninitialized(obj, 0u);
  const RegType& uninit_obj_1 = cache.Uninitialized(obj, 1u);

  const RegType& uninit_unres_this = cache.UninitializedThisArgument(unresolved_a);
  const RegType& uninit_unres_a_0 = cache.Uninitialized(unresolved_a, 0);
  const RegType& uninit_unres_b_0 = cache.Uninitialized(unresolved_b, 0);

  const RegType& number = cache.From(nullptr, "Ljava/lang/Number;", false);
  ASSERT_FALSE(number.IsUnresolvedReference());
  const RegType& integer = cache.From(nullptr, "Ljava/lang/Integer;", false);
  ASSERT_FALSE(integer.IsUnresolvedReference());

  const RegType& uninit_number_0 = cache.Uninitialized(number, 0u);
  const RegType& uninit_integer_0 = cache.Uninitialized(integer, 0u);

  const RegType& number_arr = cache.From(nullptr, "[Ljava/lang/Number;", false);
  ASSERT_FALSE(number_arr.IsUnresolvedReference());
  const RegType& integer_arr = cache.From(nullptr, "[Ljava/lang/Integer;", false);
  ASSERT_FALSE(integer_arr.IsUnresolvedReference());

  const RegType& number_arr_arr = cache.From(nullptr, "[[Ljava/lang/Number;", false);
  ASSERT_FALSE(number_arr_arr.IsUnresolvedReference());

  const RegType& char_arr = cache.From(nullptr, "[C", false);
  ASSERT_FALSE(char_arr.IsUnresolvedReference());
  const RegType& byte_arr = cache.From(nullptr, "[B", false);
  ASSERT_FALSE(byte_arr.IsUnresolvedReference());

  const RegType& unresolved_a_num = cache.FromUnresolvedMerge(unresolved_a, number, nullptr);
  ASSERT_TRUE(unresolved_a_num.IsUnresolvedMergedReference());
  const RegType& unresolved_b_num = cache.FromUnresolvedMerge(unresolved_b, number, nullptr);
  ASSERT_TRUE(unresolved_b_num.IsUnresolvedMergedReference());
  const RegType& unresolved_ab_num = cache.FromUnresolvedMerge(unresolved_ab, number, nullptr);
  ASSERT_TRUE(unresolved_ab_num.IsUnresolvedMergedReference());

  const RegType& unresolved_a_int = cache.FromUnresolvedMerge(unresolved_a, integer, nullptr);
  ASSERT_TRUE(unresolved_a_int.IsUnresolvedMergedReference());
  const RegType& unresolved_b_int = cache.FromUnresolvedMerge(unresolved_b, integer, nullptr);
  ASSERT_TRUE(unresolved_b_int.IsUnresolvedMergedReference());
  const RegType& unresolved_ab_int = cache.FromUnresolvedMerge(unresolved_ab, integer, nullptr);
  ASSERT_TRUE(unresolved_ab_int.IsUnresolvedMergedReference());
  std::vector<const RegType*> uninitialized_types = {
      &uninit_this, &uninit_obj_0, &uninit_obj_1, &uninit_number_0, &uninit_integer_0
  };
  std::vector<const RegType*> unresolved_types = {
      &unresolved_a,
      &unresolved_b,
      &unresolved_ab,
      &unresolved_a_num,
      &unresolved_b_num,
      &unresolved_ab_num,
      &unresolved_a_int,
      &unresolved_b_int,
      &unresolved_ab_int
  };
  std::vector<const RegType*> uninit_unresolved_types = {
      &uninit_unres_this, &uninit_unres_a_0, &uninit_unres_b_0
  };
  std::vector<const RegType*> plain_nonobj_classes = { &number, &integer };
  std::vector<const RegType*> plain_nonobj_arr_classes = {
      &number_arr,
      &number_arr_arr,
      &integer_arr,
      &char_arr,
  };
  // std::vector<const RegType*> others = { &conflict, &zero, &null, &obj, &int_type };

  std::vector<const RegType*> all_minus_uninit_conflict;
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   unresolved_types.begin(),
                                   unresolved_types.end());
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   plain_nonobj_classes.begin(),
                                   plain_nonobj_classes.end());
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   plain_nonobj_arr_classes.begin(),
                                   plain_nonobj_arr_classes.end());
  all_minus_uninit_conflict.push_back(&zero);
  all_minus_uninit_conflict.push_back(&null);
  all_minus_uninit_conflict.push_back(&obj);

  std::vector<const RegType*> all_minus_uninit;
  all_minus_uninit.insert(all_minus_uninit.end(),
                          all_minus_uninit_conflict.begin(),
                          all_minus_uninit_conflict.end());
  all_minus_uninit.push_back(&conflict);


  std::vector<const RegType*> all;
  all.insert(all.end(), uninitialized_types.begin(), uninitialized_types.end());
  all.insert(all.end(), uninit_unresolved_types.begin(), uninit_unresolved_types.end());
  all.insert(all.end(), all_minus_uninit.begin(), all_minus_uninit.end());
  all.push_back(&int_type);

  auto check = [&](const RegType& in1, const RegType& in2, const RegType& expected_out)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const RegType& merge_result = in1.SafeMerge(in2, &cache, nullptr);
    EXPECT_EQ(&expected_out, &merge_result)
        << in1.Dump() << " x " << in2.Dump() << " = " << merge_result.Dump()
        << " != " << expected_out.Dump();
  };
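  // Note that the check compares addresses: the merge is expected to return exactly the
  // lattice element constructed above, not merely an equal one.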

  // Identity.
  {
    for (auto r : all) {
      check(*r, *r, *r);
    }
  }

  // Define a covering relation through a list of Edges. We'll then derive LUBs from this and
  // create checks for every pair of types.

  struct Edge {
    const RegType& from;
    const RegType& to;

    Edge(const RegType& from_, const RegType& to_) : from(from_), to(to_) {}
  };
  std::vector<Edge> edges;
#define ADD_EDGE(from, to) edges.emplace_back((from), (to))
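  // Each edge states that 'to' directly covers 'from' in the semilattice ordering used to
  // derive the expected LUBs below.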

  // To Conflict.
  {
    for (auto r : uninitialized_types) {
      ADD_EDGE(*r, conflict);
    }
    for (auto r : uninit_unresolved_types) {
      ADD_EDGE(*r, conflict);
    }
    ADD_EDGE(obj, conflict);
    ADD_EDGE(int_type, conflict);
  }

  ADD_EDGE(zero, null);

  // Unresolved.
  {
    ADD_EDGE(null, unresolved_a);
    ADD_EDGE(null, unresolved_b);
    ADD_EDGE(unresolved_a, unresolved_ab);
    ADD_EDGE(unresolved_b, unresolved_ab);

    ADD_EDGE(number, unresolved_a_num);
    ADD_EDGE(unresolved_a, unresolved_a_num);
    ADD_EDGE(number, unresolved_b_num);
    ADD_EDGE(unresolved_b, unresolved_b_num);
    ADD_EDGE(number, unresolved_ab_num);
    ADD_EDGE(unresolved_a_num, unresolved_ab_num);
    ADD_EDGE(unresolved_b_num, unresolved_ab_num);
    ADD_EDGE(unresolved_ab, unresolved_ab_num);

    ADD_EDGE(integer, unresolved_a_int);
    ADD_EDGE(unresolved_a, unresolved_a_int);
    ADD_EDGE(integer, unresolved_b_int);
    ADD_EDGE(unresolved_b, unresolved_b_int);
    ADD_EDGE(integer, unresolved_ab_int);
    ADD_EDGE(unresolved_a_int, unresolved_ab_int);
    ADD_EDGE(unresolved_b_int, unresolved_ab_int);
    ADD_EDGE(unresolved_ab, unresolved_ab_int);

    ADD_EDGE(unresolved_a_int, unresolved_a_num);
    ADD_EDGE(unresolved_b_int, unresolved_b_num);
    ADD_EDGE(unresolved_ab_int, unresolved_ab_num);

    ADD_EDGE(unresolved_ab_num, obj);
  }

  // Classes.
  {
    ADD_EDGE(null, integer);
    ADD_EDGE(integer, number);
    ADD_EDGE(number, obj);
  }

  // Arrays.
  {
    ADD_EDGE(integer_arr, number_arr);
    ADD_EDGE(number_arr, obj_arr);
    ADD_EDGE(obj_arr, obj);
    ADD_EDGE(number_arr_arr, obj_arr);

    ADD_EDGE(char_arr, obj);
    ADD_EDGE(byte_arr, obj);

    ADD_EDGE(null, integer_arr);
    ADD_EDGE(null, number_arr_arr);
    ADD_EDGE(null, char_arr);
    ADD_EDGE(null, byte_arr);
  }

  // Primitive.
  {
    ADD_EDGE(zero, int_type);
  }
#undef ADD_EDGE

  // Create merge triples by using the covering relation established by edges to derive the
  // expected merge for any pair of types.

  // Expect merge(in1, in2) == out.
  struct MergeExpectation {
    const RegType& in1;
    const RegType& in2;
    const RegType& out;

    MergeExpectation(const RegType& in1_, const RegType& in2_, const RegType& out_)
        : in1(in1_), in2(in2_), out(out_) {}
  };
  std::vector<MergeExpectation> expectations;

  for (auto r1 : all) {
    for (auto r2 : all) {
      if (r1 == r2) {
        continue;
      }

      // Very simple algorithm here that is usually used with adjacency lists. Our graph is
      // small, so it didn't make sense to have lists per node. Thus, the regular guarantees
      // of O(n + |e|) don't apply, but that is acceptable.
      //
      // To compute r1 lub r2 = merge(r1, r2):
      //   1) Generate the reachable set of r1, name it grey.
      //   2) Mark all grey reachable nodes of r2 as black.
      //   3) Find black nodes with no in-edges from other black nodes.
      //   4) If |3)| == 1, that's the lub.

      // Generic BFS of the graph induced by edges, starting at start. new_node will be called
      // with any discovered node, in order.
      auto bfs = [&](auto new_node, const RegType* start) {
        std::unordered_set<const RegType*> seen;
        std::queue<const RegType*> work_list;
        work_list.push(start);
        while (!work_list.empty()) {
          const RegType* cur = work_list.front();
          work_list.pop();
          auto it = seen.find(cur);
          if (it != seen.end()) {
            continue;
          }
          seen.insert(cur);
          new_node(cur);

          for (const Edge& edge : edges) {
            if (&edge.from == cur) {
              work_list.push(&edge.to);
            }
          }
        }
      };

      std::unordered_set<const RegType*> grey;
      auto compute_grey = [&](const RegType* cur) {
        grey.insert(cur);  // Mark discovered node as grey.
      };
      bfs(compute_grey, r1);

      std::set<const RegType*> black;
      auto compute_black = [&](const RegType* cur) {
        // Mark discovered grey node as black.
        if (grey.find(cur) != grey.end()) {
          black.insert(cur);
        }
      };
      bfs(compute_black, r2);

      std::set<const RegType*> no_in_edge(black);  // Copy of black, remove nodes with in-edges.
      for (auto r : black) {
        for (Edge& e : edges) {
          if (&e.from == r) {
            no_in_edge.erase(&e.to);  // It doesn't matter whether "to" is black or not, just
                                      // attempt to remove it.
          }
        }
      }

      // Helper to print sets when something went wrong.
      auto print_set = [](auto& container) REQUIRES_SHARED(Locks::mutator_lock_) {
        std::string result;
        for (auto r : container) {
          result.append(" + ");
          result.append(r->Dump());
        }
        return result;
      };
      ASSERT_EQ(no_in_edge.size(), 1u) << r1->Dump() << " u " << r2->Dump()
                                       << " grey=" << print_set(grey)
                                       << " black=" << print_set(black)
                                       << " no-in-edge=" << print_set(no_in_edge);
      expectations.emplace_back(*r1, *r2, **no_in_edge.begin());
    }
  }

  // Evaluate merge expectations. The merge is expected to be commutative.

  for (auto& triple : expectations) {
    check(triple.in1, triple.in2, triple.out);
    check(triple.in2, triple.in1, triple.out);
  }
}

TEST_F(RegTypeTest, ConstPrecision) {
  // Tests precise and imprecise constant types.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& imprecise_const = cache_new.FromCat1Const(10, false);
  const RegType& precise_const = cache_new.FromCat1Const(10, true);

  EXPECT_TRUE(imprecise_const.IsImpreciseConstant());
  EXPECT_TRUE(precise_const.IsPreciseConstant());
  EXPECT_FALSE(imprecise_const.Equals(precise_const));
}

class RegTypeOOMTest : public RegTypeTest {
 protected:
  void SetUpRuntimeOptions(RuntimeOptions *options) override {
    SetUpRuntimeOptionsForFillHeap(options);

    // We must not appear to be a compiler, or we'll abort on the host.
    callbacks_.reset();
  }
};

TEST_F(RegTypeOOMTest, ClassJoinOOM) {
  // TODO: Figure out why FillHeap isn't good enough under CMS.
  TEST_DISABLED_WITHOUT_BAKER_READ_BARRIERS();

  // Tests that we don't abort with OOMs.

  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());

  ScopedDisableMovingGC no_gc(soa.Self());

  // We merge nested arrays of primitive wrappers. These have a join type of an array of Number of
  // the same depth. We start with depth five, as we want at least two newly created classes to
  // test recursion (it's just more likely that nobody uses such deep arrays in runtime bringup).
  constexpr const char* kIntArrayFive = "[[[[[Ljava/lang/Integer;";
  constexpr const char* kFloatArrayFive = "[[[[[Ljava/lang/Float;";
  constexpr const char* kNumberArrayFour = "[[[[Ljava/lang/Number;";
  constexpr const char* kNumberArrayFive = "[[[[[Ljava/lang/Number;";
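  // Joining the Integer and Float array types requires the class linker to create the Number
  // array classes; once the heap is filled below, that allocation fails and the join is
  // expected to fall back to an unresolved reference.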

  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& int_array_array = cache.From(nullptr, kIntArrayFive, false);
  ASSERT_TRUE(int_array_array.HasClass());
  const RegType& float_array_array = cache.From(nullptr, kFloatArrayFive, false);
  ASSERT_TRUE(float_array_array.HasClass());

  // Check assumptions: the joined classes don't exist yet.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ASSERT_TRUE(class_linker->LookupClass(soa.Self(), kNumberArrayFour, nullptr) == nullptr);
  ASSERT_TRUE(class_linker->LookupClass(soa.Self(), kNumberArrayFive, nullptr) == nullptr);

  // Fill the heap.
  VariableSizedHandleScope hs(soa.Self());
  FillHeap(soa.Self(), class_linker, &hs);

  const RegType& join_type = int_array_array.Merge(float_array_array, &cache, nullptr);
  ASSERT_TRUE(join_type.IsUnresolvedReference());
}

class RegTypeClassJoinTest : public RegTypeTest {
 protected:
  void TestClassJoin(const char* in1, const char* in2, const char* out) {
    ArenaStack stack(Runtime::Current()->GetArenaPool());
    ScopedArenaAllocator allocator(&stack);

    ScopedObjectAccess soa(Thread::Current());
    jobject jclass_loader = LoadDex("Interfaces");
    StackHandleScope<4> hs(soa.Self());
    Handle<mirror::ClassLoader> class_loader(
        hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));

    Handle<mirror::Class> c1(hs.NewHandle(
        class_linker_->FindClass(soa.Self(), in1, class_loader)));
    Handle<mirror::Class> c2(hs.NewHandle(
        class_linker_->FindClass(soa.Self(), in2, class_loader)));
    ASSERT_TRUE(c1 != nullptr);
    ASSERT_TRUE(c2 != nullptr);

    ScopedDisableMovingGC no_gc(soa.Self());

    RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
    const RegType& c1_reg_type = *cache.InsertClass(in1, c1.Get(), false);
    const RegType& c2_reg_type = *cache.InsertClass(in2, c2.Get(), false);

    const RegType& join_type = c1_reg_type.Merge(c2_reg_type, &cache, nullptr);
    EXPECT_TRUE(join_type.HasClass());
    EXPECT_EQ(join_type.GetDescriptor(), std::string_view(out));
  }
};

TEST_F(RegTypeClassJoinTest, ClassJoinInterfaces) {
  TestClassJoin("LInterfaces$K;", "LInterfaces$L;", "LInterfaces$J;");
}

TEST_F(RegTypeClassJoinTest, ClassJoinInterfaceClass) {
  TestClassJoin("LInterfaces$B;", "LInterfaces$L;", "LInterfaces$J;");
}

TEST_F(RegTypeClassJoinTest, ClassJoinClassClass) {
  // This test codifies that we prefer the class hierarchy over interfaces. It's a mostly
  // arbitrary choice; optimally we'd have set types and could handle multi-inheritance precisely.
1151   TestClassJoin("LInterfaces$A;", "LInterfaces$B;", "Ljava/lang/Object;");
1152 }
1153 
1154 }  // namespace verifier
1155 }  // namespace art
1156