/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reg_type.h"

#include <queue>
#include <set>
#include <string>
#include <string_view>
#include <unordered_set>
#include <vector>

#include "base/bit_vector.h"
#include "base/casts.h"
#include "base/scoped_arena_allocator.h"
#include "common_runtime_test.h"
#include "compiler_callbacks.h"
#include "reg_type-inl.h"
#include "reg_type_cache-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"

namespace art {
namespace verifier {

class RegTypeTest : public CommonRuntimeTest {};

TEST_F(RegTypeTest, ConstLoHi) {
  // Tests creating primitive constant types.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
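  // Identical (value, precise) requests should yield constant types that compare equal;
  // differing requests should not, as the Equals checks below verify.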
  const RegType& ref_type_const_0 = cache.FromCat1Const(10, true);
  const RegType& ref_type_const_1 = cache.FromCat1Const(10, true);
  const RegType& ref_type_const_2 = cache.FromCat1Const(30, true);
  const RegType& ref_type_const_3 = cache.FromCat1Const(30, false);
  EXPECT_TRUE(ref_type_const_0.Equals(ref_type_const_1));
  EXPECT_FALSE(ref_type_const_0.Equals(ref_type_const_2));
  EXPECT_FALSE(ref_type_const_0.Equals(ref_type_const_3));

  const RegType& ref_type_const_wide_0 = cache.FromCat2ConstHi(50, true);
  const RegType& ref_type_const_wide_1 = cache.FromCat2ConstHi(50, true);
  EXPECT_TRUE(ref_type_const_wide_0.Equals(ref_type_const_wide_1));

  const RegType& ref_type_const_wide_2 = cache.FromCat2ConstLo(50, true);
  const RegType& ref_type_const_wide_3 = cache.FromCat2ConstLo(50, true);
  const RegType& ref_type_const_wide_4 = cache.FromCat2ConstLo(55, true);
  EXPECT_TRUE(ref_type_const_wide_2.Equals(ref_type_const_wide_3));
  EXPECT_FALSE(ref_type_const_wide_2.Equals(ref_type_const_wide_4));
}

TEST_F(RegTypeTest, Pairs) {
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  int64_t val = static_cast<int32_t>(1234);
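  // val fits in 32 bits, so the high word (val >> 32) used below is zero.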
  const RegType& precise_lo = cache.FromCat2ConstLo(static_cast<int32_t>(val), true);
  const RegType& precise_hi = cache.FromCat2ConstHi(static_cast<int32_t>(val >> 32), true);
  const RegType& precise_const = cache.FromCat1Const(static_cast<int32_t>(val >> 32), true);
  const RegType& long_lo = cache.LongLo();
  const RegType& long_hi = cache.LongHi();
  // Check the expectations for types.
  EXPECT_TRUE(precise_lo.IsLowHalf());
  EXPECT_FALSE(precise_hi.IsLowHalf());
  EXPECT_FALSE(precise_lo.IsHighHalf());
  EXPECT_TRUE(precise_hi.IsHighHalf());
  EXPECT_TRUE(long_hi.IsLongHighTypes());
  EXPECT_TRUE(precise_hi.IsLongHighTypes());
  // Check Pairing.
  EXPECT_FALSE(precise_lo.CheckWidePair(precise_const));
  EXPECT_TRUE(precise_lo.CheckWidePair(precise_hi));
  // Test Merging.
  EXPECT_TRUE((long_lo.Merge(precise_lo, &cache, /* verifier= */ nullptr)).IsLongTypes());
  EXPECT_TRUE((long_hi.Merge(precise_hi, &cache, /* verifier= */ nullptr)).IsLongHighTypes());
}

TEST_F(RegTypeTest, Primitives) {
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
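  // Each primitive type from the cache is run through the full battery of type predicates.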

  const RegType& bool_reg_type = cache.Boolean();
  EXPECT_FALSE(bool_reg_type.IsUndefined());
  EXPECT_FALSE(bool_reg_type.IsConflict());
  EXPECT_FALSE(bool_reg_type.IsZero());
  EXPECT_FALSE(bool_reg_type.IsOne());
  EXPECT_FALSE(bool_reg_type.IsLongConstant());
  EXPECT_TRUE(bool_reg_type.IsBoolean());
  EXPECT_FALSE(bool_reg_type.IsByte());
  EXPECT_FALSE(bool_reg_type.IsChar());
  EXPECT_FALSE(bool_reg_type.IsShort());
  EXPECT_FALSE(bool_reg_type.IsInteger());
  EXPECT_FALSE(bool_reg_type.IsLong());
  EXPECT_FALSE(bool_reg_type.IsFloat());
  EXPECT_FALSE(bool_reg_type.IsDouble());
  EXPECT_FALSE(bool_reg_type.IsReference());
  EXPECT_FALSE(bool_reg_type.IsLowHalf());
  EXPECT_FALSE(bool_reg_type.IsHighHalf());
  EXPECT_FALSE(bool_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(bool_reg_type.IsReferenceTypes());
  EXPECT_TRUE(bool_reg_type.IsCategory1Types());
  EXPECT_FALSE(bool_reg_type.IsCategory2Types());
  EXPECT_TRUE(bool_reg_type.IsBooleanTypes());
  EXPECT_TRUE(bool_reg_type.IsByteTypes());
  EXPECT_TRUE(bool_reg_type.IsShortTypes());
  EXPECT_TRUE(bool_reg_type.IsCharTypes());
  EXPECT_TRUE(bool_reg_type.IsIntegralTypes());
  EXPECT_FALSE(bool_reg_type.IsFloatTypes());
  EXPECT_FALSE(bool_reg_type.IsLongTypes());
  EXPECT_FALSE(bool_reg_type.IsDoubleTypes());
  EXPECT_TRUE(bool_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(bool_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(bool_reg_type.HasClass());

  const RegType& byte_reg_type = cache.Byte();
  EXPECT_FALSE(byte_reg_type.IsUndefined());
  EXPECT_FALSE(byte_reg_type.IsConflict());
  EXPECT_FALSE(byte_reg_type.IsZero());
  EXPECT_FALSE(byte_reg_type.IsOne());
  EXPECT_FALSE(byte_reg_type.IsLongConstant());
  EXPECT_FALSE(byte_reg_type.IsBoolean());
  EXPECT_TRUE(byte_reg_type.IsByte());
  EXPECT_FALSE(byte_reg_type.IsChar());
  EXPECT_FALSE(byte_reg_type.IsShort());
  EXPECT_FALSE(byte_reg_type.IsInteger());
  EXPECT_FALSE(byte_reg_type.IsLong());
  EXPECT_FALSE(byte_reg_type.IsFloat());
  EXPECT_FALSE(byte_reg_type.IsDouble());
  EXPECT_FALSE(byte_reg_type.IsReference());
  EXPECT_FALSE(byte_reg_type.IsLowHalf());
  EXPECT_FALSE(byte_reg_type.IsHighHalf());
  EXPECT_FALSE(byte_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(byte_reg_type.IsReferenceTypes());
  EXPECT_TRUE(byte_reg_type.IsCategory1Types());
  EXPECT_FALSE(byte_reg_type.IsCategory2Types());
  EXPECT_FALSE(byte_reg_type.IsBooleanTypes());
  EXPECT_TRUE(byte_reg_type.IsByteTypes());
  EXPECT_TRUE(byte_reg_type.IsShortTypes());
  EXPECT_FALSE(byte_reg_type.IsCharTypes());
  EXPECT_TRUE(byte_reg_type.IsIntegralTypes());
  EXPECT_FALSE(byte_reg_type.IsFloatTypes());
  EXPECT_FALSE(byte_reg_type.IsLongTypes());
  EXPECT_FALSE(byte_reg_type.IsDoubleTypes());
  EXPECT_TRUE(byte_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(byte_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(byte_reg_type.HasClass());

  const RegType& char_reg_type = cache.Char();
  EXPECT_FALSE(char_reg_type.IsUndefined());
  EXPECT_FALSE(char_reg_type.IsConflict());
  EXPECT_FALSE(char_reg_type.IsZero());
  EXPECT_FALSE(char_reg_type.IsOne());
  EXPECT_FALSE(char_reg_type.IsLongConstant());
  EXPECT_FALSE(char_reg_type.IsBoolean());
  EXPECT_FALSE(char_reg_type.IsByte());
  EXPECT_TRUE(char_reg_type.IsChar());
  EXPECT_FALSE(char_reg_type.IsShort());
  EXPECT_FALSE(char_reg_type.IsInteger());
  EXPECT_FALSE(char_reg_type.IsLong());
  EXPECT_FALSE(char_reg_type.IsFloat());
  EXPECT_FALSE(char_reg_type.IsDouble());
  EXPECT_FALSE(char_reg_type.IsReference());
  EXPECT_FALSE(char_reg_type.IsLowHalf());
  EXPECT_FALSE(char_reg_type.IsHighHalf());
  EXPECT_FALSE(char_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(char_reg_type.IsReferenceTypes());
  EXPECT_TRUE(char_reg_type.IsCategory1Types());
  EXPECT_FALSE(char_reg_type.IsCategory2Types());
  EXPECT_FALSE(char_reg_type.IsBooleanTypes());
  EXPECT_FALSE(char_reg_type.IsByteTypes());
  EXPECT_FALSE(char_reg_type.IsShortTypes());
  EXPECT_TRUE(char_reg_type.IsCharTypes());
  EXPECT_TRUE(char_reg_type.IsIntegralTypes());
  EXPECT_FALSE(char_reg_type.IsFloatTypes());
  EXPECT_FALSE(char_reg_type.IsLongTypes());
  EXPECT_FALSE(char_reg_type.IsDoubleTypes());
  EXPECT_TRUE(char_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(char_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(char_reg_type.HasClass());

  const RegType& short_reg_type = cache.Short();
  EXPECT_FALSE(short_reg_type.IsUndefined());
  EXPECT_FALSE(short_reg_type.IsConflict());
  EXPECT_FALSE(short_reg_type.IsZero());
  EXPECT_FALSE(short_reg_type.IsOne());
  EXPECT_FALSE(short_reg_type.IsLongConstant());
  EXPECT_FALSE(short_reg_type.IsBoolean());
  EXPECT_FALSE(short_reg_type.IsByte());
  EXPECT_FALSE(short_reg_type.IsChar());
  EXPECT_TRUE(short_reg_type.IsShort());
  EXPECT_FALSE(short_reg_type.IsInteger());
  EXPECT_FALSE(short_reg_type.IsLong());
  EXPECT_FALSE(short_reg_type.IsFloat());
  EXPECT_FALSE(short_reg_type.IsDouble());
  EXPECT_FALSE(short_reg_type.IsReference());
  EXPECT_FALSE(short_reg_type.IsLowHalf());
  EXPECT_FALSE(short_reg_type.IsHighHalf());
  EXPECT_FALSE(short_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(short_reg_type.IsReferenceTypes());
  EXPECT_TRUE(short_reg_type.IsCategory1Types());
  EXPECT_FALSE(short_reg_type.IsCategory2Types());
  EXPECT_FALSE(short_reg_type.IsBooleanTypes());
  EXPECT_FALSE(short_reg_type.IsByteTypes());
  EXPECT_TRUE(short_reg_type.IsShortTypes());
  EXPECT_FALSE(short_reg_type.IsCharTypes());
  EXPECT_TRUE(short_reg_type.IsIntegralTypes());
  EXPECT_FALSE(short_reg_type.IsFloatTypes());
  EXPECT_FALSE(short_reg_type.IsLongTypes());
  EXPECT_FALSE(short_reg_type.IsDoubleTypes());
  EXPECT_TRUE(short_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(short_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(short_reg_type.HasClass());

  const RegType& int_reg_type = cache.Integer();
  EXPECT_FALSE(int_reg_type.IsUndefined());
  EXPECT_FALSE(int_reg_type.IsConflict());
  EXPECT_FALSE(int_reg_type.IsZero());
  EXPECT_FALSE(int_reg_type.IsOne());
  EXPECT_FALSE(int_reg_type.IsLongConstant());
  EXPECT_FALSE(int_reg_type.IsBoolean());
  EXPECT_FALSE(int_reg_type.IsByte());
  EXPECT_FALSE(int_reg_type.IsChar());
  EXPECT_FALSE(int_reg_type.IsShort());
  EXPECT_TRUE(int_reg_type.IsInteger());
  EXPECT_FALSE(int_reg_type.IsLong());
  EXPECT_FALSE(int_reg_type.IsFloat());
  EXPECT_FALSE(int_reg_type.IsDouble());
  EXPECT_FALSE(int_reg_type.IsReference());
  EXPECT_FALSE(int_reg_type.IsLowHalf());
  EXPECT_FALSE(int_reg_type.IsHighHalf());
  EXPECT_FALSE(int_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(int_reg_type.IsReferenceTypes());
  EXPECT_TRUE(int_reg_type.IsCategory1Types());
  EXPECT_FALSE(int_reg_type.IsCategory2Types());
  EXPECT_FALSE(int_reg_type.IsBooleanTypes());
  EXPECT_FALSE(int_reg_type.IsByteTypes());
  EXPECT_FALSE(int_reg_type.IsShortTypes());
  EXPECT_FALSE(int_reg_type.IsCharTypes());
  EXPECT_TRUE(int_reg_type.IsIntegralTypes());
  EXPECT_FALSE(int_reg_type.IsFloatTypes());
  EXPECT_FALSE(int_reg_type.IsLongTypes());
  EXPECT_FALSE(int_reg_type.IsDoubleTypes());
  EXPECT_TRUE(int_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(int_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(int_reg_type.HasClass());

  const RegType& long_reg_type = cache.LongLo();
  EXPECT_FALSE(long_reg_type.IsUndefined());
  EXPECT_FALSE(long_reg_type.IsConflict());
  EXPECT_FALSE(long_reg_type.IsZero());
  EXPECT_FALSE(long_reg_type.IsOne());
  EXPECT_FALSE(long_reg_type.IsLongConstant());
  EXPECT_FALSE(long_reg_type.IsBoolean());
  EXPECT_FALSE(long_reg_type.IsByte());
  EXPECT_FALSE(long_reg_type.IsChar());
  EXPECT_FALSE(long_reg_type.IsShort());
  EXPECT_FALSE(long_reg_type.IsInteger());
  EXPECT_TRUE(long_reg_type.IsLong());
  EXPECT_FALSE(long_reg_type.IsFloat());
  EXPECT_FALSE(long_reg_type.IsDouble());
  EXPECT_FALSE(long_reg_type.IsReference());
  EXPECT_TRUE(long_reg_type.IsLowHalf());
  EXPECT_FALSE(long_reg_type.IsHighHalf());
  EXPECT_TRUE(long_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(long_reg_type.IsReferenceTypes());
  EXPECT_FALSE(long_reg_type.IsCategory1Types());
  EXPECT_TRUE(long_reg_type.IsCategory2Types());
  EXPECT_FALSE(long_reg_type.IsBooleanTypes());
  EXPECT_FALSE(long_reg_type.IsByteTypes());
  EXPECT_FALSE(long_reg_type.IsShortTypes());
  EXPECT_FALSE(long_reg_type.IsCharTypes());
  EXPECT_FALSE(long_reg_type.IsIntegralTypes());
  EXPECT_FALSE(long_reg_type.IsFloatTypes());
  EXPECT_TRUE(long_reg_type.IsLongTypes());
  EXPECT_FALSE(long_reg_type.IsDoubleTypes());
  EXPECT_FALSE(long_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(long_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(long_reg_type.HasClass());

  const RegType& float_reg_type = cache.Float();
  EXPECT_FALSE(float_reg_type.IsUndefined());
  EXPECT_FALSE(float_reg_type.IsConflict());
  EXPECT_FALSE(float_reg_type.IsZero());
  EXPECT_FALSE(float_reg_type.IsOne());
  EXPECT_FALSE(float_reg_type.IsLongConstant());
  EXPECT_FALSE(float_reg_type.IsBoolean());
  EXPECT_FALSE(float_reg_type.IsByte());
  EXPECT_FALSE(float_reg_type.IsChar());
  EXPECT_FALSE(float_reg_type.IsShort());
  EXPECT_FALSE(float_reg_type.IsInteger());
  EXPECT_FALSE(float_reg_type.IsLong());
  EXPECT_TRUE(float_reg_type.IsFloat());
  EXPECT_FALSE(float_reg_type.IsDouble());
  EXPECT_FALSE(float_reg_type.IsReference());
  EXPECT_FALSE(float_reg_type.IsLowHalf());
  EXPECT_FALSE(float_reg_type.IsHighHalf());
  EXPECT_FALSE(float_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(float_reg_type.IsReferenceTypes());
  EXPECT_TRUE(float_reg_type.IsCategory1Types());
  EXPECT_FALSE(float_reg_type.IsCategory2Types());
  EXPECT_FALSE(float_reg_type.IsBooleanTypes());
  EXPECT_FALSE(float_reg_type.IsByteTypes());
  EXPECT_FALSE(float_reg_type.IsShortTypes());
  EXPECT_FALSE(float_reg_type.IsCharTypes());
  EXPECT_FALSE(float_reg_type.IsIntegralTypes());
  EXPECT_TRUE(float_reg_type.IsFloatTypes());
  EXPECT_FALSE(float_reg_type.IsLongTypes());
  EXPECT_FALSE(float_reg_type.IsDoubleTypes());
  EXPECT_FALSE(float_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(float_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(float_reg_type.HasClass());

  const RegType& double_reg_type = cache.DoubleLo();
  EXPECT_FALSE(double_reg_type.IsUndefined());
  EXPECT_FALSE(double_reg_type.IsConflict());
  EXPECT_FALSE(double_reg_type.IsZero());
  EXPECT_FALSE(double_reg_type.IsOne());
  EXPECT_FALSE(double_reg_type.IsLongConstant());
  EXPECT_FALSE(double_reg_type.IsBoolean());
  EXPECT_FALSE(double_reg_type.IsByte());
  EXPECT_FALSE(double_reg_type.IsChar());
  EXPECT_FALSE(double_reg_type.IsShort());
  EXPECT_FALSE(double_reg_type.IsInteger());
  EXPECT_FALSE(double_reg_type.IsLong());
  EXPECT_FALSE(double_reg_type.IsFloat());
  EXPECT_TRUE(double_reg_type.IsDouble());
  EXPECT_FALSE(double_reg_type.IsReference());
  EXPECT_TRUE(double_reg_type.IsLowHalf());
  EXPECT_FALSE(double_reg_type.IsHighHalf());
  EXPECT_TRUE(double_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(double_reg_type.IsReferenceTypes());
  EXPECT_FALSE(double_reg_type.IsCategory1Types());
  EXPECT_TRUE(double_reg_type.IsCategory2Types());
  EXPECT_FALSE(double_reg_type.IsBooleanTypes());
  EXPECT_FALSE(double_reg_type.IsByteTypes());
  EXPECT_FALSE(double_reg_type.IsShortTypes());
  EXPECT_FALSE(double_reg_type.IsCharTypes());
  EXPECT_FALSE(double_reg_type.IsIntegralTypes());
  EXPECT_FALSE(double_reg_type.IsFloatTypes());
  EXPECT_FALSE(double_reg_type.IsLongTypes());
  EXPECT_TRUE(double_reg_type.IsDoubleTypes());
  EXPECT_FALSE(double_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(double_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(double_reg_type.HasClass());
}

class RegTypeReferenceTest : public CommonRuntimeTest {};

TEST_F(RegTypeReferenceTest, JavalangObjectImprecise) {
  // Tests matching precisions. A reference type that was created precise doesn't
  // match the one that is imprecise.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& imprecise_obj = cache.JavaLangObject(false);
  const RegType& precise_obj = cache.JavaLangObject(true);
  const RegType& precise_obj_2 = cache.FromDescriptor(nullptr, "Ljava/lang/Object;", true);

  EXPECT_TRUE(precise_obj.Equals(precise_obj_2));
  EXPECT_FALSE(imprecise_obj.Equals(precise_obj));
  EXPECT_FALSE(imprecise_obj.Equals(precise_obj));
  EXPECT_FALSE(imprecise_obj.Equals(precise_obj_2));
}

TEST_F(RegTypeReferenceTest, UnresolvedType) {
  // Tests creating unresolved types: the first cache lookup is a miss, the second is a hit.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& ref_type_0 = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.IsUnresolvedReference());
  EXPECT_TRUE(ref_type_0.IsNonZeroReferenceTypes());

  const RegType& ref_type_1 = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.Equals(ref_type_1));

  const RegType& unresolved_super_class = cache.FromUnresolvedSuperClass(ref_type_0);
  EXPECT_TRUE(unresolved_super_class.IsUnresolvedSuperClass());
  EXPECT_TRUE(unresolved_super_class.IsNonZeroReferenceTypes());
}

TEST_F(RegTypeReferenceTest, UnresolvedUnintializedType) {
  // Tests creating uninitialized types from unresolved types.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& ref_type_0 = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.IsUnresolvedReference());
  const RegType& ref_type = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.Equals(ref_type));
  // Create an uninitialized type of this unresolved type.
  const RegType& unresolved_unintialised = cache.Uninitialized(ref_type, 1101ull);
  EXPECT_TRUE(unresolved_unintialised.IsUnresolvedAndUninitializedReference());
  EXPECT_TRUE(unresolved_unintialised.IsUninitializedTypes());
  EXPECT_TRUE(unresolved_unintialised.IsNonZeroReferenceTypes());
  // Create an uninitialized type of this unresolved type with a different PC.
  const RegType& ref_type_unresolved_unintialised_1 = cache.Uninitialized(ref_type, 1102ull);
  EXPECT_TRUE(unresolved_unintialised.IsUnresolvedAndUninitializedReference());
  EXPECT_FALSE(unresolved_unintialised.Equals(ref_type_unresolved_unintialised_1));
  // Create an uninitialized type of this unresolved type with the same PC.
  const RegType& unresolved_unintialised_2 = cache.Uninitialized(ref_type, 1101ull);
  EXPECT_TRUE(unresolved_unintialised.Equals(unresolved_unintialised_2));
}

TEST_F(RegTypeReferenceTest, Dump) {
  // Tests that types produce the proper Dump messages.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& unresolved_ref = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  const RegType& unresolved_ref_another = cache.FromDescriptor(nullptr, "Ljava/lang/DoesNotExistEither;", true);
  const RegType& resolved_ref = cache.JavaLangString();
  const RegType& resolved_unintialiesd = cache.Uninitialized(resolved_ref, 10);
  const RegType& unresolved_unintialized = cache.Uninitialized(unresolved_ref, 12);
  const RegType& unresolved_merged = cache.FromUnresolvedMerge(
      unresolved_ref, unresolved_ref_another, /* verifier= */ nullptr);

  std::string expected = "Unresolved Reference: java.lang.DoesNotExist";
  EXPECT_EQ(expected, unresolved_ref.Dump());
  expected = "Precise Reference: java.lang.String";
  EXPECT_EQ(expected, resolved_ref.Dump());
  expected = "Uninitialized Reference: java.lang.String Allocation PC: 10";
  EXPECT_EQ(expected, resolved_unintialiesd.Dump());
  expected = "Unresolved And Uninitialized Reference: java.lang.DoesNotExist Allocation PC: 12";
  EXPECT_EQ(expected, unresolved_unintialized.Dump());
  expected = "UnresolvedMergedReferences(Zero/null | Unresolved Reference: java.lang.DoesNotExist, Unresolved Reference: java.lang.DoesNotExistEither)";
  EXPECT_EQ(expected, unresolved_merged.Dump());
}

TEST_F(RegTypeReferenceTest, JavalangString) {
  // Add a class to the cache, then look up the same class and make sure it is a hit the second
  // time. Then check for the same effect when using the JavaLangString method instead of
  // FromDescriptor. The String class is final.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& ref_type = cache.JavaLangString();
  const RegType& ref_type_2 = cache.JavaLangString();
  const RegType& ref_type_3 = cache.FromDescriptor(nullptr, "Ljava/lang/String;", true);

  EXPECT_TRUE(ref_type.Equals(ref_type_2));
  EXPECT_TRUE(ref_type_2.Equals(ref_type_3));
  EXPECT_TRUE(ref_type.IsPreciseReference());

  // Create an uninitialized type out of this:
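  // Note: 0110 below is an octal literal (decimal 72), used as the allocation PC.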
  const RegType& ref_type_unintialized = cache.Uninitialized(ref_type, 0110ull);
  EXPECT_TRUE(ref_type_unintialized.IsUninitializedReference());
  EXPECT_FALSE(ref_type_unintialized.IsUnresolvedAndUninitializedReference());
}

TEST_F(RegTypeReferenceTest, JavalangObject) {
  // Add a class to the cache, then look up the same class and make sure it is a hit the second
  // time. Then check for the same effect when using the JavaLangObject method instead of
  // FromDescriptor. The Object class is not final.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& ref_type = cache.JavaLangObject(true);
  const RegType& ref_type_2 = cache.JavaLangObject(true);
  const RegType& ref_type_3 = cache.FromDescriptor(nullptr, "Ljava/lang/Object;", true);

  EXPECT_TRUE(ref_type.Equals(ref_type_2));
  EXPECT_TRUE(ref_type_3.Equals(ref_type_2));
  EXPECT_EQ(ref_type.GetId(), ref_type_3.GetId());
}

TEST_F(RegTypeReferenceTest, Merging) {
  // Tests merging logic.
  // String and Object: the LUB is Object.
  ScopedObjectAccess soa(Thread::Current());
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& string = cache_new.JavaLangString();
  const RegType& Object = cache_new.JavaLangObject(true);
  EXPECT_TRUE(string.Merge(Object, &cache_new, /* verifier= */ nullptr).IsJavaLangObject());
  // Merge two unresolved types.
  const RegType& ref_type_0 = cache_new.FromDescriptor(nullptr, "Ljava/lang/DoesNotExist;", true);
  EXPECT_TRUE(ref_type_0.IsUnresolvedReference());
  const RegType& ref_type_1 = cache_new.FromDescriptor(nullptr, "Ljava/lang/DoesNotExistToo;", true);
  EXPECT_FALSE(ref_type_0.Equals(ref_type_1));

  const RegType& merged = ref_type_1.Merge(ref_type_0, &cache_new, /* verifier= */ nullptr);
  EXPECT_TRUE(merged.IsUnresolvedMergedReference());
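  // Cast away constness so the merged type can be down-cast and its set of unresolved
  // components inspected below.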
  RegType& merged_nonconst = const_cast<RegType&>(merged);

  const BitVector& unresolved_parts =
      down_cast<UnresolvedMergedType*>(&merged_nonconst)->GetUnresolvedTypes();
  EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_0.GetId()));
  EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_1.GetId()));
}

TEST_F(RegTypeTest, MergingFloat) {
  // Testing merging logic with float and float constants.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);

  constexpr int32_t kTestConstantValue = 10;
  const RegType& float_type = cache_new.Float();
  const RegType& precise_cst = cache_new.FromCat1Const(kTestConstantValue, true);
  const RegType& imprecise_cst = cache_new.FromCat1Const(kTestConstantValue, false);
  {
    // float MERGE precise cst => float.
    const RegType& merged = float_type.Merge(precise_cst, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
  {
    // precise cst MERGE float => float.
    const RegType& merged = precise_cst.Merge(float_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
  {
    // float MERGE imprecise cst => float.
    const RegType& merged = float_type.Merge(imprecise_cst, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
  {
    // imprecise cst MERGE float => float.
    const RegType& merged = imprecise_cst.Merge(float_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
}

TEST_F(RegTypeTest, MergingLong) {
  // Testing merging logic with long and long constants.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);

  constexpr int32_t kTestConstantValue = 10;
  const RegType& long_lo_type = cache_new.LongLo();
  const RegType& long_hi_type = cache_new.LongHi();
  const RegType& precise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, true);
  const RegType& imprecise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, false);
  const RegType& precise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, true);
  const RegType& imprecise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, false);
  {
    // lo MERGE precise cst lo => lo.
    const RegType& merged = long_lo_type.Merge(precise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // precise cst lo MERGE lo => lo.
    const RegType& merged = precise_cst_lo.Merge(long_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // lo MERGE imprecise cst lo => lo.
    const RegType& merged = long_lo_type.Merge(
        imprecise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // imprecise cst lo MERGE lo => lo.
    const RegType& merged = imprecise_cst_lo.Merge(
        long_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // hi MERGE precise cst hi => hi.
    const RegType& merged = long_hi_type.Merge(precise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
  {
    // precise cst hi MERGE hi => hi.
    const RegType& merged = precise_cst_hi.Merge(long_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
  {
    // hi MERGE imprecise cst hi => hi.
    const RegType& merged = long_hi_type.Merge(
        imprecise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
  {
    // imprecise cst hi MERGE hi => hi.
    const RegType& merged = imprecise_cst_hi.Merge(
        long_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
}

TEST_F(RegTypeTest, MergingDouble) {
  // Testing merging logic with double and double constants.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);

  constexpr int32_t kTestConstantValue = 10;
  const RegType& double_lo_type = cache_new.DoubleLo();
  const RegType& double_hi_type = cache_new.DoubleHi();
  const RegType& precise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, true);
  const RegType& imprecise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, false);
  const RegType& precise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, true);
  const RegType& imprecise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, false);
  {
    // lo MERGE precise cst lo => lo.
    const RegType& merged = double_lo_type.Merge(
        precise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // precise cst lo MERGE lo => lo.
    const RegType& merged = precise_cst_lo.Merge(
        double_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // lo MERGE imprecise cst lo => lo.
    const RegType& merged = double_lo_type.Merge(
        imprecise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // imprecise cst lo MERGE lo => lo.
    const RegType& merged = imprecise_cst_lo.Merge(
        double_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // hi MERGE precise cst hi => hi.
    const RegType& merged = double_hi_type.Merge(
        precise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
  {
    // precise cst hi MERGE hi => hi.
    const RegType& merged = precise_cst_hi.Merge(
        double_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
  {
    // hi MERGE imprecise cst hi => hi.
    const RegType& merged = double_hi_type.Merge(
        imprecise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
  {
    // imprecise cst hi MERGE hi => hi.
    const RegType& merged = imprecise_cst_hi.Merge(
        double_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
}

// Without a running MethodVerifier, the class-bearing register types may become stale as the GC
// will not visit them. It is easiest to disable moving GC.
//
// For some of the tests we need (or want) a working RegTypeCache that can load classes. So it is
// not generally possible to disable GC using ScopedGCCriticalSection (as it blocks GC and
// suspension completely).
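//
// RAII helper: increments the heap's moving-GC disable counter on construction and decrements it
// on destruction.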
struct ScopedDisableMovingGC {
  explicit ScopedDisableMovingGC(Thread* t) : self(t) {
    Runtime::Current()->GetHeap()->IncrementDisableMovingGC(self);
  }
  ~ScopedDisableMovingGC() {
    Runtime::Current()->GetHeap()->DecrementDisableMovingGC(self);
  }

  Thread* self;
};

TEST_F(RegTypeTest, MergeSemiLatticeRef) {
  //  (Incomplete) semilattice:
  //
  //  Excluded for now: * category-2 types
  //                    * interfaces
  //                    * all of category-1 primitive types, including constants.
  //  This is to demonstrate/codify the reference side, mostly.
  //
  //  Note: It is not a real semilattice because int = float makes this wonky. :-(
  //
  //                                       Conflict
  //                                           |
  //      #---------#--------------------------#-----------------------------#
  //      |         |                                                        |
  //      |         |                                                      Object
  //      |         |                                                        |
  //     int   uninit types              #---------------#--------#------------------#---------#
  //      |                              |               |        |                  |         |
  //      |                  unresolved-merge-types      |      Object[]           char[]   byte[]
  //      |                              |    |  |       |        |                  |         |
  //      |                  unresolved-types |  #------Number    #---------#        |         |
  //      |                              |    |          |        |         |        |         |
  //      |                              |    #--------Integer  Number[] Number[][]  |         |
  //      |                              |               |        |         |        |         |
  //      |                              #---------------#--------#---------#--------#---------#
  //      |                                                       |
  //      |                                                     null
  //      |                                                       |
  //      #--------------------------#----------------------------#
  //                                 |
  //                                 0

  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());

  ScopedDisableMovingGC no_gc(soa.Self());

  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);

  const RegType& conflict = cache.Conflict();
  const RegType& zero = cache.Zero();
  const RegType& null = cache.Null();
  const RegType& int_type = cache.Integer();

  const RegType& obj = cache.JavaLangObject(false);
  const RegType& obj_arr = cache.From(nullptr, "[Ljava/lang/Object;", false);
  ASSERT_FALSE(obj_arr.IsUnresolvedReference());

  const RegType& unresolved_a = cache.From(nullptr, "Ldoes/not/resolve/A;", false);
  ASSERT_TRUE(unresolved_a.IsUnresolvedReference());
  const RegType& unresolved_b = cache.From(nullptr, "Ldoes/not/resolve/B;", false);
  ASSERT_TRUE(unresolved_b.IsUnresolvedReference());
  const RegType& unresolved_ab = cache.FromUnresolvedMerge(unresolved_a, unresolved_b, nullptr);
  ASSERT_TRUE(unresolved_ab.IsUnresolvedMergedReference());

  const RegType& uninit_this = cache.UninitializedThisArgument(obj);
  const RegType& uninit_obj_0 = cache.Uninitialized(obj, 0u);
  const RegType& uninit_obj_1 = cache.Uninitialized(obj, 1u);

  const RegType& uninit_unres_this = cache.UninitializedThisArgument(unresolved_a);
  const RegType& uninit_unres_a_0 = cache.Uninitialized(unresolved_a, 0);
  const RegType& uninit_unres_b_0 = cache.Uninitialized(unresolved_b, 0);

  const RegType& number = cache.From(nullptr, "Ljava/lang/Number;", false);
  ASSERT_FALSE(number.IsUnresolvedReference());
  const RegType& integer = cache.From(nullptr, "Ljava/lang/Integer;", false);
  ASSERT_FALSE(integer.IsUnresolvedReference());

  const RegType& uninit_number_0 = cache.Uninitialized(number, 0u);
  const RegType& uninit_integer_0 = cache.Uninitialized(integer, 0u);

  const RegType& number_arr = cache.From(nullptr, "[Ljava/lang/Number;", false);
  ASSERT_FALSE(number_arr.IsUnresolvedReference());
  const RegType& integer_arr = cache.From(nullptr, "[Ljava/lang/Integer;", false);
  ASSERT_FALSE(integer_arr.IsUnresolvedReference());

  const RegType& number_arr_arr = cache.From(nullptr, "[[Ljava/lang/Number;", false);
  ASSERT_FALSE(number_arr_arr.IsUnresolvedReference());

  const RegType& char_arr = cache.From(nullptr, "[C", false);
  ASSERT_FALSE(char_arr.IsUnresolvedReference());
  const RegType& byte_arr = cache.From(nullptr, "[B", false);
  ASSERT_FALSE(byte_arr.IsUnresolvedReference());

  const RegType& unresolved_a_num = cache.FromUnresolvedMerge(unresolved_a, number, nullptr);
  ASSERT_TRUE(unresolved_a_num.IsUnresolvedMergedReference());
  const RegType& unresolved_b_num = cache.FromUnresolvedMerge(unresolved_b, number, nullptr);
  ASSERT_TRUE(unresolved_b_num.IsUnresolvedMergedReference());
  const RegType& unresolved_ab_num = cache.FromUnresolvedMerge(unresolved_ab, number, nullptr);
  ASSERT_TRUE(unresolved_ab_num.IsUnresolvedMergedReference());

  const RegType& unresolved_a_int = cache.FromUnresolvedMerge(unresolved_a, integer, nullptr);
  ASSERT_TRUE(unresolved_a_int.IsUnresolvedMergedReference());
  const RegType& unresolved_b_int = cache.FromUnresolvedMerge(unresolved_b, integer, nullptr);
  ASSERT_TRUE(unresolved_b_int.IsUnresolvedMergedReference());
  const RegType& unresolved_ab_int = cache.FromUnresolvedMerge(unresolved_ab, integer, nullptr);
  ASSERT_TRUE(unresolved_ab_int.IsUnresolvedMergedReference());
  std::vector<const RegType*> uninitialized_types = {
      &uninit_this, &uninit_obj_0, &uninit_obj_1, &uninit_number_0, &uninit_integer_0
  };
  std::vector<const RegType*> unresolved_types = {
      &unresolved_a,
      &unresolved_b,
      &unresolved_ab,
      &unresolved_a_num,
      &unresolved_b_num,
      &unresolved_ab_num,
      &unresolved_a_int,
      &unresolved_b_int,
      &unresolved_ab_int
  };
  std::vector<const RegType*> uninit_unresolved_types = {
      &uninit_unres_this, &uninit_unres_a_0, &uninit_unres_b_0
  };
  std::vector<const RegType*> plain_nonobj_classes = { &number, &integer };
  std::vector<const RegType*> plain_nonobj_arr_classes = {
      &number_arr,
      &number_arr_arr,
      &integer_arr,
      &char_arr,
  };
  // std::vector<const RegType*> others = { &conflict, &zero, &null, &obj, &int_type };

  std::vector<const RegType*> all_minus_uninit_conflict;
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   unresolved_types.begin(),
                                   unresolved_types.end());
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   plain_nonobj_classes.begin(),
                                   plain_nonobj_classes.end());
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   plain_nonobj_arr_classes.begin(),
                                   plain_nonobj_arr_classes.end());
  all_minus_uninit_conflict.push_back(&zero);
  all_minus_uninit_conflict.push_back(&null);
  all_minus_uninit_conflict.push_back(&obj);

  std::vector<const RegType*> all_minus_uninit;
  all_minus_uninit.insert(all_minus_uninit.end(),
                          all_minus_uninit_conflict.begin(),
                          all_minus_uninit_conflict.end());
  all_minus_uninit.push_back(&conflict);


  std::vector<const RegType*> all;
  all.insert(all.end(), uninitialized_types.begin(), uninitialized_types.end());
  all.insert(all.end(), uninit_unresolved_types.begin(), uninit_unresolved_types.end());
  all.insert(all.end(), all_minus_uninit.begin(), all_minus_uninit.end());
  all.push_back(&int_type);

  auto check = [&](const RegType& in1, const RegType& in2, const RegType& expected_out)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const RegType& merge_result = in1.SafeMerge(in2, &cache, nullptr);
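    // Register types handed out by the cache are canonical, so the expected result can be
    // compared by address.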
    EXPECT_EQ(&expected_out, &merge_result)
        << in1.Dump() << " x " << in2.Dump() << " = " << merge_result.Dump()
        << " != " << expected_out.Dump();
  };

  // Identity.
  {
    for (auto r : all) {
      check(*r, *r, *r);
    }
  }

  // Define a covering relation through a list of Edges. We'll then derive LUBs from this and
  // create checks for every pair of types.

  struct Edge {
    const RegType& from;
    const RegType& to;

    Edge(const RegType& from_, const RegType& to_) : from(from_), to(to_) {}
  };
  std::vector<Edge> edges;
#define ADD_EDGE(from, to) edges.emplace_back((from), (to))

  // To Conflict.
  {
    for (auto r : uninitialized_types) {
      ADD_EDGE(*r, conflict);
    }
    for (auto r : uninit_unresolved_types) {
      ADD_EDGE(*r, conflict);
    }
    ADD_EDGE(obj, conflict);
    ADD_EDGE(int_type, conflict);
  }

  ADD_EDGE(zero, null);

  // Unresolved.
  {
    ADD_EDGE(null, unresolved_a);
    ADD_EDGE(null, unresolved_b);
    ADD_EDGE(unresolved_a, unresolved_ab);
    ADD_EDGE(unresolved_b, unresolved_ab);

    ADD_EDGE(number, unresolved_a_num);
    ADD_EDGE(unresolved_a, unresolved_a_num);
    ADD_EDGE(number, unresolved_b_num);
    ADD_EDGE(unresolved_b, unresolved_b_num);
    ADD_EDGE(number, unresolved_ab_num);
    ADD_EDGE(unresolved_a_num, unresolved_ab_num);
    ADD_EDGE(unresolved_b_num, unresolved_ab_num);
    ADD_EDGE(unresolved_ab, unresolved_ab_num);

    ADD_EDGE(integer, unresolved_a_int);
    ADD_EDGE(unresolved_a, unresolved_a_int);
    ADD_EDGE(integer, unresolved_b_int);
    ADD_EDGE(unresolved_b, unresolved_b_int);
    ADD_EDGE(integer, unresolved_ab_int);
    ADD_EDGE(unresolved_a_int, unresolved_ab_int);
    ADD_EDGE(unresolved_b_int, unresolved_ab_int);
    ADD_EDGE(unresolved_ab, unresolved_ab_int);

    ADD_EDGE(unresolved_a_int, unresolved_a_num);
    ADD_EDGE(unresolved_b_int, unresolved_b_num);
    ADD_EDGE(unresolved_ab_int, unresolved_ab_num);

    ADD_EDGE(unresolved_ab_num, obj);
  }

  // Classes.
  {
    ADD_EDGE(null, integer);
    ADD_EDGE(integer, number);
    ADD_EDGE(number, obj);
  }

  // Arrays.
  {
    ADD_EDGE(integer_arr, number_arr);
    ADD_EDGE(number_arr, obj_arr);
    ADD_EDGE(obj_arr, obj);
    ADD_EDGE(number_arr_arr, obj_arr);

    ADD_EDGE(char_arr, obj);
    ADD_EDGE(byte_arr, obj);

    ADD_EDGE(null, integer_arr);
    ADD_EDGE(null, number_arr_arr);
    ADD_EDGE(null, char_arr);
    ADD_EDGE(null, byte_arr);
  }

  // Primitive.
  {
    ADD_EDGE(zero, int_type);
  }
#undef ADD_EDGE

  // Create merge triples by using the covering relation established by edges to derive the
  // expected merge for any pair of types.

  // Expect merge(in1, in2) == out.
  struct MergeExpectation {
    const RegType& in1;
    const RegType& in2;
    const RegType& out;

    MergeExpectation(const RegType& in1_, const RegType& in2_, const RegType& out_)
        : in1(in1_), in2(in2_), out(out_) {}
  };
  std::vector<MergeExpectation> expectations;

  for (auto r1 : all) {
    for (auto r2 : all) {
      if (r1 == r2) {
        continue;
      }

      // A very simple algorithm that would usually be run over adjacency lists. Our graph is
      // small, so it did not seem worth keeping a list per node; the usual O(n + |e|) bound
      // therefore does not apply, but that is acceptable.
      //
      // To compute r1 lub r2 = merge(r1, r2):
      //   1) Generate the reachable set of r1, name it grey.
      //   2) Mark all grey reachable nodes of r2 as black.
      //   3) Find black nodes with no in-edges from other black nodes.
      //   4) If |3)| == 1, that's the lub.
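      //   If the covering relation did not give this pair a unique least upper bound, step 4
      //   would leave more than one candidate and the ASSERT below would fail.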

      // Generic BFS of the graph induced by edges, starting at start. new_node will be called
      // with any discovered node, in order.
      auto bfs = [&](auto new_node, const RegType* start) {
        std::unordered_set<const RegType*> seen;
        std::queue<const RegType*> work_list;
        work_list.push(start);
        while (!work_list.empty()) {
          const RegType* cur = work_list.front();
          work_list.pop();
          auto it = seen.find(cur);
          if (it != seen.end()) {
            continue;
          }
          seen.insert(cur);
          new_node(cur);

          for (const Edge& edge : edges) {
            if (&edge.from == cur) {
              work_list.push(&edge.to);
            }
          }
        }
      };

      std::unordered_set<const RegType*> grey;
      auto compute_grey = [&](const RegType* cur) {
        grey.insert(cur);  // Mark discovered node as grey.
      };
      bfs(compute_grey, r1);

      std::set<const RegType*> black;
      auto compute_black = [&](const RegType* cur) {
        // Mark discovered grey node as black.
        if (grey.find(cur) != grey.end()) {
          black.insert(cur);
        }
      };
      bfs(compute_black, r2);

      std::set<const RegType*> no_in_edge(black);  // Copy of black, remove nodes with in-edges.
      for (auto r : black) {
        for (Edge& e : edges) {
          if (&e.from == r) {
            no_in_edge.erase(&e.to);  // It doesn't matter whether "to" is black or not, just
                                      // attempt to remove it.
          }
        }
      }

      // Helper to print sets when something went wrong.
      auto print_set = [](auto& container) REQUIRES_SHARED(Locks::mutator_lock_) {
        std::string result;
        for (auto r : container) {
          result.append(" + ");
          result.append(r->Dump());
        }
        return result;
      };
      ASSERT_EQ(no_in_edge.size(), 1u) << r1->Dump() << " u " << r2->Dump()
                                       << " grey=" << print_set(grey)
                                       << " black=" << print_set(black)
                                       << " no-in-edge=" << print_set(no_in_edge);
      expectations.emplace_back(*r1, *r2, **no_in_edge.begin());
    }
  }

  // Evaluate merge expectations. The merge is expected to be commutative.

  for (auto& triple : expectations) {
    check(triple.in1, triple.in2, triple.out);
    check(triple.in2, triple.in1, triple.out);
  }
}

TEST_F(RegTypeTest, ConstPrecision) {
  // Tests creating constant types with different precision.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& imprecise_const = cache_new.FromCat1Const(10, false);
  const RegType& precise_const = cache_new.FromCat1Const(10, true);

  EXPECT_TRUE(imprecise_const.IsImpreciseConstant());
  EXPECT_TRUE(precise_const.IsPreciseConstant());
  EXPECT_FALSE(imprecise_const.Equals(precise_const));
}

class RegTypeOOMTest : public RegTypeTest {
 protected:
  void SetUpRuntimeOptions(RuntimeOptions *options) override {
    SetUpRuntimeOptionsForFillHeap(options);

    // We must not appear to be a compiler, or we'll abort on the host.
    callbacks_.reset();
  }
};

TEST_F(RegTypeOOMTest, ClassJoinOOM) {
  // TODO: Figure out why FillHeap isn't good enough under CMS.
  TEST_DISABLED_WITHOUT_BAKER_READ_BARRIERS();

  // Tests that we don't abort with OOMs.

  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());

  ScopedDisableMovingGC no_gc(soa.Self());

  // We merge nested arrays of primitive wrappers. These have a join type of an array of Number of
  // the same depth. We start with depth five, as we want at least two newly created classes to
  // test recursion (it's just more likely that nobody uses such deep arrays in runtime bringup).
  constexpr const char* kIntArrayFive = "[[[[[Ljava/lang/Integer;";
  constexpr const char* kFloatArrayFive = "[[[[[Ljava/lang/Float;";
  constexpr const char* kNumberArrayFour = "[[[[Ljava/lang/Number;";
  constexpr const char* kNumberArrayFive = "[[[[[Ljava/lang/Number;";

  RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
  const RegType& int_array_array = cache.From(nullptr, kIntArrayFive, false);
  ASSERT_TRUE(int_array_array.HasClass());
  const RegType& float_array_array = cache.From(nullptr, kFloatArrayFive, false);
  ASSERT_TRUE(float_array_array.HasClass());

  // Check assumptions: the joined classes don't exist, yet.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ASSERT_TRUE(class_linker->LookupClass(soa.Self(), kNumberArrayFour, nullptr) == nullptr);
  ASSERT_TRUE(class_linker->LookupClass(soa.Self(), kNumberArrayFive, nullptr) == nullptr);

  // Fill the heap.
  VariableSizedHandleScope hs(soa.Self());
  FillHeap(soa.Self(), class_linker, &hs);

  const RegType& join_type = int_array_array.Merge(float_array_array, &cache, nullptr);
  ASSERT_TRUE(join_type.IsUnresolvedReference());
}

class RegTypeClassJoinTest : public RegTypeTest {
 protected:
  void TestClassJoin(const char* in1, const char* in2, const char* out) {
    ArenaStack stack(Runtime::Current()->GetArenaPool());
    ScopedArenaAllocator allocator(&stack);

    ScopedObjectAccess soa(Thread::Current());
    jobject jclass_loader = LoadDex("Interfaces");
    StackHandleScope<4> hs(soa.Self());
    Handle<mirror::ClassLoader> class_loader(
        hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));

    Handle<mirror::Class> c1(hs.NewHandle(
        class_linker_->FindClass(soa.Self(), in1, class_loader)));
    Handle<mirror::Class> c2(hs.NewHandle(
        class_linker_->FindClass(soa.Self(), in2, class_loader)));
    ASSERT_TRUE(c1 != nullptr);
    ASSERT_TRUE(c2 != nullptr);

    ScopedDisableMovingGC no_gc(soa.Self());

    RegTypeCache cache(Runtime::Current()->GetClassLinker(), true, allocator);
    const RegType& c1_reg_type = *cache.InsertClass(in1, c1.Get(), false);
    const RegType& c2_reg_type = *cache.InsertClass(in2, c2.Get(), false);

    const RegType& join_type = c1_reg_type.Merge(c2_reg_type, &cache, nullptr);
    EXPECT_TRUE(join_type.HasClass());
    EXPECT_EQ(join_type.GetDescriptor(), std::string_view(out));
  }
};

TEST_F(RegTypeClassJoinTest, ClassJoinInterfaces) {
  TestClassJoin("LInterfaces$K;", "LInterfaces$L;", "LInterfaces$J;");
}

TEST_F(RegTypeClassJoinTest, ClassJoinInterfaceClass) {
  TestClassJoin("LInterfaces$B;", "LInterfaces$L;", "LInterfaces$J;");
}

TEST_F(RegTypeClassJoinTest, ClassJoinClassClass) {
  // This test codifies that we prefer the class hierarchy over interfaces. It's a mostly
  // arbitrary choice; ideally we'd have set types and could handle multiple inheritance precisely.
  TestClassJoin("LInterfaces$A;", "LInterfaces$B;", "Ljava/lang/Object;");
}

}  // namespace verifier
}  // namespace art