/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "oat/stack_map.h"

#include "art_method.h"
#include "base/arena_bit_vector.h"
#include "base/macros.h"
#include "base/malloc_arena_pool.h"
#include "stack_map_stream.h"

#include "gtest/gtest.h"

namespace art HIDDEN {

// Check that the stack mask of the given stack map is identical
// to the given bit vector. Returns true if they are the same.
static bool CheckStackMask(
    const CodeInfo& code_info,
    const StackMap& stack_map,
    const BitVector& bit_vector) {
  BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
  if (bit_vector.GetNumberOfBits() > stack_mask.size_in_bits()) {
    return false;
  }
  for (size_t i = 0; i < stack_mask.size_in_bits(); ++i) {
    if (stack_mask.LoadBit(i) != bit_vector.IsBitSet(i)) {
      return false;
    }
  }
  return true;
}

using Kind = DexRegisterLocation::Kind;

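// Native PC offsets below are scaled by the instruction alignment of the
// runtime ISA so that they are valid, properly aligned native PCs for that ISA.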
constexpr static uint32_t kPcAlign = GetInstructionSetInstructionAlignment(kRuntimeISA);

TEST(StackMapTest, Test1) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(/* frame_size_in_bytes= */ 32,
                     /* core_spill_mask= */ 0,
                     /* fp_spill_mask= */ 0,
                     /* num_dex_registers= */ 2,
                     /* baseline= */ false,
                     /* debuggable= */ false);

  ArenaBitVector sp_mask(&allocator, 0, false);
  size_t number_of_dex_registers = 2;
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
  stream.EndStackMapEntry();

  stream.EndMethod(64 * kPcAlign);
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());

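  // Each distinct dex register location is recorded once in the location
  // catalog; here the stack slot and the constant yield two entries.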
  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(2u, number_of_catalog_entries);

  StackMap stack_map = code_info.GetStackMapAt(0);
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
  ASSERT_EQ(0u, stack_map.GetDexPc());
  ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
  ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

  ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask));

  ASSERT_TRUE(stack_map.HasDexRegisterMap());
  DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
  ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
  ASSERT_TRUE(dex_register_map[0].IsLive());
  ASSERT_TRUE(dex_register_map[1].IsLive());
  ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

  ASSERT_EQ(Kind::kInStack, dex_register_map[0].GetKind());
  ASSERT_EQ(Kind::kConstant, dex_register_map[1].GetKind());
  ASSERT_EQ(0, dex_register_map[0].GetStackOffsetInBytes());
  ASSERT_EQ(-2, dex_register_map[1].GetConstant());

  DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
  DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
  ASSERT_EQ(Kind::kInStack, location0.GetKind());
  ASSERT_EQ(Kind::kConstant, location1.GetKind());
  ASSERT_EQ(0, location0.GetValue());
  ASSERT_EQ(-2, location1.GetValue());

  ASSERT_FALSE(stack_map.HasInlineInfo());
}

TEST(StackMapTest, Test2) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(/* frame_size_in_bytes= */ 32,
                     /* core_spill_mask= */ 0,
                     /* fp_spill_mask= */ 0,
                     /* num_dex_registers= */ 2,
                     /* baseline= */ false,
                     /* debuggable= */ false);
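  // Placeholder method; the inline frames below encode its ArtMethod pointer
  // directly (checked via EncodesArtMethod()).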
  ArtMethod art_method;

  ArenaBitVector sp_mask1(&allocator, 0, true);
  sp_mask1.SetBit(2);
  sp_mask1.SetBit(4);
  size_t number_of_dex_registers = 2;
  size_t number_of_dex_registers_in_inline_info = 0;
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
  stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 2, number_of_dex_registers_in_inline_info);
  stream.EndInlineInfoEntry();
  stream.EndStackMapEntry();

  ArenaBitVector sp_mask2(&allocator, 0, true);
  sp_mask2.SetBit(3);
  sp_mask2.SetBit(8);
  stream.BeginStackMapEntry(1, 128 * kPcAlign, 0xFF, &sp_mask2);
  stream.AddDexRegisterEntry(Kind::kInRegister, 18);     // Short location.
  stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3);   // Short location.
  stream.EndStackMapEntry();

  ArenaBitVector sp_mask3(&allocator, 0, true);
  sp_mask3.SetBit(1);
  sp_mask3.SetBit(5);
  stream.BeginStackMapEntry(2, 192 * kPcAlign, 0xAB, &sp_mask3);
  stream.AddDexRegisterEntry(Kind::kInRegister, 6);       // Short location.
  stream.AddDexRegisterEntry(Kind::kInRegisterHigh, 8);   // Short location.
  stream.EndStackMapEntry();

  ArenaBitVector sp_mask4(&allocator, 0, true);
  sp_mask4.SetBit(6);
  sp_mask4.SetBit(7);
  stream.BeginStackMapEntry(3, 256 * kPcAlign, 0xCD, &sp_mask4);
  stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3);      // Short location, same in stack map 2.
  stream.AddDexRegisterEntry(Kind::kInFpuRegisterHigh, 1);  // Short location.
  stream.EndStackMapEntry();

  stream.EndMethod(256 * kPcAlign);
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(4u, code_info.GetNumberOfStackMaps());

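  // Eight locations are emitted across the four stack maps, but
  // (kInFpuRegister, 3) is reused by the fourth map, so the catalog holds
  // seven distinct entries.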
  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(7u, number_of_catalog_entries);

  // First stack map.
  {
    StackMap stack_map = code_info.GetStackMapAt(0);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
    ASSERT_EQ(0u, stack_map.GetDexPc());
    ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
    ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
    ASSERT_TRUE(dex_register_map[0].IsLive());
    ASSERT_TRUE(dex_register_map[1].IsLive());
    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInStack, dex_register_map[0].GetKind());
    ASSERT_EQ(Kind::kConstant, dex_register_map[1].GetKind());
    ASSERT_EQ(0, dex_register_map[0].GetStackOffsetInBytes());
    ASSERT_EQ(-2, dex_register_map[1].GetConstant());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
    ASSERT_EQ(Kind::kInStack, location0.GetKind());
    ASSERT_EQ(Kind::kConstant, location1.GetKind());
    ASSERT_EQ(0, location0.GetValue());
    ASSERT_EQ(-2, location1.GetValue());

    ASSERT_TRUE(stack_map.HasInlineInfo());
    auto inline_infos = code_info.GetInlineInfosOf(stack_map);
    ASSERT_EQ(2u, inline_infos.size());
    ASSERT_EQ(3u, inline_infos[0].GetDexPc());
    ASSERT_EQ(2u, inline_infos[1].GetDexPc());
    ASSERT_TRUE(inline_infos[0].EncodesArtMethod());
    ASSERT_TRUE(inline_infos[1].EncodesArtMethod());
  }

  // Second stack map.
  {
    StackMap stack_map = code_info.GetStackMapAt(1);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u * kPcAlign)));
    ASSERT_EQ(1u, stack_map.GetDexPc());
    ASSERT_EQ(128u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0xFFu, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask2));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
    ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
    ASSERT_TRUE(dex_register_map[0].IsLive());
    ASSERT_TRUE(dex_register_map[1].IsLive());
    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInRegister, dex_register_map[0].GetKind());
    ASSERT_EQ(Kind::kInFpuRegister, dex_register_map[1].GetKind());
    ASSERT_EQ(18, dex_register_map[0].GetMachineRegister());
    ASSERT_EQ(3, dex_register_map[1].GetMachineRegister());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(2);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(3);
    ASSERT_EQ(Kind::kInRegister, location0.GetKind());
    ASSERT_EQ(Kind::kInFpuRegister, location1.GetKind());
    ASSERT_EQ(18, location0.GetValue());
    ASSERT_EQ(3, location1.GetValue());

    ASSERT_FALSE(stack_map.HasInlineInfo());
  }

  // Third stack map.
  {
    StackMap stack_map = code_info.GetStackMapAt(2);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(2u)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(192u * kPcAlign)));
    ASSERT_EQ(2u, stack_map.GetDexPc());
    ASSERT_EQ(192u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0xABu, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask3));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
    ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
    ASSERT_TRUE(dex_register_map[0].IsLive());
    ASSERT_TRUE(dex_register_map[1].IsLive());
    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInRegister, dex_register_map[0].GetKind());
    ASSERT_EQ(Kind::kInRegisterHigh, dex_register_map[1].GetKind());
    ASSERT_EQ(6, dex_register_map[0].GetMachineRegister());
    ASSERT_EQ(8, dex_register_map[1].GetMachineRegister());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(4);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(5);
    ASSERT_EQ(Kind::kInRegister, location0.GetKind());
    ASSERT_EQ(Kind::kInRegisterHigh, location1.GetKind());
    ASSERT_EQ(6, location0.GetValue());
    ASSERT_EQ(8, location1.GetValue());

    ASSERT_FALSE(stack_map.HasInlineInfo());
  }

  // Fourth stack map.
  {
    StackMap stack_map = code_info.GetStackMapAt(3);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(3u)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(256u * kPcAlign)));
    ASSERT_EQ(3u, stack_map.GetDexPc());
    ASSERT_EQ(256u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0xCDu, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask4));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
    ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
    ASSERT_TRUE(dex_register_map[0].IsLive());
    ASSERT_TRUE(dex_register_map[1].IsLive());
    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInFpuRegister, dex_register_map[0].GetKind());
    ASSERT_EQ(Kind::kInFpuRegisterHigh, dex_register_map[1].GetKind());
    ASSERT_EQ(3, dex_register_map[0].GetMachineRegister());
    ASSERT_EQ(1, dex_register_map[1].GetMachineRegister());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(3);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(6);
    ASSERT_EQ(Kind::kInFpuRegister, location0.GetKind());
    ASSERT_EQ(Kind::kInFpuRegisterHigh, location1.GetKind());
    ASSERT_EQ(3, location0.GetValue());
    ASSERT_EQ(1, location1.GetValue());

    ASSERT_FALSE(stack_map.HasInlineInfo());
  }
}

TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(/* frame_size_in_bytes= */ 32,
                     /* core_spill_mask= */ 0,
                     /* fp_spill_mask= */ 0,
                     /* num_dex_registers= */ 2,
                     /* baseline= */ false,
                     /* debuggable= */ false);
  ArtMethod art_method;

  ArenaBitVector sp_mask1(&allocator, 0, true);
  sp_mask1.SetBit(2);
  sp_mask1.SetBit(4);
  const size_t number_of_dex_registers = 2;
  const size_t number_of_dex_registers_in_inline_info = 2;
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
  stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
  stream.EndInlineInfoEntry();
  stream.EndStackMapEntry();

  stream.EndMethod(64 * kPcAlign);
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());

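  // The inline frame repeats the outer frame's locations exactly, so the
  // deduplicated location catalog still contains only two entries.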
  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(2u, number_of_catalog_entries);

  // First stack map.
  {
    StackMap stack_map = code_info.GetStackMapAt(0);
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
    ASSERT_EQ(0u, stack_map.GetDexPc());
    ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
    ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

    ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));

    ASSERT_TRUE(stack_map.HasDexRegisterMap());
    DexRegisterMap map(code_info.GetDexRegisterMapOf(stack_map));
    ASSERT_EQ(number_of_dex_registers, map.size());
    ASSERT_TRUE(map[0].IsLive());
    ASSERT_TRUE(map[1].IsLive());
    ASSERT_EQ(2u, map.GetNumberOfLiveDexRegisters());

    ASSERT_EQ(Kind::kInStack, map[0].GetKind());
    ASSERT_EQ(Kind::kConstant, map[1].GetKind());
    ASSERT_EQ(0, map[0].GetStackOffsetInBytes());
    ASSERT_EQ(-2, map[1].GetConstant());

    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
    ASSERT_EQ(Kind::kInStack, location0.GetKind());
    ASSERT_EQ(Kind::kConstant, location1.GetKind());
    ASSERT_EQ(0, location0.GetValue());
    ASSERT_EQ(-2, location1.GetValue());
  }
}

TEST(StackMapTest, TestNonLiveDexRegisters) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(/* frame_size_in_bytes= */ 32,
                     /* core_spill_mask= */ 0,
                     /* fp_spill_mask= */ 0,
                     /* num_dex_registers= */ 2,
                     /* baseline= */ false,
                     /* debuggable= */ false);

  ArenaBitVector sp_mask(&allocator, 0, false);
  uint32_t number_of_dex_registers = 2;
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kNone, 0);            // No location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
  stream.EndStackMapEntry();

  stream.EndMethod(64 * kPcAlign);
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());

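  // The dead (kNone) register contributes nothing to the catalog; only the
  // live constant is recorded.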
  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(1u, number_of_catalog_entries);

  StackMap stack_map = code_info.GetStackMapAt(0);
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
  ASSERT_EQ(0u, stack_map.GetDexPc());
  ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
  ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

  ASSERT_TRUE(stack_map.HasDexRegisterMap());
  DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map);
  ASSERT_EQ(number_of_dex_registers, dex_register_map.size());
  ASSERT_FALSE(dex_register_map[0].IsLive());
  ASSERT_TRUE(dex_register_map[1].IsLive());
  ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters());

  ASSERT_EQ(Kind::kNone, dex_register_map[0].GetKind());
  ASSERT_EQ(Kind::kConstant, dex_register_map[1].GetKind());
  ASSERT_EQ(-2, dex_register_map[1].GetConstant());

  DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(0);
  ASSERT_EQ(Kind::kConstant, location1.GetKind());
  ASSERT_EQ(-2, location1.GetValue());

  ASSERT_FALSE(stack_map.HasInlineInfo());
}

TEST(StackMapTest, TestShareDexRegisterMap) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(/* frame_size_in_bytes= */ 32,
                     /* core_spill_mask= */ 0,
                     /* fp_spill_mask= */ 0,
                     /* num_dex_registers= */ 2,
                     /* baseline= */ false,
                     /* debuggable= */ false);

  ArenaBitVector sp_mask(&allocator, 0, false);
  uint32_t number_of_dex_registers = 2;
  // First stack map.
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kInRegister, 0);  // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
  stream.EndStackMapEntry();
  // Second stack map, which should share the same dex register map.
  stream.BeginStackMapEntry(0, 65 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kInRegister, 0);  // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
  stream.EndStackMapEntry();
  // Third stack map (doesn't share the dex register map).
  stream.BeginStackMapEntry(0, 66 * kPcAlign, 0x3, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kInRegister, 2);  // Short location.
  stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
  stream.EndStackMapEntry();

  stream.EndMethod(66 * kPcAlign);
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo ci(memory.data());

  // Verify first stack map.
  StackMap sm0 = ci.GetStackMapAt(0);
  DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0);
  ASSERT_EQ(number_of_dex_registers, dex_registers0.size());
  ASSERT_EQ(0, dex_registers0[0].GetMachineRegister());
  ASSERT_EQ(-2, dex_registers0[1].GetConstant());

  // Verify second stack map.
  StackMap sm1 = ci.GetStackMapAt(1);
  DexRegisterMap dex_registers1 = ci.GetDexRegisterMapOf(sm1);
  ASSERT_EQ(number_of_dex_registers, dex_registers1.size());
  ASSERT_EQ(0, dex_registers1[0].GetMachineRegister());
  ASSERT_EQ(-2, dex_registers1[1].GetConstant());

  // Verify third stack map.
  StackMap sm2 = ci.GetStackMapAt(2);
  DexRegisterMap dex_registers2 = ci.GetDexRegisterMapOf(sm2);
  ASSERT_EQ(number_of_dex_registers, dex_registers2.size());
  ASSERT_EQ(2, dex_registers2[0].GetMachineRegister());
  ASSERT_EQ(-2, dex_registers2[1].GetConstant());

  // Verify dex register mask offsets.
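  // sm1 reuses sm0's dex register data unchanged and so records no new mask
  // index; sm2's registers differ, so it does record one.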
  ASSERT_FALSE(sm1.HasDexRegisterMaskIndex());  // No delta.
  ASSERT_TRUE(sm2.HasDexRegisterMaskIndex());  // Has delta.
}

TEST(StackMapTest, TestNoDexRegisterMap) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(/* frame_size_in_bytes= */ 32,
                     /* core_spill_mask= */ 0,
                     /* fp_spill_mask= */ 0,
                     /* num_dex_registers= */ 1,
                     /* baseline= */ false,
                     /* debuggable= */ false);

  ArenaBitVector sp_mask(&allocator, 0, false);
  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
  stream.EndStackMapEntry();

  stream.BeginStackMapEntry(1, 68 * kPcAlign, 0x4, &sp_mask);
  stream.AddDexRegisterEntry(Kind::kNone, 0);
  stream.EndStackMapEntry();

  stream.EndMethod(68 * kPcAlign);
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());

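  // Neither stack map records a live location, so the location catalog is empty.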
  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
  ASSERT_EQ(0u, number_of_catalog_entries);

  StackMap stack_map = code_info.GetStackMapAt(0);
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
  ASSERT_EQ(0u, stack_map.GetDexPc());
  ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
  ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));

  ASSERT_FALSE(stack_map.HasDexRegisterMap());
  ASSERT_FALSE(stack_map.HasInlineInfo());

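  // The second stack map did record a (dead) dex register entry, so it carries
  // a dex register map even though the catalog has no entry for it.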
  stack_map = code_info.GetStackMapAt(1);
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1)));
  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(68 * kPcAlign)));
  ASSERT_EQ(1u, stack_map.GetDexPc());
  ASSERT_EQ(68u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
  ASSERT_EQ(0x4u, code_info.GetRegisterMaskOf(stack_map));

  ASSERT_TRUE(stack_map.HasDexRegisterMap());
  ASSERT_FALSE(stack_map.HasInlineInfo());
}

TEST(StackMapTest, InlineTest) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(/* frame_size_in_bytes= */ 32,
                     /* core_spill_mask= */ 0,
                     /* fp_spill_mask= */ 0,
                     /* num_dex_registers= */ 2,
                     /* baseline= */ false,
                     /* debuggable= */ false);
  ArtMethod art_method;

  ArenaBitVector sp_mask1(&allocator, 0, true);
  sp_mask1.SetBit(2);
  sp_mask1.SetBit(4);

  // First stack map.
  stream.BeginStackMapEntry(0, 10 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 0);
  stream.AddDexRegisterEntry(Kind::kConstant, 4);

  stream.BeginInlineInfoEntry(&art_method, 2, 1);
  stream.AddDexRegisterEntry(Kind::kInStack, 8);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 3, 3);
  stream.AddDexRegisterEntry(Kind::kInStack, 16);
  stream.AddDexRegisterEntry(Kind::kConstant, 20);
  stream.AddDexRegisterEntry(Kind::kInRegister, 15);
  stream.EndInlineInfoEntry();

  stream.EndStackMapEntry();

  // Second stack map.
  stream.BeginStackMapEntry(2, 22 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 56);
  stream.AddDexRegisterEntry(Kind::kConstant, 0);

  stream.BeginInlineInfoEntry(&art_method, 2, 1);
  stream.AddDexRegisterEntry(Kind::kInStack, 12);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 3, 3);
  stream.AddDexRegisterEntry(Kind::kInStack, 80);
  stream.AddDexRegisterEntry(Kind::kConstant, 10);
  stream.AddDexRegisterEntry(Kind::kInRegister, 5);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 5, 0);
  stream.EndInlineInfoEntry();

  stream.EndStackMapEntry();

  // Third stack map.
  stream.BeginStackMapEntry(4, 56 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kNone, 0);
  stream.AddDexRegisterEntry(Kind::kConstant, 4);
  stream.EndStackMapEntry();

  // Fourth stack map.
  stream.BeginStackMapEntry(6, 78 * kPcAlign, 0x3, &sp_mask1);
  stream.AddDexRegisterEntry(Kind::kInStack, 56);
  stream.AddDexRegisterEntry(Kind::kConstant, 0);

  stream.BeginInlineInfoEntry(&art_method, 2, 0);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 5, 1);
  stream.AddDexRegisterEntry(Kind::kInRegister, 2);
  stream.EndInlineInfoEntry();
  stream.BeginInlineInfoEntry(&art_method, 10, 2);
  stream.AddDexRegisterEntry(Kind::kNone, 0);
  stream.AddDexRegisterEntry(Kind::kInRegister, 3);
  stream.EndInlineInfoEntry();

  stream.EndStackMapEntry();

  stream.EndMethod(78 * kPcAlign);
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo ci(memory.data());

  {
    // Verify first stack map.
    StackMap sm0 = ci.GetStackMapAt(0);

    DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0);
    ASSERT_EQ(2u, dex_registers0.size());
    ASSERT_EQ(0, dex_registers0[0].GetStackOffsetInBytes());
    ASSERT_EQ(4, dex_registers0[1].GetConstant());

    auto inline_infos = ci.GetInlineInfosOf(sm0);
    ASSERT_EQ(2u, inline_infos.size());
    ASSERT_EQ(2u, inline_infos[0].GetDexPc());
    ASSERT_TRUE(inline_infos[0].EncodesArtMethod());
    ASSERT_EQ(3u, inline_infos[1].GetDexPc());
    ASSERT_TRUE(inline_infos[1].EncodesArtMethod());

    DexRegisterMap dex_registers1 = ci.GetInlineDexRegisterMapOf(sm0, inline_infos[0]);
    ASSERT_EQ(1u, dex_registers1.size());
    ASSERT_EQ(8, dex_registers1[0].GetStackOffsetInBytes());

    DexRegisterMap dex_registers2 = ci.GetInlineDexRegisterMapOf(sm0, inline_infos[1]);
    ASSERT_EQ(3u, dex_registers2.size());
    ASSERT_EQ(16, dex_registers2[0].GetStackOffsetInBytes());
    ASSERT_EQ(20, dex_registers2[1].GetConstant());
    ASSERT_EQ(15, dex_registers2[2].GetMachineRegister());
  }

  {
    // Verify second stack map.
    StackMap sm1 = ci.GetStackMapAt(1);

    DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm1);
    ASSERT_EQ(2u, dex_registers0.size());
    ASSERT_EQ(56, dex_registers0[0].GetStackOffsetInBytes());
    ASSERT_EQ(0, dex_registers0[1].GetConstant());

    auto inline_infos = ci.GetInlineInfosOf(sm1);
    ASSERT_EQ(3u, inline_infos.size());
    ASSERT_EQ(2u, inline_infos[0].GetDexPc());
    ASSERT_TRUE(inline_infos[0].EncodesArtMethod());
    ASSERT_EQ(3u, inline_infos[1].GetDexPc());
    ASSERT_TRUE(inline_infos[1].EncodesArtMethod());
    ASSERT_EQ(5u, inline_infos[2].GetDexPc());
    ASSERT_TRUE(inline_infos[2].EncodesArtMethod());

    DexRegisterMap dex_registers1 = ci.GetInlineDexRegisterMapOf(sm1, inline_infos[0]);
    ASSERT_EQ(1u, dex_registers1.size());
    ASSERT_EQ(12, dex_registers1[0].GetStackOffsetInBytes());

    DexRegisterMap dex_registers2 = ci.GetInlineDexRegisterMapOf(sm1, inline_infos[1]);
    ASSERT_EQ(3u, dex_registers2.size());
    ASSERT_EQ(80, dex_registers2[0].GetStackOffsetInBytes());
    ASSERT_EQ(10, dex_registers2[1].GetConstant());
    ASSERT_EQ(5, dex_registers2[2].GetMachineRegister());
  }

  {
    // Verify third stack map.
    StackMap sm2 = ci.GetStackMapAt(2);

    DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2);
    ASSERT_EQ(2u, dex_registers0.size());
    ASSERT_FALSE(dex_registers0[0].IsLive());
    ASSERT_EQ(4, dex_registers0[1].GetConstant());
    ASSERT_FALSE(sm2.HasInlineInfo());
  }

  {
    // Verify fourth stack map.
    StackMap sm3 = ci.GetStackMapAt(3);

    DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm3);
    ASSERT_EQ(2u, dex_registers0.size());
    ASSERT_EQ(56, dex_registers0[0].GetStackOffsetInBytes());
    ASSERT_EQ(0, dex_registers0[1].GetConstant());

    auto inline_infos = ci.GetInlineInfosOf(sm3);
    ASSERT_EQ(3u, inline_infos.size());
    ASSERT_EQ(2u, inline_infos[0].GetDexPc());
    ASSERT_TRUE(inline_infos[0].EncodesArtMethod());
    ASSERT_EQ(5u, inline_infos[1].GetDexPc());
    ASSERT_TRUE(inline_infos[1].EncodesArtMethod());
    ASSERT_EQ(10u, inline_infos[2].GetDexPc());
    ASSERT_TRUE(inline_infos[2].EncodesArtMethod());

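    // The first inline frame declared no dex registers, so only the second and
    // third inline frames have register maps to check.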
    DexRegisterMap dex_registers1 = ci.GetInlineDexRegisterMapOf(sm3, inline_infos[1]);
    ASSERT_EQ(1u, dex_registers1.size());
    ASSERT_EQ(2, dex_registers1[0].GetMachineRegister());

    DexRegisterMap dex_registers2 = ci.GetInlineDexRegisterMapOf(sm3, inline_infos[2]);
    ASSERT_EQ(2u, dex_registers2.size());
    ASSERT_FALSE(dex_registers2[0].IsLive());
    ASSERT_EQ(3, dex_registers2[1].GetMachineRegister());
  }
}

TEST(StackMapTest, PackedNativePcTest) {
  // Test minimum alignments, and decoding.
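  // Packing the minimum alignment value for each ISA must round-trip unchanged
  // through UnpackNativePc.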
  uint32_t packed_thumb2 =
      StackMap::PackNativePc(kThumb2InstructionAlignment, InstructionSet::kThumb2);
  uint32_t packed_arm64 =
      StackMap::PackNativePc(kArm64InstructionAlignment, InstructionSet::kArm64);
  uint32_t packed_x86 =
      StackMap::PackNativePc(kX86InstructionAlignment, InstructionSet::kX86);
  uint32_t packed_x86_64 =
      StackMap::PackNativePc(kX86_64InstructionAlignment, InstructionSet::kX86_64);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_thumb2, InstructionSet::kThumb2),
            kThumb2InstructionAlignment);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_arm64, InstructionSet::kArm64),
            kArm64InstructionAlignment);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_x86, InstructionSet::kX86),
            kX86InstructionAlignment);
  EXPECT_EQ(StackMap::UnpackNativePc(packed_x86_64, InstructionSet::kX86_64),
            kX86_64InstructionAlignment);
}

TEST(StackMapTest, TestDeduplicateStackMask) {
  MallocArenaPool pool;
  ArenaStack arena_stack(&pool);
  ScopedArenaAllocator allocator(&arena_stack);
  StackMapStream stream(&allocator, kRuntimeISA);
  stream.BeginMethod(/* frame_size_in_bytes= */ 32,
                     /* core_spill_mask= */ 0,
                     /* fp_spill_mask= */ 0,
                     /* num_dex_registers= */ 0,
                     /* baseline= */ false,
                     /* debuggable= */ false);

  ArenaBitVector sp_mask(&allocator, 0, true);
  sp_mask.SetBit(1);
  sp_mask.SetBit(4);
  stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask);
  stream.EndStackMapEntry();
  stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask);
  stream.EndStackMapEntry();

  stream.EndMethod(8 * kPcAlign);
  ScopedArenaVector<uint8_t> memory = stream.Encode();

  CodeInfo code_info(memory.data());
  ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());

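  // Both stack maps were given the same stack mask, so they should share a
  // single encoded mask, i.e. the same stack mask index.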
  StackMap stack_map1 = code_info.GetStackMapForNativePcOffset(4 * kPcAlign);
  StackMap stack_map2 = code_info.GetStackMapForNativePcOffset(8 * kPcAlign);
  EXPECT_EQ(stack_map1.GetStackMaskIndex(),
            stack_map2.GetStackMaskIndex());
}

}  // namespace art