/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack_map.h"

#include <iomanip>
#include <stdint.h>

#include "art_method.h"
#include "base/indenter.h"
#include "base/stats.h"
#include "oat_quick_method_header.h"
#include "scoped_thread_state_change-inl.h"

namespace art {

CodeInfo::CodeInfo(const OatQuickMethodHeader* header, DecodeFlags flags)
    : CodeInfo(header->GetOptimizedCodeInfoPtr(), flags) {
}

// Returns true if the decoded table was deduped.
template<typename Accessor>
ALWAYS_INLINE static bool DecodeTable(BitTable<Accessor>& table, BitMemoryReader& reader) {
  bool is_deduped = reader.ReadBit();
  if (is_deduped) {
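    // The varint stores how many bits backwards (from the current read position) the
    // previously written copy of this table starts; the copy may live in an earlier
    // CodeInfo, so the resulting absolute bit offset can be negative.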
    ssize_t bit_offset = reader.NumberOfReadBits() - reader.ReadVarint();
    BitMemoryReader reader2(reader.data(), bit_offset);  // The offset is negative.
    table.Decode(reader2);
  } else {
    table.Decode(reader);
  }
  return is_deduped;
}

void CodeInfo::Decode(const uint8_t* data, DecodeFlags flags) {
  BitMemoryReader reader(data);
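  // The encoding is a sequence of varint header fields followed by the bit tables.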
  ForEachHeaderField([this, &reader](auto member_pointer) {
    this->*member_pointer = reader.ReadVarint();
  });
  ForEachBitTableField([this, &reader](auto member_pointer) {
    DecodeTable(this->*member_pointer, reader);
  }, flags);
  size_in_bits_ = reader.NumberOfReadBits();
}

size_t CodeInfo::Deduper::Dedupe(const uint8_t* code_info_data) {
  writer_.ByteAlign();
  size_t deduped_offset = writer_.NumberOfWrittenBits() / kBitsPerByte;
  BitMemoryReader reader(code_info_data);
  CodeInfo code_info;  // Temporary storage for decoded data.
  ForEachHeaderField([this, &reader, &code_info](auto member_pointer) {
    code_info.*member_pointer = reader.ReadVarint();
    writer_.WriteVarint(code_info.*member_pointer);
  });
  ForEachBitTableField([this, &reader, &code_info](auto member_pointer) {
    bool is_deduped = reader.ReadBit();
    DCHECK(!is_deduped);
    size_t bit_table_start = reader.NumberOfReadBits();
    (code_info.*member_pointer).Decode(reader);
    BitMemoryRegion region = reader.GetReadRegion().Subregion(bit_table_start);
    auto it = dedupe_map_.insert(std::make_pair(region, /* placeholder */ 0));
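    // Write the table inline if it has not been seen before, or if it is so small
    // (fewer than 32 bits) that a back-reference would presumably not save space.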
    if (it.second /* new bit table */ || region.size_in_bits() < 32) {
      writer_.WriteBit(false);  // Is not deduped.
      it.first->second = writer_.NumberOfWrittenBits();
      writer_.WriteRegion(region);
    } else {
      writer_.WriteBit(true);  // Is deduped.
      size_t bit_offset = writer_.NumberOfWrittenBits();
      writer_.WriteVarint(bit_offset - it.first->second);
    }
  });

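  // In debug builds, re-decode the just-written data and check that it matches the input.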
  if (kIsDebugBuild) {
    CodeInfo old_code_info(code_info_data);
    CodeInfo new_code_info(writer_.data() + deduped_offset);
    ForEachHeaderField([&old_code_info, &new_code_info](auto member_pointer) {
      DCHECK_EQ(old_code_info.*member_pointer, new_code_info.*member_pointer);
    });
    ForEachBitTableField([&old_code_info, &new_code_info](auto member_pointer) {
      DCHECK((old_code_info.*member_pointer).Equals(new_code_info.*member_pointer));
    });
  }

  return deduped_offset;
}

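// Returns the first stack map for which the predicate below is false, i.e. the lower
// bound among the regular (non-catch) stack maps for the given packed native pc. This
// relies on the table being sorted by packed native pc, with Catch stack maps presumably
// grouped at the end so that the range stays partitioned for std::partition_point.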
BitTable<StackMap>::const_iterator CodeInfo::BinarySearchNativePc(uint32_t packed_pc) const {
  return std::partition_point(
      stack_maps_.begin(),
      stack_maps_.end(),
      [packed_pc](const StackMap& sm) {
        return sm.GetPackedNativePc() < packed_pc && sm.GetKind() != StackMap::Kind::Catch;
      });
}

StackMap CodeInfo::GetStackMapForNativePcOffset(uint32_t pc, InstructionSet isa) const {
  auto it = BinarySearchNativePc(StackMap::PackNativePc(pc, isa));
  // Start at the lower bound and iterate over all stack maps with the given native pc.
  for (; it != stack_maps_.end() && (*it).GetNativePcOffset(isa) == pc; ++it) {
    StackMap::Kind kind = static_cast<StackMap::Kind>((*it).GetKind());
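    // Only Default and OSR stack maps are returned here; Catch stack maps at the same pc
    // are presumably looked up through a separate, exception-handling specific path.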
    if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
      return *it;
    }
  }
  return stack_maps_.GetInvalidRow();
}

// Scan backward to determine dex register locations at the given stack map.
// All registers for a stack map are combined - inlined registers are just appended,
// therefore 'first_dex_register' allows us to select a sub-range to decode.
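// Each stack map records only the registers whose location changed since the previous
// stack map, so the current value of a register is found in the closest preceding map
// that mentions it.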
void CodeInfo::DecodeDexRegisterMap(uint32_t stack_map_index,
                                    uint32_t first_dex_register,
                                    /*out*/ DexRegisterMap* map) const {
  // Count remaining work so we know when we have finished.
  uint32_t remaining_registers = map->size();

  // Keep scanning backwards and collect the most recent location of each register.
  for (int32_t s = stack_map_index; s >= 0 && remaining_registers != 0; s--) {
    StackMap stack_map = GetStackMapAt(s);
    DCHECK_LE(stack_map_index - s, kMaxDexRegisterMapSearchDistance) << "Unbounded search";

    // The mask specifies which registers were modified in this stack map.
    // NB: the mask can be shorter than expected if trailing zero bits were removed.
    uint32_t mask_index = stack_map.GetDexRegisterMaskIndex();
    if (mask_index == StackMap::kNoValue) {
      continue;  // Nothing changed at this stack map.
    }
    BitMemoryRegion mask = dex_register_masks_.GetBitMemoryRegion(mask_index);
    if (mask.size_in_bits() <= first_dex_register) {
      continue;  // Nothing changed after the first register we are interested in.
    }

    // The map stores one catalogue index for each modified register location.
    uint32_t map_index = stack_map.GetDexRegisterMapIndex();
    DCHECK_NE(map_index, StackMap::kNoValue);

    // Skip initial registers which we are not interested in (to get to inlined registers).
    map_index += mask.PopCount(0, first_dex_register);
    mask = mask.Subregion(first_dex_register, mask.size_in_bits() - first_dex_register);

    // Update registers that we see for the first time (i.e. their most recent value).
    DexRegisterLocation* regs = map->data();
    const uint32_t end = std::min<uint32_t>(map->size(), mask.size_in_bits());
    const size_t kNumBits = BitSizeOf<uint32_t>();
    for (uint32_t reg = 0; reg < end; reg += kNumBits) {
      // Process the mask in chunks of kNumBits for performance.
      uint32_t bits = mask.LoadBits(reg, std::min<uint32_t>(end - reg, kNumBits));
      while (bits != 0) {
        uint32_t bit = CTZ(bits);
        if (regs[reg + bit].GetKind() == DexRegisterLocation::Kind::kInvalid) {
          regs[reg + bit] = GetDexRegisterCatalogEntry(dex_register_maps_.Get(map_index));
          remaining_registers--;
        }
        map_index++;
        bits ^= 1u << bit;  // Clear the bit.
      }
    }
  }

  // Set any remaining registers to None (which is the default state at the first stack map).
  if (remaining_registers != 0) {
    DexRegisterLocation* regs = map->data();
    for (uint32_t r = 0; r < map->size(); r++) {
      if (regs[r].GetKind() == DexRegisterLocation::Kind::kInvalid) {
        regs[r] = DexRegisterLocation::None();
      }
    }
  }
}

// Decode the CodeInfo while collecting size statistics.
void CodeInfo::CollectSizeStats(const uint8_t* code_info_data, /*out*/ Stats* parent) {
  Stats* codeinfo_stats = parent->Child("CodeInfo");
  BitMemoryReader reader(code_info_data);
  ForEachHeaderField([&reader](auto) { reader.ReadVarint(); });
  codeinfo_stats->Child("Header")->AddBits(reader.NumberOfReadBits());
  CodeInfo code_info;  // Temporary storage for decoded tables.
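  // Attribute the bits of each table either to a "DedupeOffset" back-reference or to the
  // table itself, broken down per column.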
  ForEachBitTableField([codeinfo_stats, &reader, &code_info](auto member_pointer) {
    auto& table = code_info.*member_pointer;
    size_t bit_offset = reader.NumberOfReadBits();
    bool deduped = DecodeTable(table, reader);
    if (deduped) {
      codeinfo_stats->Child("DedupeOffset")->AddBits(reader.NumberOfReadBits() - bit_offset);
    } else {
      Stats* table_stats = codeinfo_stats->Child(table.GetName());
      table_stats->AddBits(reader.NumberOfReadBits() - bit_offset);
      const char* const* column_names = table.GetColumnNames();
      for (size_t c = 0; c < table.NumColumns(); c++) {
        if (table.NumColumnBits(c) > 0) {
          Stats* column_stats = table_stats->Child(column_names[c]);
          column_stats->AddBits(table.NumRows() * table.NumColumnBits(c), table.NumRows());
        }
      }
    }
  });
  codeinfo_stats->AddBytes(BitsToBytesRoundUp(reader.NumberOfReadBits()));
}

void DexRegisterMap::Dump(VariableIndentationOutputStream* vios) const {
  if (HasAnyLiveDexRegisters()) {
    ScopedIndentation indent1(vios);
    for (size_t i = 0; i < size(); ++i) {
      DexRegisterLocation reg = (*this)[i];
      if (reg.IsLive()) {
        vios->Stream() << "v" << i << ":" << reg << " ";
      }
    }
    vios->Stream() << "\n";
  }
}

void CodeInfo::Dump(VariableIndentationOutputStream* vios,
                    uint32_t code_offset,
                    bool verbose,
                    InstructionSet instruction_set) const {
  vios->Stream() << "CodeInfo BitSize=" << size_in_bits_
      << " FrameSize:" << packed_frame_size_ * kStackAlignment
      << " CoreSpillMask:" << std::hex << core_spill_mask_
      << " FpSpillMask:" << std::hex << fp_spill_mask_
      << " NumberOfDexRegisters:" << std::dec << number_of_dex_registers_
      << "\n";
  ScopedIndentation indent1(vios);
  ForEachBitTableField([this, &vios, verbose](auto member_pointer) {
    const auto& table = this->*member_pointer;
    if (table.NumRows() != 0) {
      vios->Stream() << table.GetName() << " BitSize=" << table.DataBitSize();
      vios->Stream() << " Rows=" << table.NumRows() << " Bits={";
      const char* const* column_names = table.GetColumnNames();
      for (size_t c = 0; c < table.NumColumns(); c++) {
        vios->Stream() << (c != 0 ? " " : "");
        vios->Stream() << column_names[c] << "=" << table.NumColumnBits(c);
      }
      vios->Stream() << "}\n";
      if (verbose) {
        ScopedIndentation indent1(vios);
        for (size_t r = 0; r < table.NumRows(); r++) {
          vios->Stream() << "[" << std::right << std::setw(3) << r << "]={";
          for (size_t c = 0; c < table.NumColumns(); c++) {
            vios->Stream() << (c != 0 ? " " : "");
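            // Bit-mask tables are printed as raw bit strings (most significant bit
            // first); all other columns are printed as signed integers.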
            if (&table == static_cast<const void*>(&stack_masks_) ||
                &table == static_cast<const void*>(&dex_register_masks_)) {
              BitMemoryRegion bits = table.GetBitMemoryRegion(r, c);
              for (size_t b = 0, e = bits.size_in_bits(); b < e; b++) {
                vios->Stream() << bits.LoadBit(e - b - 1);
              }
            } else {
              vios->Stream() << std::right << std::setw(8) << static_cast<int32_t>(table.Get(r, c));
            }
          }
          vios->Stream() << "}\n";
        }
      }
    }
  });

  // Display stack maps along with (live) Dex register maps.
  if (verbose) {
    for (StackMap stack_map : stack_maps_) {
      stack_map.Dump(vios, *this, code_offset, instruction_set);
    }
  }
}

void StackMap::Dump(VariableIndentationOutputStream* vios,
                    const CodeInfo& code_info,
                    uint32_t code_offset,
                    InstructionSet instruction_set) const {
  const uint32_t pc_offset = GetNativePcOffset(instruction_set);
  vios->Stream()
      << "StackMap[" << Row() << "]"
      << std::hex
      << " (native_pc=0x" << code_offset + pc_offset
      << ", dex_pc=0x" << GetDexPc()
      << ", register_mask=0x" << code_info.GetRegisterMaskOf(*this)
      << std::dec
      << ", stack_mask=0b";
  BitMemoryRegion stack_mask = code_info.GetStackMaskOf(*this);
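  // Print the stack mask with the most significant bit first.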
  for (size_t i = 0, e = stack_mask.size_in_bits(); i < e; ++i) {
    vios->Stream() << stack_mask.LoadBit(e - i - 1);
  }
  vios->Stream() << ")\n";
  code_info.GetDexRegisterMapOf(*this).Dump(vios);
  for (InlineInfo inline_info : code_info.GetInlineInfosOf(*this)) {
    inline_info.Dump(vios, code_info, *this);
  }
}

void InlineInfo::Dump(VariableIndentationOutputStream* vios,
                      const CodeInfo& code_info,
                      const StackMap& stack_map) const {
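  // Inline infos of one stack map are stored as consecutive rows, so the inlining depth
  // is this row's distance from the stack map's first inline info row.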
  uint32_t depth = Row() - stack_map.GetInlineInfoIndex();
  vios->Stream()
      << "InlineInfo[" << Row() << "]"
      << " (depth=" << depth
      << std::hex
      << ", dex_pc=0x" << GetDexPc();
  if (EncodesArtMethod()) {
    ScopedObjectAccess soa(Thread::Current());
    vios->Stream() << ", method=" << GetArtMethod()->PrettyMethod();
  } else {
    vios->Stream()
        << std::dec
        << ", method_index=" << code_info.GetMethodIndexOf(*this);
  }
  vios->Stream() << ")\n";
  code_info.GetInlineDexRegisterMapOf(stack_map, *this).Dump(vios);
}

}  // namespace art