/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "block_builder.h"

#include "base/logging.h"  // FOR VLOG.
#include "dex/bytecode_utils.h"
#include "dex/code_item_accessors-inl.h"
#include "dex/dex_file_exception_helpers.h"

namespace art HIDDEN {

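// The constructor sizes `branch_targets_` with one slot per dex code unit, or
// a single slot for an intrinsic graph without a code item. MaybeCreateBlockAt()
// below fills the slots lazily with the blocks starting at those dex pcs.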
HBasicBlockBuilder::HBasicBlockBuilder(HGraph* graph,
                                       const DexFile* const dex_file,
                                       const CodeItemDebugInfoAccessor& accessor,
                                       ScopedArenaAllocator* local_allocator)
    : allocator_(graph->GetAllocator()),
      graph_(graph),
      dex_file_(dex_file),
      code_item_accessor_(accessor),
      local_allocator_(local_allocator),
      branch_targets_(code_item_accessor_.HasCodeItem()
                          ? code_item_accessor_.InsnsSizeInCodeUnits()
                          : /* fake dex_pc=0 for intrinsic graph */ 1u,
                      nullptr,
                      local_allocator->Adapter(kArenaAllocGraphBuilder)),
      throwing_blocks_(kDefaultNumberOfThrowingBlocks,
                       local_allocator->Adapter(kArenaAllocGraphBuilder)) {}

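// Returns the basic block registered for `dex_pc`, creating it on first use.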
HBasicBlock* HBasicBlockBuilder::MaybeCreateBlockAt(uint32_t dex_pc) {
  return MaybeCreateBlockAt(dex_pc, dex_pc);
}

HBasicBlock* HBasicBlockBuilder::MaybeCreateBlockAt(uint32_t semantic_dex_pc,
                                                    uint32_t store_dex_pc) {
  HBasicBlock* block = branch_targets_[store_dex_pc];
  if (block == nullptr) {
    block = new (allocator_) HBasicBlock(graph_, semantic_dex_pc);
    branch_targets_[store_dex_pc] = block;
  }
  DCHECK_EQ(block->GetDexPc(), semantic_dex_pc);
  return block;
}

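// Creates basic blocks at every location that can start a block: the method
// entry, branch and switch targets, TryItem boundaries, exception handler
// addresses and fall-through successors of block-ending instructions.
// Returns false if the bytecode is malformed and cannot be compiled.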
bool HBasicBlockBuilder::CreateBranchTargets() {
  // Create the first block for the dex instructions, single successor of the entry block.
  MaybeCreateBlockAt(0u);

  if (code_item_accessor_.TriesSize() != 0) {
    // Create branch targets at the start/end of the TryItem range. These are
    // places where the program might fall through into/out of a block and
    // where TryBoundary instructions will be inserted later. Other edges which
    // enter/exit the try blocks are a result of branches/switches.
    for (const dex::TryItem& try_item : code_item_accessor_.TryItems()) {
      uint32_t dex_pc_start = try_item.start_addr_;
      uint32_t dex_pc_end = dex_pc_start + try_item.insn_count_;
      MaybeCreateBlockAt(dex_pc_start);
      if (dex_pc_end < code_item_accessor_.InsnsSizeInCodeUnits()) {
        // TODO: Do not create block if the last instruction cannot fall through.
        MaybeCreateBlockAt(dex_pc_end);
      } else if (dex_pc_end == code_item_accessor_.InsnsSizeInCodeUnits()) {
        // The TryItem spans until the very end of the CodeItem and therefore
        // cannot have any code afterwards.
      } else {
        // The TryItem spans beyond the end of the CodeItem. This is invalid code.
        VLOG(compiler) << "Not compiled: TryItem spans beyond the end of the CodeItem";
        return false;
      }
    }

    // Create branch targets for exception handlers.
    const uint8_t* handlers_ptr = code_item_accessor_.GetCatchHandlerData();
    uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
    for (uint32_t idx = 0; idx < handlers_size; ++idx) {
      CatchHandlerIterator iterator(handlers_ptr);
      for (; iterator.HasNext(); iterator.Next()) {
        MaybeCreateBlockAt(iterator.GetHandlerAddress());
      }
      handlers_ptr = iterator.EndDataPointer();
    }
  }

  // Iterate over all instructions and find branching instructions. Create blocks for
  // the locations these instructions branch to.
  for (const DexInstructionPcPair& pair : code_item_accessor_) {
    const uint32_t dex_pc = pair.DexPc();
    const Instruction& instruction = pair.Inst();

    if (instruction.IsBranch()) {
      MaybeCreateBlockAt(dex_pc + instruction.GetTargetOffset());
    } else if (instruction.IsSwitch()) {
      DexSwitchTable table(instruction, dex_pc);
      for (DexSwitchTableIterator s_it(table); !s_it.Done(); s_it.Advance()) {
        MaybeCreateBlockAt(dex_pc + s_it.CurrentTargetOffset());

        // Create N-1 blocks where we will insert comparisons of the input value
        // against the Switch's case keys.
        if (table.ShouldBuildDecisionTree() && !s_it.IsLast()) {
          // Store the block under the dex_pc of the current key in the switch
          // data for uniqueness, but give it the dex_pc of the SWITCH
          // instruction to which it semantically belongs.
          MaybeCreateBlockAt(dex_pc, s_it.GetDexPcForCurrentIndex());
        }
      }
    } else if (instruction.Opcode() == Instruction::MOVE_EXCEPTION) {
      // End the basic block after MOVE_EXCEPTION. This simplifies the later
      // stage of TryBoundary-block insertion.
    } else {
      continue;
    }

    if (instruction.CanFlowThrough()) {
      DexInstructionIterator next(std::next(DexInstructionIterator(pair)));
      if (next == code_item_accessor_.end()) {
        // In the normal case we should never hit this, but someone can artificially
        // forge a dex file that falls through out of the method code. In this case we
        // bail out of compilation.
        VLOG(compiler) << "Not compiled: Fall-through beyond the CodeItem";
        return false;
      }
      MaybeCreateBlockAt(next.DexPc());
    }
  }

  return true;
}

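// Connects the blocks created by CreateBranchTargets() with control-flow
// edges: fall-through, branch, switch and return/throw successors. Also
// records which blocks contain throwing instructions so that
// InsertTryBoundaryBlocks() can limit its work to those blocks.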
void HBasicBlockBuilder::ConnectBasicBlocks() {
  HBasicBlock* block = graph_->GetEntryBlock();
  graph_->AddBlock(block);

  bool is_throwing_block = false;
  // Calculate the quickening index here instead of CreateBranchTargets since it's easier to
  // calculate in dex_pc order.
  for (const DexInstructionPcPair& pair : code_item_accessor_) {
    const uint32_t dex_pc = pair.DexPc();
    const Instruction& instruction = pair.Inst();

    // Check if this dex_pc address starts a new basic block.
    HBasicBlock* next_block = GetBlockAt(dex_pc);
    if (next_block != nullptr) {
      if (block != nullptr) {
        // Last instruction did not end its basic block but a new one starts here.
        // It must have been a block falling through into the next one.
        block->AddSuccessor(next_block);
      }
      block = next_block;
      is_throwing_block = false;
      graph_->AddBlock(block);
    }

    if (block == nullptr) {
      // Ignore dead code.
      continue;
    }

    if (!is_throwing_block && IsThrowingDexInstruction(instruction)) {
      DCHECK(!ContainsElement(throwing_blocks_, block));
      is_throwing_block = true;
      throwing_blocks_.push_back(block);
    }

    if (instruction.IsBranch()) {
      uint32_t target_dex_pc = dex_pc + instruction.GetTargetOffset();
      block->AddSuccessor(GetBlockAt(target_dex_pc));
    } else if (instruction.IsReturn() || (instruction.Opcode() == Instruction::THROW)) {
      block->AddSuccessor(graph_->GetExitBlock());
    } else if (instruction.IsSwitch()) {
      DexSwitchTable table(instruction, dex_pc);
      for (DexSwitchTableIterator s_it(table); !s_it.Done(); s_it.Advance()) {
        uint32_t target_dex_pc = dex_pc + s_it.CurrentTargetOffset();
        block->AddSuccessor(GetBlockAt(target_dex_pc));

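        // When the switch is lowered to a chain of comparisons (see the
        // decision-tree comment in CreateBranchTargets()), link the current
        // block to the block comparing against the next case key and continue
        // building from there.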
        if (table.ShouldBuildDecisionTree() && !s_it.IsLast()) {
          uint32_t next_case_dex_pc = s_it.GetDexPcForCurrentIndex();
          HBasicBlock* next_case_block = GetBlockAt(next_case_dex_pc);
          block->AddSuccessor(next_case_block);
          block = next_case_block;
          graph_->AddBlock(block);
        }
      }
    } else {
      // Remaining code only applies to instructions which end their basic block.
      continue;
    }

    // Go to the next instruction in case we read dex PC below.
    if (instruction.CanFlowThrough()) {
      block->AddSuccessor(GetBlockAt(std::next(DexInstructionIterator(pair)).DexPc()));
    }

    // The basic block ends here. Do not add any more instructions.
    block = nullptr;
  }

  graph_->AddBlock(graph_->GetExitBlock());
}

// Returns the TryItem stored for `block` or nullptr if there is no info for it.
static const dex::TryItem* GetTryItem(
    HBasicBlock* block,
    const ScopedArenaSafeMap<uint32_t, const dex::TryItem*>& try_block_info) {
  auto iterator = try_block_info.find(block->GetBlockId());
  return (iterator == try_block_info.end()) ? nullptr : iterator->second;
}

// Iterates over the exception handlers of `try_item`, finds the corresponding
// catch blocks and makes them successors of `try_boundary`. The order of
// successors matches the order in which runtime exception delivery searches
// for a handler.
static void LinkToCatchBlocks(HTryBoundary* try_boundary,
                              const CodeItemDataAccessor& accessor,
                              const dex::TryItem* try_item,
                              const ScopedArenaSafeMap<uint32_t, HBasicBlock*>& catch_blocks) {
  for (CatchHandlerIterator it(accessor.GetCatchHandlerData(try_item->handler_off_));
      it.HasNext();
      it.Next()) {
    try_boundary->AddExceptionHandler(catch_blocks.Get(it.GetHandlerAddress()));
  }
}

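// Conservatively returns true if `catch_block` may have live normal
// (non-exceptional) predecessors. InsertTryBoundaryBlocks() uses this to
// decide whether an artificial landing pad must be placed in front of the
// catch block.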
bool HBasicBlockBuilder::MightHaveLiveNormalPredecessors(HBasicBlock* catch_block) {
  if (kIsDebugBuild) {
    DCHECK_NE(catch_block->GetDexPc(), kNoDexPc) << "Should not be called on synthetic blocks";
    DCHECK(!graph_->GetEntryBlock()->GetSuccessors().empty())
        << "Basic blocks must have been created and connected";
    for (HBasicBlock* predecessor : catch_block->GetPredecessors()) {
      DCHECK(!predecessor->IsSingleTryBoundary())
          << "TryBoundary blocks must not have been created yet";
    }
  }

  const Instruction& first = code_item_accessor_.InstructionAt(catch_block->GetDexPc());
  if (first.Opcode() == Instruction::MOVE_EXCEPTION) {
    // Verifier guarantees that if a catch block begins with MOVE_EXCEPTION then
    // it has no live normal predecessors.
    return false;
  } else if (catch_block->GetPredecessors().empty()) {
    // Normal control-flow edges have already been created. Since the block's list of
    // predecessors is empty, it cannot have any live or dead normal predecessors.
    return false;
  }

  // The catch block has normal predecessors but we do not know which are live
  // and which will be removed during the initial DCE. Return `true` to signal
  // that it may have live normal predecessors.
  return true;
}

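// Records TryItem coverage for throwing blocks, creates landing pads for
// catch blocks where needed, and inserts entry/exit TryBoundary blocks
// linked to their exception handlers.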
void HBasicBlockBuilder::InsertTryBoundaryBlocks() {
  if (code_item_accessor_.TriesSize() == 0) {
    return;
  }

  // Keep a map of all try blocks and their respective TryItems. We do not use
  // the block's pointer but rather its id to ensure deterministic iteration.
  ScopedArenaSafeMap<uint32_t, const dex::TryItem*> try_block_info(
      std::less<uint32_t>(), local_allocator_->Adapter(kArenaAllocGraphBuilder));

  // Obtain TryItem information for blocks with throwing instructions, and split
  // blocks which are both try & catch to simplify the graph.
  for (HBasicBlock* block : graph_->GetBlocks()) {
    if (block->GetDexPc() == kNoDexPc) {
      continue;
    }

    // Do not bother creating exceptional edges for try blocks which have no
    // throwing instructions. In that case we simply assume that the block is
    // not covered by a TryItem. This prevents us from creating a throw-catch
    // loop for synchronized blocks.
    if (ContainsElement(throwing_blocks_, block)) {
      // Try to find a TryItem covering the block.
      const dex::TryItem* try_item = code_item_accessor_.FindTryItem(block->GetDexPc());
      if (try_item != nullptr) {
        // Block throwing and in a TryItem. Store the try block information.
        try_block_info.Put(block->GetBlockId(), try_item);
      }
    }
  }

  // Map from a handler dex_pc to the corresponding catch block.
  ScopedArenaSafeMap<uint32_t, HBasicBlock*> catch_blocks(
      std::less<uint32_t>(), local_allocator_->Adapter(kArenaAllocGraphBuilder));

  // Iterate over catch blocks, create artificial landing pads if necessary to
  // simplify the CFG, and set metadata.
  const uint8_t* handlers_ptr = code_item_accessor_.GetCatchHandlerData();
  uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
  for (uint32_t idx = 0; idx < handlers_size; ++idx) {
    CatchHandlerIterator iterator(handlers_ptr);
    for (; iterator.HasNext(); iterator.Next()) {
      uint32_t address = iterator.GetHandlerAddress();
      auto existing = catch_blocks.find(address);
      if (existing != catch_blocks.end()) {
        // Catch block already processed.
        TryCatchInformation* info = existing->second->GetTryCatchInformation();
        if (iterator.GetHandlerTypeIndex() != info->GetCatchTypeIndex()) {
          // The handler is for multiple types. We could record all the types, but
          // doing class resolution here isn't ideal, and it's unclear whether wasting
          // the space in TryCatchInformation is worth it.
          info->SetInvalidTypeIndex();
        }
        continue;
      }

      // Check if we should create an artificial landing pad for the catch block.
      // We create one if the catch block is also a try block because we do not
      // have a strategy for inserting TryBoundaries on exceptional edges.
      // We also create one if the block might have normal predecessors so as to
      // simplify register allocation.
      HBasicBlock* catch_block = GetBlockAt(address);
      bool is_try_block = (try_block_info.find(catch_block->GetBlockId()) != try_block_info.end());
      if (is_try_block || MightHaveLiveNormalPredecessors(catch_block)) {
        HBasicBlock* new_catch_block = new (allocator_) HBasicBlock(graph_, address);
        new_catch_block->AddInstruction(new (allocator_) HGoto(address));
        new_catch_block->AddSuccessor(catch_block);
        graph_->AddBlock(new_catch_block);
        catch_block = new_catch_block;
      }

      catch_blocks.Put(address, catch_block);
      catch_block->SetTryCatchInformation(
          new (allocator_) TryCatchInformation(iterator.GetHandlerTypeIndex(), *dex_file_));
    }
    handlers_ptr = iterator.EndDataPointer();
  }

  // Do a pass over the try blocks and insert entering TryBoundaries where at
  // least one predecessor is not covered by the same TryItem as the try block.
  // We do not split each edge separately, but rather create one boundary block
  // that all predecessors are relinked to. This preserves loop headers (b/23895756).
  for (const auto& entry : try_block_info) {
    uint32_t block_id = entry.first;
    const dex::TryItem* try_item = entry.second;
    HBasicBlock* try_block = graph_->GetBlocks()[block_id];
    for (HBasicBlock* predecessor : try_block->GetPredecessors()) {
      if (GetTryItem(predecessor, try_block_info) != try_item) {
        // Found a predecessor not covered by the same TryItem. Insert entering
        // boundary block.
        HTryBoundary* try_entry = new (allocator_) HTryBoundary(
            HTryBoundary::BoundaryKind::kEntry, try_block->GetDexPc());
        try_block->CreateImmediateDominator()->AddInstruction(try_entry);
        LinkToCatchBlocks(try_entry, code_item_accessor_, try_item, catch_blocks);
        break;
      }
    }
  }

  // Do a second pass over the try blocks and insert exit TryBoundaries where
  // the successor is not in the same TryItem.
  for (const auto& entry : try_block_info) {
    uint32_t block_id = entry.first;
    const dex::TryItem* try_item = entry.second;
    HBasicBlock* try_block = graph_->GetBlocks()[block_id];
    // NOTE: Do not use iterators because SplitEdge would invalidate them.
    for (size_t i = 0, e = try_block->GetSuccessors().size(); i < e; ++i) {
      HBasicBlock* successor = try_block->GetSuccessors()[i];

      // If the successor is a try block, all of its predecessors must be
      // covered by the same TryItem. Otherwise the previous pass would have
      // created a non-throwing boundary block.
      if (GetTryItem(successor, try_block_info) != nullptr) {
        DCHECK_EQ(try_item, GetTryItem(successor, try_block_info));
        continue;
      }

      // Insert TryBoundary and link to catch blocks.
      HTryBoundary* try_exit =
          new (allocator_) HTryBoundary(HTryBoundary::BoundaryKind::kExit, successor->GetDexPc());
      graph_->SplitEdge(try_block, successor)->AddInstruction(try_exit);
      LinkToCatchBlocks(try_exit, code_item_accessor_, try_item, catch_blocks);
    }
  }
}

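// For OSR compilation, inserts a synthesized loop block in front of every
// block that is the target of a backward branch or switch edge.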
void HBasicBlockBuilder::InsertSynthesizedLoopsForOsr() {
  ArenaSet<uint32_t> targets(allocator_->Adapter(kArenaAllocGraphBuilder));
  // Collect basic blocks that are targets of a negative branch.
  for (const DexInstructionPcPair& pair : code_item_accessor_) {
    const uint32_t dex_pc = pair.DexPc();
    const Instruction& instruction = pair.Inst();
    if (instruction.IsBranch()) {
      uint32_t target_dex_pc = dex_pc + instruction.GetTargetOffset();
      if (target_dex_pc < dex_pc) {
        HBasicBlock* block = GetBlockAt(target_dex_pc);
        CHECK_NE(kNoDexPc, block->GetDexPc());
        targets.insert(block->GetBlockId());
      }
    } else if (instruction.IsSwitch()) {
      DexSwitchTable table(instruction, dex_pc);
      for (DexSwitchTableIterator s_it(table); !s_it.Done(); s_it.Advance()) {
        uint32_t target_dex_pc = dex_pc + s_it.CurrentTargetOffset();
        if (target_dex_pc < dex_pc) {
          HBasicBlock* block = GetBlockAt(target_dex_pc);
          CHECK_NE(kNoDexPc, block->GetDexPc());
          targets.insert(block->GetBlockId());
        }
      }
    }
  }

  // Insert synthesized loops before the collected blocks.
  for (uint32_t block_id : targets) {
    HBasicBlock* block = graph_->GetBlocks()[block_id];
    HBasicBlock* loop_block = new (allocator_) HBasicBlock(graph_, block->GetDexPc());
    graph_->AddBlock(loop_block);
    while (!block->GetPredecessors().empty()) {
      block->GetPredecessors()[0]->ReplaceSuccessor(block, loop_block);
    }
    loop_block->AddSuccessor(loop_block);
    loop_block->AddSuccessor(block);
    // We loop on false - we know this won't be optimized later on as the loop
    // is marked irreducible, which disables loop optimizations.
    loop_block->AddInstruction(new (allocator_) HIf(graph_->GetIntConstant(0), kNoDexPc));
  }
}

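// Builds the block structure for a method with a code item: creates the
// entry/exit blocks, the branch targets, the control-flow edges and the
// try/catch boundary blocks. Returns false if the bytecode could not be
// handled.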
bool HBasicBlockBuilder::Build() {
  DCHECK(code_item_accessor_.HasCodeItem());
  DCHECK(graph_->GetBlocks().empty());

  graph_->SetEntryBlock(new (allocator_) HBasicBlock(graph_, kNoDexPc));
  graph_->SetExitBlock(new (allocator_) HBasicBlock(graph_, kNoDexPc));

  // TODO(dbrazdil): Do CreateBranchTargets and ConnectBasicBlocks in one pass.
  if (!CreateBranchTargets()) {
    return false;
  }

  ConnectBasicBlocks();
  InsertTryBoundaryBlocks();

  if (graph_->IsCompilingOsr()) {
    InsertSynthesizedLoopsForOsr();
  }

  return true;
}

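// Builds the minimal entry -> body -> exit graph used when building an
// intrinsic graph without a code item.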
void HBasicBlockBuilder::BuildIntrinsic() {
  DCHECK(!code_item_accessor_.HasCodeItem());
  DCHECK(graph_->GetBlocks().empty());

  // Create blocks.
  HBasicBlock* entry_block = new (allocator_) HBasicBlock(graph_, kNoDexPc);
  HBasicBlock* exit_block = new (allocator_) HBasicBlock(graph_, kNoDexPc);
  HBasicBlock* body = MaybeCreateBlockAt(/* semantic_dex_pc= */ kNoDexPc, /* store_dex_pc= */ 0u);

  // Add blocks to the graph.
  graph_->AddBlock(entry_block);
  graph_->AddBlock(body);
  graph_->AddBlock(exit_block);
  graph_->SetEntryBlock(entry_block);
  graph_->SetExitBlock(exit_block);

  // Connect blocks.
  entry_block->AddSuccessor(body);
  body->AddSuccessor(exit_block);
}

}  // namespace art