1 /**
2 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include <fstream>
17 #include <method_data_accessor.h>
18 #include "compiler_logger.h"
19 #include "pbc_iterator.h"
20 #include "bytecode_instruction.h"
21 #include "code_data_accessor.h"
22 #include "code_data_accessor-inl.h"
23 #include "method_data_accessor.h"
24 #include "optimizer/analysis/dominators_tree.h"
25 #include "optimizer/analysis/loop_analyzer.h"
26 #include "method_data_accessor-inl.h"
27 #include "ir_builder.h"
28
29 namespace panda::compiler {
/**
 * Entry point of the pass: builds the IR graph for the current method.
 * Returns false when the method exceeds compilation limits or contains
 * an unsupported instruction.
 */
bool IrBuilder::RunImpl()
{
    COMPILER_LOG(INFO, IR_BUILDER) << "Start building ir for method: "
                                   << GetGraph()->GetRuntime()->GetClassNameFromMethod(GetMethod()) << "."
                                   << GetGraph()->GetRuntime()->GetMethodName(GetMethod())
                                   << "(args=" << GetGraph()->GetRuntime()->GetMethodTotalArgumentsCount(GetMethod())
                                   << ", regs=" << GetGraph()->GetRuntime()->GetMethodRegistersCount(GetMethod())
                                   << ")";

    auto instructions_buf = GetGraph()->GetRuntime()->GetMethodCode(GetMethod());
    BytecodeInstructions pbc_instructions(instructions_buf, GetGraph()->GetRuntime()->GetMethodCodeSize(GetMethod()));
    // Total vregs = method's own registers + all arguments + 1 (presumably the accumulator — TODO confirm)
    size_t vregs_count = GetGraph()->GetRuntime()->GetMethodRegistersCount(GetMethod()) +
                         GetGraph()->GetRuntime()->GetMethodTotalArgumentsCount(GetMethod()) + 1;
    if (!CheckMethodLimitations(pbc_instructions, vregs_count)) {
        return false;
    }
    GetGraph()->SetVRegsCount(vregs_count);
    BuildBasicBlocks(pbc_instructions);
    // Dominators and loop info must be available before instruction building:
    // BuildBasicBlock queries IsLoopPreHeader() on each block
    GetGraph()->RunPass<DominatorsTree>();
    GetGraph()->RunPass<LoopAnalyzer>();

    InstBuilder inst_builder(GetGraph(), GetMethod(), caller_inst_);
    inst_builder.Prepare(is_inlined_graph_);
    inst_defs_.resize(vregs_count);
    COMPILER_LOG(INFO, IR_BUILDER) << "Start instructions building...";
    for (auto bb : GetGraph()->GetBlocksRPO()) {
        if (!BuildBasicBlock(bb, &inst_builder, instructions_buf)) {
            return false;
        }
    }
    // Instruction building may have added blocks/edges, so dominators are rebuilt
    // and the loop analysis is explicitly invalidated and rerun
    GetGraph()->RunPass<DominatorsTree>();
    GetGraph()->InvalidateAnalysis<LoopAnalyzer>();
    GetGraph()->RunPass<LoopAnalyzer>();
    inst_builder.FixInstructions();

    if (options.IsCompilerPrintStats() || options.WasSetCompilerDumpStatsCsv()) {
        // Count the source bytecode instructions for compilation statistics
        uint64_t pbc_inst_num = 0;
        for ([[maybe_unused]] auto i : pbc_instructions) {
            pbc_inst_num++;
        }
        GetGraph()->GetPassManager()->GetStatistics()->AddPbcInstNum(pbc_inst_num);
    }
    COMPILER_LOG(INFO, IR_BUILDER) << "IR successfully built: " << GetGraph()->GetVectorBlocks().size()
                                   << " basic blocks, " << GetGraph()->GetCurrentInstructionId() << " instructions";
    return true;
}
76
CheckMethodLimitations(const BytecodeInstructions & instructions,size_t vregs_count)77 bool IrBuilder::CheckMethodLimitations(const BytecodeInstructions &instructions, size_t vregs_count)
78 {
79 // TODO(a.popov) Optimize catch-phi's memory consumption and get rid of this limitation
80 static constexpr auto TRY_BLOCKS_LIMIT = 128U;
81
82 size_t bytecode_size_limit = options.GetCompilerMaxBytecodeSize();
83
84 // The option CompilerInlineFullIntrinsics increases the size of the code several times.
85 // So the limit for this option is reduced
86 if (options.IsCompilerInlineFullIntrinsics()) {
87 ASSERT(GetGraph()->IsDynamicMethod());
88 bytecode_size_limit >>= 2U;
89 }
90
91 if (instructions.GetSize() > bytecode_size_limit) {
92 COMPILER_LOG(INFO, IR_BUILDER) << "Method is too big: size=" << instructions.GetSize()
93 << ", limit=" << bytecode_size_limit;
94 return false;
95 }
96 if (vregs_count >= options.GetCompilerMaxVregsNum()) {
97 COMPILER_LOG(INFO, IR_BUILDER) << "Method has too many virtual registers: " << vregs_count
98 << ", limit=" << options.GetCompilerMaxVregsNum();
99 return false;
100 }
101
102 auto panda_file = static_cast<panda_file::File *>(GetGraph()->GetRuntime()->GetBinaryFileForMethod(GetMethod()));
103 panda_file::MethodDataAccessor mda(*panda_file,
104 panda_file::File::EntityId(GetGraph()->GetRuntime()->GetMethodId(GetMethod())));
105 panda_file::CodeDataAccessor cda(*panda_file, mda.GetCodeId().value());
106 if (cda.GetTriesSize() > TRY_BLOCKS_LIMIT) {
107 COMPILER_LOG(INFO, IR_BUILDER) << "Method has too many try blocks: " << cda.GetTriesSize()
108 << ", limit=" << TRY_BLOCKS_LIMIT;
109 return false;
110 }
111 return true;
112 }
113
BuildBasicBlock(BasicBlock * bb,InstBuilder * inst_builder,const uint8_t * instructions_buf)114 bool IrBuilder::BuildBasicBlock(BasicBlock *bb, InstBuilder *inst_builder, const uint8_t *instructions_buf)
115 {
116 inst_builder->SetCurrentBlock(bb);
117 inst_builder->UpdateDefs();
118
119 if (bb->IsLoopPreHeader() && !GetGraph()->IsOsrMode()) {
120 ASSERT(bb->GetGuestPc() != INVALID_PC);
121 auto ss = inst_builder->CreateSaveStateDeoptimize(bb->GetGuestPc());
122 bb->AppendInst(ss);
123 COMPILER_LOG(DEBUG, IR_BUILDER) << "Create save state deoptimize: " << *ss;
124 }
125
126 ASSERT(bb->GetGuestPc() != INVALID_PC);
127 // If block is not in the `blocks_` vector, it's auxiliary block without instructions
128 if (bb == blocks_[bb->GetGuestPc()]) {
129 return BuildInstructionsForBB(bb, inst_builder, instructions_buf);
130 }
131 COMPILER_LOG(DEBUG, IR_BUILDER) << "Auxiliary block, skipping";
132 return true;
133 }
134
/**
 * Translates the bytecode belonging to one basic block into IR instructions.
 * Returns false when an unsupported instruction is met.
 */
bool IrBuilder::BuildInstructionsForBB(BasicBlock *bb, InstBuilder *inst_builder, const uint8_t *instructions_buf)
{
    // The iterator is bounded by block-boundary/terminator checks below, not by size,
    // hence the max-int length
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    BytecodeInstructions instructions(instructions_buf + bb->GetGuestPc(), std::numeric_limits<int>::max());
    for (auto inst : instructions) {
        auto pc = inst_builder->GetPc(inst.GetAddress());
        // Break if current pc is pc of some basic block, that means that it is the end of the current block.
        if (pc != bb->GetGuestPc() && GetBlockForPc(pc) != nullptr) {
            break;
        }
        COMPILER_LOG(DEBUG, IR_BUILDER) << "PBC instruction: " << inst << " # "
                                        << reinterpret_cast<void *>(inst.GetAddress() - instructions_buf);
        // Copy current defs for assigning them to catch-phi if current inst is throwable
        ASSERT(inst_builder->GetCurrentDefs().size() == inst_defs_.size());
        std::copy(inst_builder->GetCurrentDefs().begin(), inst_builder->GetCurrentDefs().end(), inst_defs_.begin());
        // Snapshot the block tail and the block count BEFORE building, to later find
        // the IR instructions produced by this single PBC instruction
        auto current_last_inst = bb->GetLastInst();
        auto bb_count = GetGraph()->GetVectorBlocks().size();
        inst_builder->BuildInstruction(&inst);
        if (inst_builder->IsFailed()) {
            COMPILER_LOG(WARNING, IR_BUILDER) << "Unsupported instruction";
            return false;
        }
        if (inst.CanThrow()) {
            // One PBC instruction can be expanded to the group of IR's instructions, find first built instruction in
            // this group, and then mark all instructions as throwable; All instructions should be marked, since some of
            // them can be deleted during optimizations, unnecessary catch-phi moves will be resolved before Register
            // Allocator
            auto throwable_inst = (current_last_inst == nullptr) ? bb->GetFirstInst() : current_last_inst->GetNext();
            ProcessThrowableInstructions(inst_builder, throwable_inst);

            // Also cover instructions placed into blocks created during BuildInstruction
            auto &vb = GetGraph()->GetVectorBlocks();
            for (size_t i = bb_count; i < vb.size(); i++) {
                ProcessThrowableInstructions(inst_builder, vb[i]->GetFirstInst());
            }
        }
        // Break if we meet terminator instruction. If instruction in the middle of basic block we don't create
        // further dead instructions.
        if (inst.IsTerminator() && !inst.IsSuspend()) {
            break;
        }
    }
    return true;
}
178
/**
 * Walks from `throwable_inst` to the end of its basic block and, for every
 * throwable instruction, feeds the captured defs (`inst_defs_`) into the
 * catch-phis of all catch-handlers whose try-blocks cover that pc.
 */
void IrBuilder::ProcessThrowableInstructions(InstBuilder *inst_builder, Inst *throwable_inst)
{
    for (; throwable_inst != nullptr; throwable_inst = throwable_inst->GetNext()) {
        // SaveStates themselves don't throw
        if (throwable_inst->IsSaveState()) {
            continue;
        }
        if (throwable_inst->IsCheck()) {
            // Jump to the instruction guarded by this check
            // NOTE(review): assumes a Check instruction always has at least one user — confirm
            throwable_inst = throwable_inst->GetFirstUser()->GetInst();
        }
        COMPILER_LOG(DEBUG, IR_BUILDER) << "Throwable inst, Id = " << throwable_inst->GetId();
        // Collect the catch-handlers of every try-block covering this pc
        catch_handlers_.clear();
        EnumerateTryBlocksCoveredPc(throwable_inst->GetPc(), [this](const TryCodeBlock &try_block) {
            auto tbb = try_block.begin_bb;
            tbb->EnumerateCatchHandlers([this](BasicBlock *catch_handler, [[maybe_unused]] size_t type_id) {
                catch_handlers_.insert(catch_handler);
                return true;
            });
        });
        if (!catch_handlers_.empty()) {
            inst_builder->AddCatchPhiInputs(catch_handlers_, inst_defs_, throwable_inst);
        }
    }
}
202
InstNotJump(BytecodeInstruction * inst)203 static inline bool InstNotJump(BytecodeInstruction *inst)
204 {
205 return inst->GetAddress() != nullptr && InstBuilder::GetInstructionJumpOffset(inst) == INVALID_OFFSET &&
206 !inst->HasFlag(BytecodeInstruction::RETURN);
207 }
208
BuildBasicBlocks(const BytecodeInstructions & instructions)209 void IrBuilder::BuildBasicBlocks(const BytecodeInstructions &instructions)
210 {
211 blocks_.resize(instructions.GetSize() + 1);
212 bool fallthrough = false;
213
214 CreateBlock(0);
215 // Create basic blocks
216 for (auto inst : instructions) {
217 auto pc = instructions.GetPc(inst);
218
219 if (fallthrough) {
220 CreateBlock(pc);
221 fallthrough = false;
222 }
223 auto offset = InstBuilder::GetInstructionJumpOffset(&inst);
224 if (offset != INVALID_OFFSET) {
225 auto target_pc = pc + static_cast<size_t>(offset);
226 CreateBlock(target_pc);
227 if (inst.HasFlag(BytecodeInstruction::CONDITIONAL)) {
228 fallthrough = true;
229 }
230 }
231 }
232 CreateTryCatchBoundariesBlocks();
233 GetGraph()->CreateStartBlock();
234 GetGraph()->CreateEndBlock(instructions.GetSize());
235 ConnectBasicBlocks(instructions);
236 ResolveTryCatchBlocks();
237 COMPILER_LOG(DEBUG, IR_BUILDER) << "Created " << GetGraph()->GetVectorBlocks().size() << " basic blocks";
238 }
239
240 template <class Callback>
EnumerateTryBlocksCoveredPc(uint32_t pc,const Callback & callback)241 void IrBuilder::EnumerateTryBlocksCoveredPc(uint32_t pc, const Callback &callback)
242 {
243 for (const auto &[begin_pc, try_block] : try_blocks_) {
244 if (begin_pc <= pc && pc < try_block.boundaries.end_pc) {
245 callback(try_block);
246 }
247 }
248 }
249
/**
 * Return the `TryCodeBlock` for the given boundaries: reuses an existing
 * try-block with the same boundaries, otherwise inserts and initializes a new one
 */
IrBuilder::TryCodeBlock *IrBuilder::InsertTryBlockInfo(const Boundaries &try_boundaries)
{
    // The id a newly created block would get; unused if an existing block is reused
    auto try_id = static_cast<uint32_t>(try_blocks_.size());
    // All try-blocks sharing one begin_pc live under the same multimap key
    auto range = try_blocks_.equal_range(try_boundaries.begin_pc);
    for (auto iter = range.first; iter != range.second; ++iter) {
        // use try-block with the same boundaries
        if (try_boundaries.end_pc == iter->second.boundaries.end_pc) {
            return &iter->second;
        }
        // insert in the increasing `end_pc` order
        // NOTE(review): emplace_hint inserts BEFORE `iter`, which places larger end_pc
        // (outer try) first — the ordering comment above looks inverted; confirm intent
        if (try_boundaries.end_pc > iter->second.boundaries.end_pc) {
            auto it = try_blocks_.emplace_hint(iter, try_boundaries.begin_pc, TryCodeBlock {try_boundaries});
            it->second.Init(GetGraph(), try_id);
            return &it->second;
        }
    }
    // No block with this begin_pc (or all have larger end_pc): append a new one
    auto it = try_blocks_.emplace(try_boundaries.begin_pc, TryCodeBlock {try_boundaries});
    it->second.Init(GetGraph(), try_id);
    return &it->second;
}
273
/**
 * Reads try/catch metadata from the panda file, registers every try-block and
 * catch-handler, and creates basic blocks at all try/catch boundary pcs.
 */
void IrBuilder::CreateTryCatchBoundariesBlocks()
{
    auto panda_file = static_cast<panda_file::File *>(GetGraph()->GetRuntime()->GetBinaryFileForMethod(GetMethod()));
    panda_file::MethodDataAccessor mda(*panda_file,
                                       panda_file::File::EntityId(GetGraph()->GetRuntime()->GetMethodId(GetMethod())));
    panda_file::CodeDataAccessor cda(*panda_file, mda.GetCodeId().value());

    cda.EnumerateTryBlocks([this](panda_file::CodeDataAccessor::TryBlock &try_block) {
        auto start_pc = try_block.GetStartPc();
        auto end_pc = start_pc + try_block.GetLength();
        auto try_info = InsertTryBlockInfo({start_pc, end_pc});
        try_block.EnumerateCatchBlocks([this, try_info](panda_file::CodeDataAccessor::CatchBlock &catch_block) {
            auto pc = catch_block.GetHandlerPc();
            catches_pc_.insert(pc);
            auto type_idx = catch_block.GetTypeIdx();
            // INVALID_INDEX marks a catch-all clause; it is recorded with type id 0
            auto type_id = type_idx == panda_file::INVALID_INDEX
                               ? 0
                               : GetGraph()->GetRuntime()->ResolveTypeIndex(GetMethod(), type_idx);
            try_info->catches->emplace_back(CatchCodeBlock {pc, type_id});
            return true;
        });

        return true;
    });

    COMPILER_LOG(INFO, IR_BUILDER) << "There are: " << try_blocks_.size() << " try-blocks in the method";
    COMPILER_LOG(INFO, IR_BUILDER) << "There are: " << catches_pc_.size() << " catch-handlers in the method";

    // A basic block must start at every try-begin, try-end and catch-handler pc
    for (const auto &[pc, try_block] : try_blocks_) {
        CreateBlock(pc);
        CreateBlock(try_block.boundaries.end_pc);
    }
    for (auto pc : catches_pc_) {
        CreateBlock(pc);
    }
}
310
// Transient per-instruction state used by IrBuilder::ConnectBasicBlocks
struct BlocksConnectorInfo {
    bool fallthrough {};       // previous instruction was a conditional jump: an edge to the next pc is pending
    bool dead_instructions {}; // currently skipping unreachable instructions after an unconditional jump
    BytecodeInstruction prev_inst {nullptr};  // previously processed instruction (null address before the first one)
};
316
/**
 * Creates the CFG edges between previously created basic blocks according to
 * fallthrough and jump semantics of the bytecode, tracking try boundaries along the way.
 */
void IrBuilder::ConnectBasicBlocks(const BytecodeInstructions &instructions)
{
    BlocksConnectorInfo info;
    BasicBlock *curr_bb = blocks_[0];
    GetGraph()->GetStartBlock()->AddSucc(curr_bb);
    for (auto inst : instructions) {
        auto pc = instructions.GetPc(inst);
        auto target_block = blocks_[pc];
        TrackTryBoundaries(pc, inst);
        if (info.fallthrough) {
            // Pending fallthrough edge after a conditional jump
            ASSERT(target_block != nullptr);
            // May be the second edge between same blocks
            curr_bb->AddSucc(target_block, true);
            info.fallthrough = false;
            curr_bb = target_block;
        } else if (target_block != nullptr) {
            // A new block starts at this pc. Catch-handler entries get no fallthrough
            // edge — they are entered only via exceptions.
            if (catches_pc_.count(pc) == 0) {
                // Connect only if the previous instruction falls through and we are not in dead code
                if (InstNotJump(&info.prev_inst) && !info.dead_instructions) {
                    curr_bb->AddSucc(target_block);
                }
            }
            curr_bb = target_block;
            info.dead_instructions = false;
        } else if (info.dead_instructions) {
            // We are processing dead instructions now, skipping them until we meet the next block.
            continue;
        }
        if (auto jmp_target_block = GetBlockToJump(&inst, pc); jmp_target_block != nullptr) {
            curr_bb->AddSucc(jmp_target_block);
            // In case of unconditional branch, we reset curr_bb, so if next instruction won't start new block, then
            // we'll skip further dead instructions.
            info.fallthrough = inst.HasFlag(BytecodeInstruction::CONDITIONAL);
            if (!info.fallthrough) {
                info.dead_instructions = true;
            }
        }
        info.prev_inst = inst;
    }

    // Erase end block if it wasn't connected, should be infinite loop in the graph
    if (GetGraph()->GetEndBlock()->GetPredsBlocks().empty()) {
        GetGraph()->EraseBlock(GetGraph()->GetEndBlock());
        GetGraph()->SetEndBlock(nullptr);
        COMPILER_LOG(INFO, IR_BUILDER) << "Builded graph without end block";
    }
}
363
TrackTryBoundaries(size_t pc,const BytecodeInstruction & inst)364 void IrBuilder::TrackTryBoundaries(size_t pc, const BytecodeInstruction &inst)
365 {
366 opened_try_blocks_.remove_if([pc](TryCodeBlock *try_block) { return try_block->boundaries.end_pc == pc; });
367
368 if (try_blocks_.count(pc) > 0) {
369 auto range = try_blocks_.equal_range(pc);
370 for (auto it = range.first; it != range.second; ++it) {
371 auto &try_block = it->second;
372 if (try_block.boundaries.end_pc > pc) {
373 opened_try_blocks_.push_back(&try_block);
374 auto allocator = GetGraph()->GetLocalAllocator();
375 try_block.basic_blocks = allocator->New<ArenaVector<BasicBlock *>>(allocator->Adapter());
376 } else {
377 // Empty try-block
378 ASSERT(try_block.boundaries.end_pc == pc);
379 }
380 }
381 }
382
383 if (opened_try_blocks_.empty()) {
384 return;
385 }
386
387 if (auto bb = blocks_[pc]; bb != nullptr) {
388 for (auto try_block : opened_try_blocks_) {
389 try_block->basic_blocks->push_back(bb);
390 }
391 }
392
393 if (inst.CanThrow()) {
394 for (auto &try_block : opened_try_blocks_) {
395 try_block->contains_throwable_inst = true;
396 }
397 }
398 }
399
GetBlockToJump(BytecodeInstruction * inst,size_t pc)400 BasicBlock *IrBuilder::GetBlockToJump(BytecodeInstruction *inst, size_t pc)
401 {
402 if ((inst->HasFlag(BytecodeInstruction::RETURN) && !inst->HasFlag(BytecodeInstruction::SUSPEND)) ||
403 inst->IsThrow(BytecodeInstruction::Exceptions::X_THROW)) {
404 return GetGraph()->GetEndBlock();
405 }
406
407 #ifdef ENABLE_BYTECODE_OPT
408 if (inst->GetOpcode() == BytecodeInstruction::Opcode::RETURNUNDEFINED) {
409 return GetGraph()->GetEndBlock();
410 }
411 #endif
412
413 if (auto offset = InstBuilder::GetInstructionJumpOffset(inst); offset != INVALID_OFFSET) {
414 ASSERT(blocks_[pc + static_cast<size_t>(offset)] != nullptr);
415 return blocks_[pc + static_cast<size_t>(offset)];
416 }
417 return nullptr;
418 }
419
420 /**
421 * Mark blocks which were connected to the graph.
422 * Catch-handlers will not be marked, since they have not been connected yet.
423 */
MarkNormalControlFlow(BasicBlock * block,Marker marker)424 static void MarkNormalControlFlow(BasicBlock *block, Marker marker)
425 {
426 block->SetMarker(marker);
427 for (auto succ : block->GetSuccsBlocks()) {
428 if (!succ->IsMarked(marker)) {
429 MarkNormalControlFlow(succ, marker);
430 }
431 }
432 }
433
/**
 * Sets catch/try flags and try ids on the blocks remaining in the graph.
 * `marker` must already mark every block reachable via normal (non-exceptional)
 * control flow — see ResolveTryCatchBlocks.
 */
void IrBuilder::MarkTryCatchBlocks(Marker marker)
{
    // All blocks without `normal` mark are considered as catch-blocks
    for (auto bb : GetGraph()->GetBlocksRPO()) {
        if (bb->IsMarked(marker)) {
            continue;
        }
        if (bb->IsTryBegin()) {
            // try_begin inherits catch-ness from its first successor (the first try block)
            bb->SetCatch(bb->GetSuccessor(0)->IsCatch());
        } else if (bb->IsTryEnd()) {
            // try_end inherits catch-ness from its predecessor
            bb->SetCatch(bb->GetPredecessor(0)->IsCatch());
        } else {
            bb->SetCatch(true);
        }
    }

    // Nested try-blocks can be removed, but referring to them basic blocks can be placed in the external try-blocks.
    // So `try` marks are added after removing unreachable blocks
    for (auto it : try_blocks_) {
        const auto &try_block = it.second;
        // try_end may have been detached along with eliminated catch blocks — put it back
        if (try_block.begin_bb->GetGraph() != try_block.end_bb->GetGraph()) {
            RestoreTryEnd(try_block);
        }
        try_block.begin_bb->SetTryId(try_block.id);
        try_block.end_bb->SetTryId(try_block.id);
        if (try_block.basic_blocks == nullptr) {
            continue;
        }
        for (auto bb : *try_block.basic_blocks) {
            bb->SetTryId(try_block.id);
            bb->SetTry(true);
        }
    }
}
468
469 /*
470 * Connect catch-blocks to the graph.
471 */
ResolveTryCatchBlocks()472 void IrBuilder::ResolveTryCatchBlocks()
473 {
474 auto marker_holder = MarkerHolder(GetGraph());
475 auto marker = marker_holder.GetMarker();
476 MarkNormalControlFlow(GetGraph()->GetStartBlock(), marker);
477 ConnectTryCatchBlocks();
478 GetGraph()->RemoveUnreachableBlocks();
479 MarkTryCatchBlocks(marker);
480 }
481
ConnectTryCatchBlocks()482 void IrBuilder::ConnectTryCatchBlocks()
483 {
484 ArenaMap<uint32_t, BasicBlock *> catch_blocks(GetGraph()->GetLocalAllocator()->Adapter());
485 // Firstly create catch_begin blocks, as they should precede try_begin blocks
486 for (auto pc : catches_pc_) {
487 auto catch_begin = GetGraph()->CreateEmptyBlock();
488 catch_begin->SetGuestPc(pc);
489 catch_begin->SetCatch(true);
490 catch_begin->SetCatchBegin(true);
491 auto first_catch_bb = GetBlockForPc(pc);
492 catch_begin->AddSucc(first_catch_bb);
493 catch_blocks.emplace(pc, catch_begin);
494 }
495
496 // Connect try_begin and catch_begin blocks
497 for (auto it : try_blocks_) {
498 const auto &try_block = it.second;
499 if (try_block.contains_throwable_inst) {
500 ConnectTryCodeBlock(try_block, catch_blocks);
501 } else if (try_block.basic_blocks != nullptr) {
502 try_block.basic_blocks->clear();
503 }
504 }
505 }
506
/**
 * Wires one throwable try-block into the graph: inserts its try_begin/try_end
 * blocks around the try region and connects both to every catch-handler.
 */
void IrBuilder::ConnectTryCodeBlock(const TryCodeBlock &try_block, const ArenaMap<uint32_t, BasicBlock *> &catch_blocks)
{
    auto try_begin = try_block.begin_bb;
    ASSERT(try_begin != nullptr);
    auto try_end = try_block.end_bb;
    ASSERT(try_end != nullptr);
    // Create auxiliary `Try` instruction
    auto try_inst = GetGraph()->CreateInstTry();
    try_inst->SetTryEndBlock(try_end);
    try_begin->AppendInst(try_inst);
    // Insert `try_begin` and `try_end`
    auto first_try_bb = GetBlockForPc(try_block.boundaries.begin_pc);
    auto last_try_bb = GetPrevBlockForPc(try_block.boundaries.end_pc);
    first_try_bb->InsertBlockBefore(try_begin);
    last_try_bb->InsertBlockBeforeSucc(try_end, last_try_bb->GetSuccessor(0));
    // Connect catch-handlers
    for (auto catch_block : *try_block.catches) {
        auto catch_begin = catch_blocks.at(catch_block.pc);
        // Several catch-clauses may share one handler block; add the edges only once
        if (!try_begin->HasSucc(catch_begin)) {
            try_begin->AddSucc(catch_begin, true);
            try_end->AddSucc(catch_begin, true);
        }
        try_inst->AppendCatchTypeId(catch_block.type_id, try_begin->GetSuccBlockIndex(catch_begin));
    }
}
532
/**
 * `try_end` restoring is required in the following case:
 *    try {
 *        try { a++;}
 *        catch { a++; }
 *    }
 *
 * The nested try doesn't contain throwable instructions, so its related catch-handler will not be connected
 * to the graph. As a result, all `catch` basic blocks will be eliminated together with the outer try's
 * `try_end`, since it was inserted just after the `catch`
 */
RestoreTryEnd(const TryCodeBlock & try_block)544 void IrBuilder::RestoreTryEnd(const TryCodeBlock &try_block)
545 {
546 ASSERT(try_block.end_bb->GetGraph() == nullptr);
547 ASSERT(try_block.end_bb->GetSuccsBlocks().empty());
548 ASSERT(try_block.end_bb->GetPredsBlocks().empty());
549
550 GetGraph()->RestoreBlock(try_block.end_bb);
551 auto last_try_bb = GetPrevBlockForPc(try_block.boundaries.end_pc);
552 last_try_bb->InsertBlockBeforeSucc(try_block.end_bb, last_try_bb->GetSuccessor(0));
553 for (auto succ : try_block.begin_bb->GetSuccsBlocks()) {
554 if (succ->IsCatchBegin()) {
555 try_block.end_bb->AddSucc(succ);
556 }
557 }
558 }
559 } // namespace panda::compiler
560