1 /**
2 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "code_data_accessor-inl.h"
17 #include "method_data_accessor-inl.h"
18 #include "ir_builder.h"
19
20 namespace panda::compiler {
/**
 * Build the IR graph for the current method.
 *
 * Pipeline: check method limitations -> create & connect basic blocks ->
 * build IR instructions per block (in RPO) -> rebuild dominators/loop info ->
 * fix up instructions built with incomplete information.
 *
 * @return false if the method exceeds compiler limits or contains an
 *         instruction unsupported by the builder.
 */
bool IrBuilder::RunImpl()
{
    COMPILER_LOG(INFO, IR_BUILDER) << "Start building ir for method: "
                                   << GetGraph()->GetRuntime()->GetClassNameFromMethod(GetMethod()) << "."
                                   << GetGraph()->GetRuntime()->GetMethodName(GetMethod())
                                   << "(args=" << GetGraph()->GetRuntime()->GetMethodTotalArgumentsCount(GetMethod())
                                   << ", regs=" << GetGraph()->GetRuntime()->GetMethodRegistersCount(GetMethod())
                                   << ")";

    auto instructions_buf = GetGraph()->GetRuntime()->GetMethodCode(GetMethod());
    BytecodeInstructions pbc_instructions(instructions_buf, GetGraph()->GetRuntime()->GetMethodCodeSize(GetMethod()));
    // Frame size: registers + total arguments + 1 extra slot (presumably the accumulator -- TODO confirm)
    size_t vregs_count = GetGraph()->GetRuntime()->GetMethodRegistersCount(GetMethod()) +
                         GetGraph()->GetRuntime()->GetMethodTotalArgumentsCount(GetMethod()) + 1;
    if (!CheckMethodLimitations(pbc_instructions, vregs_count)) {
        return false;
    }
    GetGraph()->SetVRegsCount(vregs_count);
    BuildBasicBlocks(pbc_instructions);
    GetGraph()->RunPass<DominatorsTree>();
    GetGraph()->RunPass<LoopAnalyzer>();

    InstBuilder inst_builder(GetGraph(), GetMethod());
    inst_builder.Prepare();
    // Scratch vector of current vreg definitions, snapshotted per instruction in BuildInstructionsForBB
    inst_defs_.resize(vregs_count);
    COMPILER_LOG(INFO, IR_BUILDER) << "Start instructions building...";
    for (auto bb : GetGraph()->GetBlocksRPO()) {
        if (!BuildBasicBlock(bb, &inst_builder, instructions_buf)) {
            return false;
        }
    }
    // Building instructions may have changed the CFG: recompute dominators and loop info
    GetGraph()->RunPass<DominatorsTree>();
    GetGraph()->InvalidateAnalysis<LoopAnalyzer>();
    GetGraph()->RunPass<LoopAnalyzer>();
    inst_builder.FixInstructions();

    // Optionally account the number of processed bytecode instructions for compiler statistics
    if (options.IsCompilerPrintStats() || options.WasSetCompilerDumpStatsCsv()) {
        uint64_t pbc_inst_num = 0;
        for ([[maybe_unused]] auto i : pbc_instructions) {
            pbc_inst_num++;
        }
        GetGraph()->GetPassManager()->GetStatistics()->AddPbcInstNum(pbc_inst_num);
    }
    COMPILER_LOG(INFO, IR_BUILDER) << "IR successfully built: " << GetGraph()->GetVectorBlocks().size()
                                   << " basic blocks, " << GetGraph()->GetCurrentInstructionId() << " instructions";
    return true;
}
67
CheckMethodLimitations(const BytecodeInstructions & instructions,size_t vregs_count)68 bool IrBuilder::CheckMethodLimitations(const BytecodeInstructions &instructions, size_t vregs_count)
69 {
70 // TODO(a.popov) Optimize catch-phi's memory consumption and get rid of this limitation
71 static constexpr auto TRY_BLOCKS_LIMIT = 128U;
72
73 size_t bytecode_size_limit = options.GetCompilerMaxBytecodeSize();
74
75 // The option CompilerInlineFullIntrinsics increases the size of the code several times.
76 // So the limit for this option is reduced
77 if (options.IsCompilerInlineFullIntrinsics()) {
78 ASSERT(GetGraph()->IsDynamicMethod());
79 bytecode_size_limit >>= 2U;
80 }
81
82 if (instructions.GetSize() > bytecode_size_limit) {
83 COMPILER_LOG(INFO, IR_BUILDER) << "Method is too big: size=" << instructions.GetSize()
84 << ", limit=" << bytecode_size_limit;
85 return false;
86 }
87 if (vregs_count >= options.GetCompilerMaxVregsNum()) {
88 COMPILER_LOG(INFO, IR_BUILDER) << "Method has too many virtual registers: " << vregs_count
89 << ", limit=" << options.GetCompilerMaxVregsNum();
90 return false;
91 }
92
93 auto panda_file = static_cast<panda_file::File *>(GetGraph()->GetRuntime()->GetBinaryFileForMethod(GetMethod()));
94 CHECK_NOT_NULL(panda_file);
95 panda_file::MethodDataAccessor mda(*panda_file,
96 panda_file::File::EntityId(GetGraph()->GetRuntime()->GetMethodId(GetMethod())));
97 panda_file::CodeDataAccessor cda(*panda_file, mda.GetCodeId().value());
98 if (cda.GetTriesSize() > TRY_BLOCKS_LIMIT) {
99 COMPILER_LOG(INFO, IR_BUILDER) << "Method has too many try blocks: " << cda.GetTriesSize()
100 << ", limit=" << TRY_BLOCKS_LIMIT;
101 return false;
102 }
103 return true;
104 }
105
BuildBasicBlock(BasicBlock * bb,InstBuilder * inst_builder,const uint8_t * instructions_buf)106 bool IrBuilder::BuildBasicBlock(BasicBlock *bb, InstBuilder *inst_builder, const uint8_t *instructions_buf)
107 {
108 inst_builder->SetCurrentBlock(bb);
109 inst_builder->UpdateDefs();
110 ASSERT(bb->GetGuestPc() != INVALID_PC);
111 // If block is not in the `blocks_` vector, it's auxiliary block without instructions
112 if (bb == blocks_[bb->GetGuestPc()]) {
113 return BuildInstructionsForBB(bb, inst_builder, instructions_buf);
114 }
115 COMPILER_LOG(DEBUG, IR_BUILDER) << "Auxiliary block, skipping";
116 return true;
117 }
118
/**
 * Translate the bytecode of basic block `bb` into IR instructions.
 * Iterates the bytecode starting at the block's guest pc until either the pc
 * of another basic block or a terminator instruction is reached.
 *
 * @return false if some instruction is not supported by the builder.
 */
bool IrBuilder::BuildInstructionsForBB(BasicBlock *bb, InstBuilder *inst_builder, const uint8_t *instructions_buf)
{
    // The block's end is detected by pc, so the size passed here is effectively "unbounded"
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    BytecodeInstructions instructions(instructions_buf + bb->GetGuestPc(), std::numeric_limits<int>::max());
    for (auto inst : instructions) {
        auto pc = inst_builder->GetPc(inst.GetAddress());
        // Break if current pc is pc of some basic block, that means that it is the end of the current block.
        if (pc != bb->GetGuestPc() && GetBlockForPc(pc) != nullptr) {
            break;
        }
        COMPILER_LOG(DEBUG, IR_BUILDER) << "PBC instruction: " << inst << " # "
                                        << reinterpret_cast<void *>(inst.GetAddress() - instructions_buf);
        // Copy current defs for assigning them to catch-phi if current inst is throwable
        ASSERT(inst_builder->GetCurrentDefs().size() == inst_defs_.size());
        std::copy(inst_builder->GetCurrentDefs().begin(), inst_builder->GetCurrentDefs().end(), inst_defs_.begin());
        // Remember the last IR inst and the block count so the IR created for this bytecode
        // instruction can be located afterwards
        auto current_last_inst = bb->GetLastInst();
        auto bb_count = GetGraph()->GetVectorBlocks().size();
        inst_builder->BuildInstruction(&inst);
        if (inst_builder->IsFailed()) {
            COMPILER_LOG(WARNING, IR_BUILDER) << "Unsupported instruction";
            return false;
        }

        // One PBC instruction can be expanded to the group of IR's instructions, find first built instruction in
        // this group, and then mark all instructions as throwable; All instructions should be marked, since some of
        // them can be deleted during optimizations, unnecessary catch-phi moves will be resolved before Register
        // Allocator
        auto throwable_inst = (current_last_inst == nullptr) ? bb->GetFirstInst() : current_last_inst->GetNext();
        ProcessThrowableInstructions(inst_builder, throwable_inst);

        // Blocks created while building this instruction also hold parts of its IR — process them too
        auto &vb = GetGraph()->GetVectorBlocks();
        for (size_t i = bb_count; i < vb.size(); i++) {
            ProcessThrowableInstructions(inst_builder, vb[i]->GetFirstInst());
        }

        // Break if we meet terminator instruction. If instruction in the middle of basic block we don't create
        // further dead instructions.
        if (inst.IsTerminator() && !inst.IsSuspend()) {
            break;
        }
    }
    return true;
}
162
ProcessThrowableInstructions(InstBuilder * inst_builder,Inst * throwable_inst)163 void IrBuilder::ProcessThrowableInstructions(InstBuilder *inst_builder, Inst *throwable_inst)
164 {
165 for (; throwable_inst != nullptr; throwable_inst = throwable_inst->GetNext()) {
166 if (throwable_inst->IsSaveState()) {
167 continue;
168 }
169 COMPILER_LOG(DEBUG, IR_BUILDER) << "Throwable inst, Id = " << throwable_inst->GetId();
170 catch_handlers_.clear();
171 EnumerateTryBlocksCoveredPc(throwable_inst->GetPc(), [this](const TryCodeBlock &try_block) {
172 auto tbb = try_block.begin_bb;
173 tbb->EnumerateCatchHandlers([this](BasicBlock *catch_handler, [[maybe_unused]] size_t type_id) {
174 catch_handlers_.insert(catch_handler);
175 return true;
176 });
177 });
178 if (!catch_handlers_.empty()) {
179 inst_builder->AddCatchPhiInputs(catch_handlers_, inst_defs_, throwable_inst);
180 }
181 }
182 }
183
InstNotJump(BytecodeInstruction * inst)184 static inline bool InstNotJump(BytecodeInstruction *inst)
185 {
186 return inst->GetAddress() != nullptr && InstBuilder::GetInstructionJumpOffset(inst) == INVALID_OFFSET &&
187 !inst->HasFlag(BytecodeInstruction::RETURN);
188 }
189
BuildBasicBlocks(const BytecodeInstructions & instructions)190 void IrBuilder::BuildBasicBlocks(const BytecodeInstructions &instructions)
191 {
192 blocks_.resize(instructions.GetSize() + 1);
193 bool fallthrough = false;
194
195 CreateBlock(0);
196 // Create basic blocks
197 for (auto inst : instructions) {
198 auto pc = instructions.GetPc(inst);
199
200 if (fallthrough) {
201 CreateBlock(pc);
202 fallthrough = false;
203 }
204 auto offset = InstBuilder::GetInstructionJumpOffset(&inst);
205 if (offset != INVALID_OFFSET) {
206 auto target_pc = pc + static_cast<size_t>(offset);
207 CreateBlock(target_pc);
208 if (inst.HasFlag(BytecodeInstruction::CONDITIONAL)) {
209 fallthrough = true;
210 }
211 }
212 }
213 CreateTryCatchBoundariesBlocks();
214 GetGraph()->CreateStartBlock();
215 GetGraph()->CreateEndBlock(instructions.GetSize());
216 ConnectBasicBlocks(instructions);
217 ResolveTryCatchBlocks();
218 COMPILER_LOG(DEBUG, IR_BUILDER) << "Created " << GetGraph()->GetVectorBlocks().size() << " basic blocks";
219 }
220
221 template <class Callback>
EnumerateTryBlocksCoveredPc(uint32_t pc,const Callback & callback)222 void IrBuilder::EnumerateTryBlocksCoveredPc(uint32_t pc, const Callback &callback)
223 {
224 for (const auto &[begin_pc, try_block] : try_blocks_) {
225 if (begin_pc <= pc && pc < try_block.boundaries.end_pc) {
226 callback(try_block);
227 }
228 }
229 }
230
/**
 * Return the `TryCodeBlock` with the given boundaries, inserting a new one
 * into `try_blocks_` if no entry with identical boundaries exists yet.
 */
IrBuilder::TryCodeBlock *IrBuilder::InsertTryBlockInfo(const Boundaries &try_boundaries)
{
    auto try_id = static_cast<uint32_t>(try_blocks_.size());
    auto range = try_blocks_.equal_range(try_boundaries.begin_pc);
    for (auto iter = range.first; iter != range.second; ++iter) {
        // use try-block with the same boundaries
        if (try_boundaries.end_pc == iter->second.boundaries.end_pc) {
            return &iter->second;
        }
        // Keep entries sharing begin_pc ordered by end_pc.
        // NOTE(review): the hint-insert below places the new block before the first entry with a
        // smaller end_pc, so same-key entries end up in DECREASING end_pc order (outermost try
        // first) — confirm this ordering is the intended one, the original comment said "increasing".
        if (try_boundaries.end_pc > iter->second.boundaries.end_pc) {
            auto it = try_blocks_.emplace_hint(iter, try_boundaries.begin_pc, TryCodeBlock {try_boundaries});
            it->second.Init(GetGraph(), try_id);
            return &it->second;
        }
    }
    auto it = try_blocks_.emplace(try_boundaries.begin_pc, TryCodeBlock {try_boundaries});
    it->second.Init(GetGraph(), try_id);
    return &it->second;
}
254
/**
 * Enumerate try-blocks and catch-handlers from the panda file metadata and
 * create basic blocks at every try begin/end pc and every catch-handler pc.
 */
void IrBuilder::CreateTryCatchBoundariesBlocks()
{
    auto panda_file = static_cast<panda_file::File *>(GetGraph()->GetRuntime()->GetBinaryFileForMethod(GetMethod()));
    CHECK_NOT_NULL(panda_file);
    panda_file::MethodDataAccessor mda(*panda_file,
                                       panda_file::File::EntityId(GetGraph()->GetRuntime()->GetMethodId(GetMethod())));
    panda_file::CodeDataAccessor cda(*panda_file, mda.GetCodeId().value());

    // Collect try-block boundaries and catch-handler pcs from the file metadata
    cda.EnumerateTryBlocks([this](panda_file::CodeDataAccessor::TryBlock &try_block) {
        auto start_pc = try_block.GetStartPc();
        auto end_pc = start_pc + try_block.GetLength();
        auto try_info = InsertTryBlockInfo({start_pc, end_pc});
        try_block.EnumerateCatchBlocks([this, try_info](panda_file::CodeDataAccessor::CatchBlock &catch_block) {
            auto pc = catch_block.GetHandlerPc();
            catches_pc_.insert(pc);
            // Second field appears to be a type id initialized to 0 here -- TODO confirm where it is filled
            try_info->catches->emplace_back(CatchCodeBlock {pc, 0u});
            return true;
        });

        return true;
    });

    COMPILER_LOG(INFO, IR_BUILDER) << "There are: " << try_blocks_.size() << " try-blocks in the method";
    COMPILER_LOG(INFO, IR_BUILDER) << "There are: " << catches_pc_.size() << " catch-handlers in the method";

    // Every try boundary and every catch entry starts a new basic block
    for (const auto &[pc, try_block] : try_blocks_) {
        CreateBlock(pc);
        CreateBlock(try_block.boundaries.end_pc);
    }
    for (auto pc : catches_pc_) {
        CreateBlock(pc);
    }
}
288
/// Transient state used by `ConnectBasicBlocks` while walking the bytecode.
struct BlocksConnectorInfo {
    bool fallthrough {};  // previous instruction was a conditional jump falling through into the next block
    bool dead_instructions {};  // currently skipping unreachable instructions after an unconditional jump
    BytecodeInstruction prev_inst {nullptr};  // previously processed instruction
};
294
/**
 * Build CFG edges between the created basic blocks by walking the bytecode:
 * fall-through edges, jump edges and the edge from the start block.
 * Also tracks try-block boundaries for every visited pc.
 */
void IrBuilder::ConnectBasicBlocks(const BytecodeInstructions &instructions)
{
    BlocksConnectorInfo info;
    BasicBlock *curr_bb = blocks_[0];
    GetGraph()->GetStartBlock()->AddSucc(curr_bb);
    for (auto inst : instructions) {
        auto pc = instructions.GetPc(inst);
        auto target_block = blocks_[pc];
        TrackTryBoundaries(pc, inst);
        if (info.fallthrough) {
            ASSERT(target_block != nullptr);
            // May be the second edge between same blocks
            curr_bb->AddSucc(target_block, true);
            info.fallthrough = false;
            curr_bb = target_block;
        } else if (target_block != nullptr) {
            // A new block starts here; add a fall-through edge unless the previous instruction
            // was a jump/return or we were in dead code. Catch-handler entries get their
            // predecessors later, in ConnectTryCatchBlocks.
            if (catches_pc_.count(pc) == 0) {
                if (InstNotJump(&info.prev_inst) && !info.dead_instructions) {
                    curr_bb->AddSucc(target_block);
                }
            }
            curr_bb = target_block;
            info.dead_instructions = false;
        } else if (info.dead_instructions) {
            // We are processing dead instructions now, skipping them until we meet the next block.
            continue;
        }
        if (auto jmp_target_block = GetBlockToJump(&inst, pc); jmp_target_block != nullptr) {
            curr_bb->AddSucc(jmp_target_block);
            // In case of unconditional branch, we reset curr_bb, so if next instruction won't start new block, then
            // we'll skip further dead instructions.
            info.fallthrough = inst.HasFlag(BytecodeInstruction::CONDITIONAL);
            if (!info.fallthrough) {
                info.dead_instructions = true;
            }
        }
        info.prev_inst = inst;
    }

    // Erase end block if it wasn't connected, should be infinite loop in the graph
    if (GetGraph()->GetEndBlock()->GetPredsBlocks().empty()) {
        GetGraph()->EraseBlock(GetGraph()->GetEndBlock());
        GetGraph()->SetEndBlock(nullptr);
        COMPILER_LOG(INFO, IR_BUILDER) << "Builded graph without end block";
    }
}
341
TrackTryBoundaries(size_t pc,const BytecodeInstruction & inst)342 void IrBuilder::TrackTryBoundaries(size_t pc, const BytecodeInstruction &inst)
343 {
344 opened_try_blocks_.remove_if([pc](TryCodeBlock *try_block) { return try_block->boundaries.end_pc == pc; });
345
346 if (try_blocks_.count(pc) > 0) {
347 auto range = try_blocks_.equal_range(pc);
348 for (auto it = range.first; it != range.second; ++it) {
349 auto &try_block = it->second;
350 if (try_block.boundaries.end_pc > pc) {
351 opened_try_blocks_.push_back(&try_block);
352 auto allocator = GetGraph()->GetLocalAllocator();
353 try_block.basic_blocks = allocator->New<ArenaVector<BasicBlock *>>(allocator->Adapter());
354 } else {
355 // Empty try-block
356 ASSERT(try_block.boundaries.end_pc == pc);
357 }
358 }
359 }
360
361 if (opened_try_blocks_.empty()) {
362 return;
363 }
364
365 if (auto bb = blocks_[pc]; bb != nullptr) {
366 for (auto try_block : opened_try_blocks_) {
367 try_block->basic_blocks->push_back(bb);
368 }
369 }
370
371 for (auto &try_block : opened_try_blocks_) {
372 try_block->contains_throwable_inst = true;
373 }
374 }
375
GetBlockToJump(BytecodeInstruction * inst,size_t pc)376 BasicBlock *IrBuilder::GetBlockToJump(BytecodeInstruction *inst, size_t pc)
377 {
378 if ((inst->HasFlag(BytecodeInstruction::RETURN) && !inst->HasFlag(BytecodeInstruction::SUSPEND)) ||
379 inst->IsThrow(BytecodeInstruction::Exceptions::X_THROW)) {
380 return GetGraph()->GetEndBlock();
381 }
382
383 #ifdef ENABLE_BYTECODE_OPT
384 if (inst->GetOpcode() == BytecodeInstruction::Opcode::RETURNUNDEFINED) {
385 return GetGraph()->GetEndBlock();
386 }
387 #endif
388
389 if (auto offset = InstBuilder::GetInstructionJumpOffset(inst); offset != INVALID_OFFSET) {
390 ASSERT(blocks_[pc + static_cast<size_t>(offset)] != nullptr);
391 return blocks_[pc + static_cast<size_t>(offset)];
392 }
393 return nullptr;
394 }
395
396 /**
397 * Mark blocks which were connected to the graph.
398 * Catch-handlers will not be marked, since they have not been connected yet.
399 */
MarkNormalControlFlow(BasicBlock * block,Marker marker)400 static void MarkNormalControlFlow(BasicBlock *block, Marker marker)
401 {
402 block->SetMarker(marker);
403 for (auto succ : block->GetSuccsBlocks()) {
404 if (!succ->IsMarked(marker)) {
405 MarkNormalControlFlow(succ, marker);
406 }
407 }
408 }
409
/**
 * Set catch/try flags on basic blocks after unreachable-block removal.
 * Blocks left without the `normal control flow` marker are catch blocks.
 */
void IrBuilder::MarkTryCatchBlocks(Marker marker)
{
    // All blocks without `normal` mark are considered as catch-blocks
    for (auto bb : GetGraph()->GetBlocksRPO()) {
        if (bb->IsMarked(marker)) {
            continue;
        }
        if (bb->IsTryBegin()) {
            // try_begin inherits catch-ness from its first successor
            bb->SetCatch(bb->GetSuccessor(0)->IsCatch());
        } else if (bb->IsTryEnd()) {
            // try_end inherits catch-ness from its first predecessor
            bb->SetCatch(bb->GetPredecessor(0)->IsCatch());
        } else {
            bb->SetCatch(true);
        }
    }

    // Nested try-blocks can be removed, but referring to them basic blocks can be placed in the external try-blocks.
    // So `try` marks are added after removing unreachable blocks
    // NOTE(review): `auto it` copies the whole pair (including TryCodeBlock) each iteration;
    // `const auto &` would avoid that — behavior is unchanged since only pointer members are used.
    for (auto it : try_blocks_) {
        const auto &try_block = it.second;
        // try_end removed together with an unreachable catch region must be re-inserted
        if (try_block.begin_bb->GetGraph() != try_block.end_bb->GetGraph()) {
            RestoreTryEnd(try_block);
        }
        try_block.begin_bb->SetTryId(try_block.id);
        try_block.end_bb->SetTryId(try_block.id);
        if (try_block.basic_blocks == nullptr) {
            continue;
        }
        for (auto bb : *try_block.basic_blocks) {
            bb->SetTryId(try_block.id);
            bb->SetTry(true);
        }
    }
}
444
/**
 * Connect catch-blocks to the graph.
 * Reachable blocks are marked first (catch-handlers are not reachable at this
 * point), then try/catch blocks are connected, unreachable blocks removed and
 * try/catch flags assigned.
 */
void IrBuilder::ResolveTryCatchBlocks()
{
    auto marker_holder = MarkerHolder(GetGraph());
    auto marker = marker_holder.GetMarker();
    // Mark BEFORE connecting: anything unmarked afterwards belongs to catch control flow
    MarkNormalControlFlow(GetGraph()->GetStartBlock(), marker);
    ConnectTryCatchBlocks();
    GetGraph()->RemoveUnreachableBlocks();
    MarkTryCatchBlocks(marker);
}
457
ConnectTryCatchBlocks()458 void IrBuilder::ConnectTryCatchBlocks()
459 {
460 ArenaMap<uint32_t, BasicBlock *> catch_blocks(GetGraph()->GetLocalAllocator()->Adapter());
461 // Firstly create catch_begin blocks, as they should precede try_begin blocks
462 for (auto pc : catches_pc_) {
463 auto catch_begin = GetGraph()->CreateEmptyBlock();
464 catch_begin->SetGuestPc(pc);
465 catch_begin->SetCatch(true);
466 catch_begin->SetCatchBegin(true);
467 auto first_catch_bb = GetBlockForPc(pc);
468 catch_begin->AddSucc(first_catch_bb);
469 catch_blocks.emplace(pc, catch_begin);
470 }
471
472 // Connect try_begin and catch_begin blocks
473 for (auto it : try_blocks_) {
474 const auto &try_block = it.second;
475 if (try_block.contains_throwable_inst) {
476 ConnectTryCodeBlock(try_block, catch_blocks);
477 } else if (try_block.basic_blocks != nullptr) {
478 try_block.basic_blocks->clear();
479 }
480 }
481 }
482
/**
 * Insert `try_begin`/`try_end` blocks around the try region and connect both
 * of them to every catch-handler entry block. An auxiliary `Try` instruction
 * holding the catch type ids is appended to `try_begin`.
 */
void IrBuilder::ConnectTryCodeBlock(const TryCodeBlock &try_block, const ArenaMap<uint32_t, BasicBlock *> &catch_blocks)
{
    auto try_begin = try_block.begin_bb;
    ASSERT(try_begin != nullptr);
    auto try_end = try_block.end_bb;
    ASSERT(try_end != nullptr);
    // Create auxiliary `Try` instruction
    auto try_inst = GetGraph()->CreateInstTry();
    try_inst->SetTryEndBlock(try_end);
    try_begin->AppendInst(try_inst);
    // Insert `try_begin` before the first block of the region and `try_end` after the last one
    auto first_try_bb = GetBlockForPc(try_block.boundaries.begin_pc);
    auto last_try_bb = GetPrevBlockForPc(try_block.boundaries.end_pc);
    first_try_bb->InsertBlockBefore(try_begin);
    last_try_bb->InsertBlockBeforeSucc(try_end, last_try_bb->GetSuccessor(0));
    // Connect catch-handlers: both try_begin and try_end get an edge to each handler
    // (presumably so defs from anywhere in the try region reach the catch-phis -- TODO confirm)
    for (auto catch_block : *try_block.catches) {
        auto catch_begin = catch_blocks.at(catch_block.pc);
        if (!try_begin->HasSucc(catch_begin)) {
            try_begin->AddSucc(catch_begin, true);
            try_end->AddSucc(catch_begin, true);
        }
        // The catch type id is associated with try_begin's successor index of the handler
        try_inst->AppendCatchTypeId(catch_block.type_id, try_begin->GetSuccBlockIndex(catch_begin));
    }
}
508
509 /**
510 * `try_end` restoring is required in the following case:
511 * try {
512 * try { a++;}
513 * catch { a++; }
514 * }
515 *
516 * Nested try doesn't contain throwable instructions and related catch-handler will not be connected to the graph.
517 * As a result all `catch` basic blocks will be eliminated together with outer's `try_end`, since it was inserted just
518 * after `catch`
519 */
RestoreTryEnd(const TryCodeBlock & try_block)520 void IrBuilder::RestoreTryEnd(const TryCodeBlock &try_block)
521 {
522 ASSERT(try_block.end_bb->GetGraph() == nullptr);
523 ASSERT(try_block.end_bb->GetSuccsBlocks().empty());
524 ASSERT(try_block.end_bb->GetPredsBlocks().empty());
525
526 GetGraph()->RestoreBlock(try_block.end_bb);
527 auto last_try_bb = GetPrevBlockForPc(try_block.boundaries.end_pc);
528 last_try_bb->InsertBlockBeforeSucc(try_block.end_bb, last_try_bb->GetSuccessor(0));
529 for (auto succ : try_block.begin_bb->GetSuccsBlocks()) {
530 if (succ->IsCatchBegin()) {
531 try_block.end_bb->AddSucc(succ);
532 }
533 }
534 }
535 } // namespace panda::compiler
536