/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
15
16 #include <fstream>
17 #include <method_data_accessor.h>
18 #include "compiler_logger.h"
19 #include "pbc_iterator.h"
20 #include "bytecode_instruction.h"
21 #include "code_data_accessor.h"
22 #include "code_data_accessor-inl.h"
23 #include "method_data_accessor.h"
24 #include "optimizer/analysis/dominators_tree.h"
25 #include "optimizer/analysis/loop_analyzer.h"
26 #include "method_data_accessor-inl.h"
27 #include "ir_builder.h"
28
29 namespace panda::compiler {
RunImpl()30 bool IrBuilder::RunImpl()
31 {
32 COMPILER_LOG(INFO, IR_BUILDER) << "Start building ir for method: "
33 << GetGraph()->GetRuntime()->GetClassNameFromMethod(GetMethod()) << "."
34 << GetGraph()->GetRuntime()->GetMethodName(GetMethod())
35 << "(args=" << GetGraph()->GetRuntime()->GetMethodTotalArgumentsCount(GetMethod())
36 << ", regs=" << GetGraph()->GetRuntime()->GetMethodRegistersCount(GetMethod())
37 << ")";
38
39 auto instructions_buf = GetGraph()->GetRuntime()->GetMethodCode(GetMethod());
40 BytecodeInstructions pbc_instructions(instructions_buf, GetGraph()->GetRuntime()->GetMethodCodeSize(GetMethod()));
41 size_t vregs_count = GetGraph()->GetRuntime()->GetMethodRegistersCount(GetMethod()) +
42 GetGraph()->GetRuntime()->GetMethodTotalArgumentsCount(GetMethod()) + 1;
43 if (!CheckMethodLimitations(pbc_instructions, vregs_count)) {
44 return false;
45 }
46 GetGraph()->SetVRegsCount(vregs_count);
47 BuildBasicBlocks(pbc_instructions);
48 GetGraph()->RunPass<DominatorsTree>();
49 GetGraph()->RunPass<LoopAnalyzer>();
50
51 InstBuilder inst_builder(GetGraph(), GetMethod());
52 inst_builder.Prepare();
53 inst_defs_.resize(vregs_count);
54 COMPILER_LOG(INFO, IR_BUILDER) << "Start instructions building...";
55 for (auto bb : GetGraph()->GetBlocksRPO()) {
56 if (!BuildBasicBlock(bb, &inst_builder, instructions_buf)) {
57 return false;
58 }
59 }
60 GetGraph()->RunPass<DominatorsTree>();
61 GetGraph()->InvalidateAnalysis<LoopAnalyzer>();
62 GetGraph()->RunPass<LoopAnalyzer>();
63 inst_builder.FixInstructions();
64
65 if (options.IsCompilerPrintStats() || options.WasSetCompilerDumpStatsCsv()) {
66 uint64_t pbc_inst_num = 0;
67 for ([[maybe_unused]] auto i : pbc_instructions) {
68 pbc_inst_num++;
69 }
70 GetGraph()->GetPassManager()->GetStatistics()->AddPbcInstNum(pbc_inst_num);
71 }
72 COMPILER_LOG(INFO, IR_BUILDER) << "IR successfully built: " << GetGraph()->GetVectorBlocks().size()
73 << " basic blocks, " << GetGraph()->GetCurrentInstructionId() << " instructions";
74 return true;
75 }
76
CheckMethodLimitations(const BytecodeInstructions & instructions,size_t vregs_count)77 bool IrBuilder::CheckMethodLimitations(const BytecodeInstructions &instructions, size_t vregs_count)
78 {
79 // TODO(a.popov) Optimize catch-phi's memory consumption and get rid of this limitation
80 static constexpr auto TRY_BLOCKS_LIMIT = 128U;
81
82 size_t bytecode_size_limit = options.GetCompilerMaxBytecodeSize();
83
84 // The option CompilerInlineFullIntrinsics increases the size of the code several times.
85 // So the limit for this option is reduced
86 if (options.IsCompilerInlineFullIntrinsics()) {
87 ASSERT(GetGraph()->IsDynamicMethod());
88 bytecode_size_limit >>= 2U;
89 }
90
91 if (instructions.GetSize() > bytecode_size_limit) {
92 COMPILER_LOG(INFO, IR_BUILDER) << "Method is too big: size=" << instructions.GetSize()
93 << ", limit=" << bytecode_size_limit;
94 return false;
95 }
96 if (vregs_count >= options.GetCompilerMaxVregsNum()) {
97 COMPILER_LOG(INFO, IR_BUILDER) << "Method has too many virtual registers: " << vregs_count
98 << ", limit=" << options.GetCompilerMaxVregsNum();
99 return false;
100 }
101
102 auto panda_file = static_cast<panda_file::File *>(GetGraph()->GetRuntime()->GetBinaryFileForMethod(GetMethod()));
103 panda_file::MethodDataAccessor mda(*panda_file,
104 panda_file::File::EntityId(GetGraph()->GetRuntime()->GetMethodId(GetMethod())));
105 panda_file::CodeDataAccessor cda(*panda_file, mda.GetCodeId().value());
106 if (cda.GetTriesSize() > TRY_BLOCKS_LIMIT) {
107 COMPILER_LOG(INFO, IR_BUILDER) << "Method has too many try blocks: " << cda.GetTriesSize()
108 << ", limit=" << TRY_BLOCKS_LIMIT;
109 return false;
110 }
111 return true;
112 }
113
BuildBasicBlock(BasicBlock * bb,InstBuilder * inst_builder,const uint8_t * instructions_buf)114 bool IrBuilder::BuildBasicBlock(BasicBlock *bb, InstBuilder *inst_builder, const uint8_t *instructions_buf)
115 {
116 inst_builder->SetCurrentBlock(bb);
117 inst_builder->UpdateDefs();
118 ASSERT(bb->GetGuestPc() != INVALID_PC);
119 // If block is not in the `blocks_` vector, it's auxiliary block without instructions
120 if (bb == blocks_[bb->GetGuestPc()]) {
121 return BuildInstructionsForBB(bb, inst_builder, instructions_buf);
122 }
123 COMPILER_LOG(DEBUG, IR_BUILDER) << "Auxiliary block, skipping";
124 return true;
125 }
126
/// Build IR instructions for all bytecode instructions belonging to block `bb`.
/// Iteration stops at the start pc of the next basic block or at a terminator.
/// Returns false if an unsupported bytecode instruction was met.
bool IrBuilder::BuildInstructionsForBB(BasicBlock *bb, InstBuilder *inst_builder, const uint8_t *instructions_buf)
{
    // The real end of the block's bytecode is detected inside the loop, so the
    // instruction stream is given an effectively unbounded size here.
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    BytecodeInstructions instructions(instructions_buf + bb->GetGuestPc(), std::numeric_limits<int>::max());
    for (auto inst : instructions) {
        auto pc = inst_builder->GetPc(inst.GetAddress());
        // Break if current pc is pc of some basic block, that means that it is the end of the current block.
        if (pc != bb->GetGuestPc() && GetBlockForPc(pc) != nullptr) {
            break;
        }
        COMPILER_LOG(DEBUG, IR_BUILDER) << "PBC instruction: " << inst << " # "
                                        << reinterpret_cast<void *>(inst.GetAddress() - instructions_buf);
        // Copy current defs for assigning them to catch-phi if current inst is throwable
        ASSERT(inst_builder->GetCurrentDefs().size() == inst_defs_.size());
        std::copy(inst_builder->GetCurrentDefs().begin(), inst_builder->GetCurrentDefs().end(), inst_defs_.begin());
        // Remember the last instruction and the block count BEFORE building, so the
        // newly created instructions/blocks can be identified afterwards
        auto current_last_inst = bb->GetLastInst();
        auto bb_count = GetGraph()->GetVectorBlocks().size();
        inst_builder->BuildInstruction(&inst);
        if (inst_builder->IsFailed()) {
            COMPILER_LOG(WARNING, IR_BUILDER) << "Unsupported instruction";
            return false;
        }

        // One PBC instruction can be expanded to the group of IR's instructions, find first built instruction in
        // this group, and then mark all instructions as throwable; All instructions should be marked, since some of
        // them can be deleted during optimizations, unnecessary catch-phi moves will be resolved before Register
        // Allocator
        auto throwable_inst = (current_last_inst == nullptr) ? bb->GetFirstInst() : current_last_inst->GetNext();
        ProcessThrowableInstructions(inst_builder, throwable_inst);

        // Basic blocks appended by BuildInstruction (indices >= bb_count) must have
        // their instructions processed as throwable as well
        auto &vb = GetGraph()->GetVectorBlocks();
        for (size_t i = bb_count; i < vb.size(); i++) {
            ProcessThrowableInstructions(inst_builder, vb[i]->GetFirstInst());
        }

        // Break if we meet terminator instruction. If instruction in the middle of basic block we don't create
        // further dead instructions.
        if (inst.IsTerminator() && !inst.IsSuspend()) {
            break;
        }
    }
    return true;
}
170
ProcessThrowableInstructions(InstBuilder * inst_builder,Inst * throwable_inst)171 void IrBuilder::ProcessThrowableInstructions(InstBuilder *inst_builder, Inst *throwable_inst)
172 {
173 for (; throwable_inst != nullptr; throwable_inst = throwable_inst->GetNext()) {
174 if (throwable_inst->IsSaveState()) {
175 continue;
176 }
177 COMPILER_LOG(DEBUG, IR_BUILDER) << "Throwable inst, Id = " << throwable_inst->GetId();
178 catch_handlers_.clear();
179 EnumerateTryBlocksCoveredPc(throwable_inst->GetPc(), [this](const TryCodeBlock &try_block) {
180 auto tbb = try_block.begin_bb;
181 tbb->EnumerateCatchHandlers([this](BasicBlock *catch_handler, [[maybe_unused]] size_t type_id) {
182 catch_handlers_.insert(catch_handler);
183 return true;
184 });
185 });
186 if (!catch_handlers_.empty()) {
187 inst_builder->AddCatchPhiInputs(catch_handlers_, inst_defs_, throwable_inst);
188 }
189 }
190 }
191
InstNotJump(BytecodeInstruction * inst)192 static inline bool InstNotJump(BytecodeInstruction *inst)
193 {
194 return inst->GetAddress() != nullptr && InstBuilder::GetInstructionJumpOffset(inst) == INVALID_OFFSET &&
195 !inst->HasFlag(BytecodeInstruction::RETURN);
196 }
197
BuildBasicBlocks(const BytecodeInstructions & instructions)198 void IrBuilder::BuildBasicBlocks(const BytecodeInstructions &instructions)
199 {
200 blocks_.resize(instructions.GetSize() + 1);
201 bool fallthrough = false;
202
203 CreateBlock(0);
204 // Create basic blocks
205 for (auto inst : instructions) {
206 auto pc = instructions.GetPc(inst);
207
208 if (fallthrough) {
209 CreateBlock(pc);
210 fallthrough = false;
211 }
212 auto offset = InstBuilder::GetInstructionJumpOffset(&inst);
213 if (offset != INVALID_OFFSET) {
214 auto target_pc = pc + static_cast<size_t>(offset);
215 CreateBlock(target_pc);
216 if (inst.HasFlag(BytecodeInstruction::CONDITIONAL)) {
217 fallthrough = true;
218 }
219 }
220 }
221 CreateTryCatchBoundariesBlocks();
222 GetGraph()->CreateStartBlock();
223 GetGraph()->CreateEndBlock(instructions.GetSize());
224 ConnectBasicBlocks(instructions);
225 ResolveTryCatchBlocks();
226 COMPILER_LOG(DEBUG, IR_BUILDER) << "Created " << GetGraph()->GetVectorBlocks().size() << " basic blocks";
227 }
228
229 template <class Callback>
EnumerateTryBlocksCoveredPc(uint32_t pc,const Callback & callback)230 void IrBuilder::EnumerateTryBlocksCoveredPc(uint32_t pc, const Callback &callback)
231 {
232 for (const auto &[begin_pc, try_block] : try_blocks_) {
233 if (begin_pc <= pc && pc < try_block.boundaries.end_pc) {
234 callback(try_block);
235 }
236 }
237 }
238
/**
 * Find an existing `TryCodeBlock` with the given boundaries or insert a new one;
 * return a pointer to it
 */
IrBuilder::TryCodeBlock *IrBuilder::InsertTryBlockInfo(const Boundaries &try_boundaries)
{
    // Id that would be assigned to a newly created try-block (unused if an
    // existing block with the same boundaries is found)
    auto try_id = static_cast<uint32_t>(try_blocks_.size());
    // All try-blocks sharing this begin_pc
    auto range = try_blocks_.equal_range(try_boundaries.begin_pc);
    for (auto iter = range.first; iter != range.second; ++iter) {
        // use try-block with the same boundaries
        if (try_boundaries.end_pc == iter->second.boundaries.end_pc) {
            return &iter->second;
        }
        // Insert before the first entry with a smaller end_pc, keeping blocks with
        // equal begin_pc ordered by decreasing end_pc (outermost first).
        // NOTE(review): the comparison below maintains *decreasing* end_pc order,
        // not increasing as a previous comment claimed — confirm intended order.
        if (try_boundaries.end_pc > iter->second.boundaries.end_pc) {
            auto it = try_blocks_.emplace_hint(iter, try_boundaries.begin_pc, TryCodeBlock {try_boundaries});
            it->second.Init(GetGraph(), try_id);
            return &it->second;
        }
    }
    // No entry with this begin_pc, or all existing entries have a larger end_pc:
    // append at the end of the equal range
    auto it = try_blocks_.emplace(try_boundaries.begin_pc, TryCodeBlock {try_boundaries});
    it->second.Init(GetGraph(), try_id);
    return &it->second;
}
262
CreateTryCatchBoundariesBlocks()263 void IrBuilder::CreateTryCatchBoundariesBlocks()
264 {
265 auto panda_file = static_cast<panda_file::File *>(GetGraph()->GetRuntime()->GetBinaryFileForMethod(GetMethod()));
266 panda_file::MethodDataAccessor mda(*panda_file,
267 panda_file::File::EntityId(GetGraph()->GetRuntime()->GetMethodId(GetMethod())));
268 panda_file::CodeDataAccessor cda(*panda_file, mda.GetCodeId().value());
269
270 cda.EnumerateTryBlocks([this](panda_file::CodeDataAccessor::TryBlock &try_block) {
271 auto start_pc = try_block.GetStartPc();
272 auto end_pc = start_pc + try_block.GetLength();
273 auto try_info = InsertTryBlockInfo({start_pc, end_pc});
274 try_block.EnumerateCatchBlocks([this, try_info](panda_file::CodeDataAccessor::CatchBlock &catch_block) {
275 auto pc = catch_block.GetHandlerPc();
276 catches_pc_.insert(pc);
277 try_info->catches->emplace_back(CatchCodeBlock {pc, 0u});
278 return true;
279 });
280
281 return true;
282 });
283
284 COMPILER_LOG(INFO, IR_BUILDER) << "There are: " << try_blocks_.size() << " try-blocks in the method";
285 COMPILER_LOG(INFO, IR_BUILDER) << "There are: " << catches_pc_.size() << " catch-handlers in the method";
286
287 for (const auto &[pc, try_block] : try_blocks_) {
288 CreateBlock(pc);
289 CreateBlock(try_block.boundaries.end_pc);
290 }
291 for (auto pc : catches_pc_) {
292 CreateBlock(pc);
293 }
294 }
295
/// Transient state carried across iterations of `ConnectBasicBlocks`'s
/// linear walk over the bytecode.
struct BlocksConnectorInfo {
    bool fallthrough {};  // Previous instruction was a conditional jump; current pc continues its block
    bool dead_instructions {};  // Currently inside an unreachable region after an unconditional jump
    BytecodeInstruction prev_inst {nullptr};  // Previously processed instruction
};
301
/// Create CFG edges between the basic blocks produced by `BuildBasicBlocks`,
/// walking the bytecode linearly and tracking fallthrough edges, jump targets
/// and unreachable (dead) instruction regions.
void IrBuilder::ConnectBasicBlocks(const BytecodeInstructions &instructions)
{
    BlocksConnectorInfo info;
    BasicBlock *curr_bb = blocks_[0];
    GetGraph()->GetStartBlock()->AddSucc(curr_bb);
    for (auto inst : instructions) {
        auto pc = instructions.GetPc(inst);
        // Non-null only when a basic block starts exactly at this pc
        auto target_block = blocks_[pc];
        TrackTryBoundaries(pc, inst);
        if (info.fallthrough) {
            ASSERT(target_block != nullptr);
            // May be the second edge between same blocks
            curr_bb->AddSucc(target_block, true);
            info.fallthrough = false;
            curr_bb = target_block;
        } else if (target_block != nullptr) {
            // A new block starts here: add a fallthrough edge from the previous block
            // unless the previous instruction was a jump/return, we were in a dead
            // region, or this pc is a catch-handler entry
            if (catches_pc_.count(pc) == 0) {
                if (InstNotJump(&info.prev_inst) && !info.dead_instructions) {
                    curr_bb->AddSucc(target_block);
                }
            }
            curr_bb = target_block;
            info.dead_instructions = false;
        } else if (info.dead_instructions) {
            // We are processing dead instructions now, skipping them until we meet the next block.
            continue;
        }
        if (auto jmp_target_block = GetBlockToJump(&inst, pc); jmp_target_block != nullptr) {
            curr_bb->AddSucc(jmp_target_block);
            // In case of unconditional branch, we reset curr_bb, so if next instruction won't start new block, then
            // we'll skip further dead instructions.
            info.fallthrough = inst.HasFlag(BytecodeInstruction::CONDITIONAL);
            if (!info.fallthrough) {
                info.dead_instructions = true;
            }
        }
        info.prev_inst = inst;
    }

    // Erase end block if it wasn't connected, should be infinite loop in the graph
    if (GetGraph()->GetEndBlock()->GetPredsBlocks().empty()) {
        GetGraph()->EraseBlock(GetGraph()->GetEndBlock());
        GetGraph()->SetEndBlock(nullptr);
        COMPILER_LOG(INFO, IR_BUILDER) << "Builded graph without end block";
    }
}
348
TrackTryBoundaries(size_t pc,const BytecodeInstruction & inst)349 void IrBuilder::TrackTryBoundaries(size_t pc, const BytecodeInstruction &inst)
350 {
351 opened_try_blocks_.remove_if([pc](TryCodeBlock *try_block) { return try_block->boundaries.end_pc == pc; });
352
353 if (try_blocks_.count(pc) > 0) {
354 auto range = try_blocks_.equal_range(pc);
355 for (auto it = range.first; it != range.second; ++it) {
356 auto &try_block = it->second;
357 if (try_block.boundaries.end_pc > pc) {
358 opened_try_blocks_.push_back(&try_block);
359 auto allocator = GetGraph()->GetLocalAllocator();
360 try_block.basic_blocks = allocator->New<ArenaVector<BasicBlock *>>(allocator->Adapter());
361 } else {
362 // Empty try-block
363 ASSERT(try_block.boundaries.end_pc == pc);
364 }
365 }
366 }
367
368 if (opened_try_blocks_.empty()) {
369 return;
370 }
371
372 if (auto bb = blocks_[pc]; bb != nullptr) {
373 for (auto try_block : opened_try_blocks_) {
374 try_block->basic_blocks->push_back(bb);
375 }
376 }
377
378 for (auto &try_block : opened_try_blocks_) {
379 try_block->contains_throwable_inst = true;
380 }
381 }
382
GetBlockToJump(BytecodeInstruction * inst,size_t pc)383 BasicBlock *IrBuilder::GetBlockToJump(BytecodeInstruction *inst, size_t pc)
384 {
385 if ((inst->HasFlag(BytecodeInstruction::RETURN) && !inst->HasFlag(BytecodeInstruction::SUSPEND)) ||
386 inst->IsThrow(BytecodeInstruction::Exceptions::X_THROW)) {
387 return GetGraph()->GetEndBlock();
388 }
389
390 #ifdef ENABLE_BYTECODE_OPT
391 if (inst->GetOpcode() == BytecodeInstruction::Opcode::RETURNUNDEFINED) {
392 return GetGraph()->GetEndBlock();
393 }
394 #endif
395
396 if (auto offset = InstBuilder::GetInstructionJumpOffset(inst); offset != INVALID_OFFSET) {
397 ASSERT(blocks_[pc + static_cast<size_t>(offset)] != nullptr);
398 return blocks_[pc + static_cast<size_t>(offset)];
399 }
400 return nullptr;
401 }
402
/**
 * Mark blocks which were connected to the graph.
 * Catch-handlers will not be marked, since they have not been connected yet.
 */
MarkNormalControlFlow(BasicBlock * block,Marker marker)407 static void MarkNormalControlFlow(BasicBlock *block, Marker marker)
408 {
409 block->SetMarker(marker);
410 for (auto succ : block->GetSuccsBlocks()) {
411 if (!succ->IsMarked(marker)) {
412 MarkNormalControlFlow(succ, marker);
413 }
414 }
415 }
416
/// Set `catch`/`try` flags and try ids on basic blocks, after unreachable
/// blocks were removed. `marker` identifies blocks reachable through normal
/// (non-exceptional) control flow.
void IrBuilder::MarkTryCatchBlocks(Marker marker)
{
    // All blocks without `normal` mark are considered as catch-blocks
    for (auto bb : GetGraph()->GetBlocksRPO()) {
        if (bb->IsMarked(marker)) {
            continue;
        }
        if (bb->IsTryBegin()) {
            // try_begin inherits catch-ness from the guarded code (successor 0)
            bb->SetCatch(bb->GetSuccessor(0)->IsCatch());
        } else if (bb->IsTryEnd()) {
            // try_end inherits catch-ness from its predecessor inside the try-region
            bb->SetCatch(bb->GetPredecessor(0)->IsCatch());
        } else {
            bb->SetCatch(true);
        }
    }

    // Nested try-blocks can be removed, but referring to them basic blocks can be placed in the external try-blocks.
    // So `try` marks are added after removing unreachable blocks
    for (auto it : try_blocks_) {
        const auto &try_block = it.second;
        if (try_block.begin_bb->GetGraph() != try_block.end_bb->GetGraph()) {
            // end_bb was erased from the graph during unreachable-block removal; bring it back
            RestoreTryEnd(try_block);
        }
        try_block.begin_bb->SetTryId(try_block.id);
        try_block.end_bb->SetTryId(try_block.id);
        if (try_block.basic_blocks == nullptr) {
            continue;
        }
        for (auto bb : *try_block.basic_blocks) {
            bb->SetTryId(try_block.id);
            bb->SetTry(true);
        }
    }
}
451
/*
 * Connect catch-blocks to the graph.
 */
ResolveTryCatchBlocks()455 void IrBuilder::ResolveTryCatchBlocks()
456 {
457 auto marker_holder = MarkerHolder(GetGraph());
458 auto marker = marker_holder.GetMarker();
459 MarkNormalControlFlow(GetGraph()->GetStartBlock(), marker);
460 ConnectTryCatchBlocks();
461 GetGraph()->RemoveUnreachableBlocks();
462 MarkTryCatchBlocks(marker);
463 }
464
ConnectTryCatchBlocks()465 void IrBuilder::ConnectTryCatchBlocks()
466 {
467 ArenaMap<uint32_t, BasicBlock *> catch_blocks(GetGraph()->GetLocalAllocator()->Adapter());
468 // Firstly create catch_begin blocks, as they should precede try_begin blocks
469 for (auto pc : catches_pc_) {
470 auto catch_begin = GetGraph()->CreateEmptyBlock();
471 catch_begin->SetGuestPc(pc);
472 catch_begin->SetCatch(true);
473 catch_begin->SetCatchBegin(true);
474 auto first_catch_bb = GetBlockForPc(pc);
475 catch_begin->AddSucc(first_catch_bb);
476 catch_blocks.emplace(pc, catch_begin);
477 }
478
479 // Connect try_begin and catch_begin blocks
480 for (auto it : try_blocks_) {
481 const auto &try_block = it.second;
482 if (try_block.contains_throwable_inst) {
483 ConnectTryCodeBlock(try_block, catch_blocks);
484 } else if (try_block.basic_blocks != nullptr) {
485 try_block.basic_blocks->clear();
486 }
487 }
488 }
489
/// Insert the try-block's `try_begin`/`try_end` blocks around its code region
/// and connect both of them to every catch-handler of the try-block.
void IrBuilder::ConnectTryCodeBlock(const TryCodeBlock &try_block, const ArenaMap<uint32_t, BasicBlock *> &catch_blocks)
{
    auto try_begin = try_block.begin_bb;
    ASSERT(try_begin != nullptr);
    auto try_end = try_block.end_bb;
    ASSERT(try_end != nullptr);
    // Create auxiliary `Try` instruction
    auto try_inst = GetGraph()->CreateInstTry();
    try_inst->SetTryEndBlock(try_end);
    try_begin->AppendInst(try_inst);
    // Insert `try_begin` and `try_end`
    auto first_try_bb = GetBlockForPc(try_block.boundaries.begin_pc);
    auto last_try_bb = GetPrevBlockForPc(try_block.boundaries.end_pc);
    first_try_bb->InsertBlockBefore(try_begin);
    last_try_bb->InsertBlockBeforeSucc(try_end, last_try_bb->GetSuccessor(0));
    // Connect catch-handlers
    for (auto catch_block : *try_block.catches) {
        auto catch_begin = catch_blocks.at(catch_block.pc);
        if (!try_begin->HasSucc(catch_begin)) {
            // Edges to a handler are added pairwise from try_begin and try_end,
            // so checking try_begin alone keeps both in sync
            try_begin->AddSucc(catch_begin, true);
            try_end->AddSucc(catch_begin, true);
        }
        try_inst->AppendCatchTypeId(catch_block.type_id, try_begin->GetSuccBlockIndex(catch_begin));
    }
}
515
/**
 * `try_end` restoring is required in the following case:
 * try {
 *     try { a++; }
 *     catch { a++; }
 * }
 *
 * Nested try doesn't contain throwable instructions and related catch-handler will not be connected to the graph.
 * As a result all `catch` basic blocks will be eliminated together with outer's `try_end`, since it was inserted just
 * after `catch`
 */
RestoreTryEnd(const TryCodeBlock & try_block)527 void IrBuilder::RestoreTryEnd(const TryCodeBlock &try_block)
528 {
529 ASSERT(try_block.end_bb->GetGraph() == nullptr);
530 ASSERT(try_block.end_bb->GetSuccsBlocks().empty());
531 ASSERT(try_block.end_bb->GetPredsBlocks().empty());
532
533 GetGraph()->RestoreBlock(try_block.end_bb);
534 auto last_try_bb = GetPrevBlockForPc(try_block.boundaries.end_pc);
535 last_try_bb->InsertBlockBeforeSucc(try_block.end_bb, last_try_bb->GetSuccessor(0));
536 for (auto succ : try_block.begin_bb->GetSuccsBlocks()) {
537 if (succ->IsCatchBegin()) {
538 try_block.end_bb->AddSucc(succ);
539 }
540 }
541 }
542 } // namespace panda::compiler
543