Lines Matching +full:block +full:- +full:fixup

1 //===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
8 //===----------------------------------------------------------------------===//
18 //===----------------------------------------------------------------------===//
46 CGF.CreateDefaultAlignTempAlloca(V->getType(), "saved-rvalue"); in save()
54 llvm::StructType::get(V.first->getType(), V.second->getType(), in save()
56 Address addr = CGF.CreateDefaultAlignTempAlloca(ComplexTy, "saved-complex"); in save()
60 CGF.CGM.getDataLayout().getTypeAllocSize(V.first->getType())); in save()
73 CGF.CreateTempAlloca(V.getType(), CGF.getPointerAlign(), "saved-rvalue"); in save()
79 /// Given a saved r-value produced by SaveRValue, perform the code
84 auto alignment = cast<llvm::AllocaInst>(value)->getAlignment(); in restore()
103 CGF.CGM.getDataLayout().getTypeAllocSize(real->getType())); in restore()
110 llvm_unreachable("bad saved r-value kind"); in restore()
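
The save()/restore() lines above spill an r-value that a cleanup will need later into a temporary alloca ("saved-rvalue", or a two-element struct named "saved-complex" for complex values) and reload it when the cleanup actually runs. A minimal, self-contained sketch of that idea follows; SavedScalar and its fields are stand-ins for illustration, not the real Clang classes, and the "dominates all uses" flag is an assumption standing in for the saved-value kind check.

#include <optional>

// Stand-in for a value a cleanup needs: either held directly (it will
// dominate every use, e.g. a constant) or spilled to a temporary slot
// (the "saved-rvalue" alloca) and reloaded when the cleanup runs.
struct SavedScalar {
  std::optional<int> Direct;
  int *SpillSlot = nullptr;

  static SavedScalar save(int V, bool DominatesAllUses, int *Slot) {
    SavedScalar S;
    if (DominatesAllUses) {
      S.Direct = V;          // no spill needed
    } else {
      *Slot = V;             // store into the temporary (the "save")
      S.SpillSlot = Slot;
    }
    return S;
  }

  int restore() const {      // rematerialize the value inside the cleanup
    return Direct ? *Direct : *SpillSlot;
  }
};
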
113 /// Push an entry of the given size onto this protected-scope stack.
121 } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) { in allocate()
122 unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer; in allocate()
123 unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer); in allocate()
132 char *NewStartOfData = NewEndOfBuffer - UsedCapacity; in allocate()
141 StartOfData -= Size; in allocate()
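
The allocate() lines above show the protected-scope stack growing downward: StartOfData is carved off toward StartOfBuffer, and when headroom runs out the buffer is reallocated and the used bytes are copied so they stay flush against the new end. Below is a self-contained sketch of that layout; ScopeStack is a stand-in type, and the initial capacity and doubling growth policy are assumptions for illustration.

#include <cstdlib>
#include <cstring>

struct ScopeStack {
  char *StartOfBuffer = nullptr;  // lowest address of the allocation
  char *EndOfBuffer = nullptr;    // one past the last byte
  char *StartOfData = nullptr;    // lowest address currently in use

  char *allocate(std::size_t Size) {
    if (!StartOfBuffer) {
      std::size_t Capacity = 1024;
      while (Capacity < Size) Capacity *= 2;
      StartOfBuffer = static_cast<char *>(std::malloc(Capacity));
      EndOfBuffer = StartOfBuffer + Capacity;
      StartOfData = EndOfBuffer;
    } else if (static_cast<std::size_t>(StartOfData - StartOfBuffer) < Size) {
      // Not enough headroom below the used region: grow the buffer, then
      // copy the used bytes so they stay flush against the new end.
      std::size_t CurrentCapacity = EndOfBuffer - StartOfBuffer;
      std::size_t UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
      std::size_t NewCapacity = CurrentCapacity * 2;
      while (NewCapacity < UsedCapacity + Size) NewCapacity *= 2;

      char *NewStartOfBuffer = static_cast<char *>(std::malloc(NewCapacity));
      char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
      char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
      std::memcpy(NewStartOfData, StartOfData, UsedCapacity);

      std::free(StartOfBuffer);
      StartOfBuffer = NewStartOfBuffer;
      EndOfBuffer = NewEndOfBuffer;
      StartOfData = NewStartOfData;
    }

    StartOfData -= Size;  // new entries are pushed at the low end
    return StartOfData;
  }
};
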
153 if (!cleanup || !cleanup->isLifetimeMarker()) in containsOnlyLifetimeMarkers()
164 if (cleanup->isLifetimeMarker()) { in requiresLandingPad()
165 si = cleanup->getEnclosingEHScope(); in requiresLandingPad()
205 Scope->setLifetimeMarker(); in pushCleanup()
207 return Scope->getCleanupBuffer(); in pushCleanup()
222 // Check whether we can shrink the branch-fixups stack. in popCleanup()
267 /// fixups than the fixup depth on the innermost normal cleanup, or
269 /// wrong place. We *could* try to shrink fixup depths, but that's
278 assert(BranchFixups.size() >= MinSize && "fixup stack out of order"); in popNullFixups()
311 store->setAlignment(addr.getAlignment().getQuantity()); in createStoreInstBefore()
317 load->setAlignment(addr.getAlignment().getQuantity()); in createLoadInstBefore()
330 // Skip this fixup if its destination isn't set. in ResolveAllBranchFixups()
331 BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I); in ResolveAllBranchFixups() local
332 if (Fixup.Destination == nullptr) continue; in ResolveAllBranchFixups()
340 // i.e. where there's an unresolved fixup inside a single cleanup in ResolveAllBranchFixups()
342 if (Fixup.OptimisticBranchBlock == nullptr) { in ResolveAllBranchFixups()
343 createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex), in ResolveAllBranchFixups()
345 Fixup.InitialBranch); in ResolveAllBranchFixups()
346 Fixup.InitialBranch->setSuccessor(0, CleanupEntry); in ResolveAllBranchFixups()
350 if (!CasesAdded.insert(Fixup.Destination).second) in ResolveAllBranchFixups()
353 Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex), in ResolveAllBranchFixups()
354 Fixup.Destination); in ResolveAllBranchFixups()
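
The ResolveAllBranchFixups() lines above show the two cases for each pending fixup: a fixup that has never entered a cleanup has its destination index stored just before its initial branch and the branch retargeted at the cleanup entry, and every fixup also gets a switch case (added once per destination) that dispatches on that index on the way out. The sketch below models this with plain stand-in types; Block, Branch, and Switch are not the llvm:: classes, and the store of the index is reduced to a comment.

#include <set>
#include <utility>
#include <vector>

struct Block;                             // stand-in for llvm::BasicBlock
struct Branch { Block *Succ = nullptr; }; // stand-in unconditional branch
struct Switch {                           // stand-in for llvm::SwitchInst
  Block *Default = nullptr;
  std::vector<std::pair<int, Block *>> Cases;
  void addCase(int Index, Block *Dest) { Cases.push_back({Index, Dest}); }
};

struct BranchFixup {
  Block *Destination = nullptr;           // ultimate target; null once resolved
  int DestinationIndex = 0;               // index stored in the cleanup-dest slot
  Branch *InitialBranch = nullptr;        // the branch originally emitted
  Block *OptimisticBranchBlock = nullptr; // last cleanup it was threaded through
};

// Resolve every pending fixup against the cleanup whose entry block is
// CleanupEntry and whose exit dispatches through ExitSwitch.
void resolveAllFixups(std::vector<BranchFixup> &Fixups, Block *CleanupEntry,
                      Switch &ExitSwitch) {
  std::set<Block *> CasesAdded;
  for (BranchFixup &Fixup : Fixups) {
    if (!Fixup.Destination) continue;       // skip fixups without a destination

    if (!Fixup.OptimisticBranchBlock) {
      // The branch has not entered any cleanup yet: in real IR the
      // destination index is stored to the cleanup-dest slot just before
      // the branch, and the branch is pointed at the cleanup entry.
      Fixup.InitialBranch->Succ = CleanupEntry;
    }

    // On the way out of the cleanup, dispatch on the stored index;
    // only add one case per distinct destination.
    if (CasesAdded.insert(Fixup.Destination).second)
      ExitSwitch.addCase(Fixup.DestinationIndex, Fixup.Destination);
  }
}
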
360 /// Transitions the terminator of the given exit-block of a cleanup to
363 llvm::BasicBlock *Block) { in TransitionToCleanupSwitch() argument
366 llvm::TerminatorInst *Term = Block->getTerminator(); in TransitionToCleanupSwitch()
367 assert(Term && "can't transition block without terminator"); in TransitionToCleanupSwitch()
370 assert(Br->isUnconditional()); in TransitionToCleanupSwitch()
374 llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block); in TransitionToCleanupSwitch()
375 Br->eraseFromParent(); in TransitionToCleanupSwitch()
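
TransitionToCleanupSwitch(), shown above, upgrades a cleanup exit block whose terminator is still an unconditional branch: the branch is replaced by a switch on the value loaded from the cleanup-dest slot, with the old successor as the default target. Reusing the stand-in types from the previous sketch (the load of the condition is elided), the transformation is roughly:

// Replace an unconditional branch terminator with a switch whose default
// target is the branch's old successor; cases are added later as fixups
// and branch-afters are resolved. (Stand-in types; the real code switches
// on a value loaded from the cleanup-dest slot, then erases the branch.)
Switch transitionToCleanupSwitch(const Branch &Br) {
  Switch SI;
  SI.Default = Br.Succ;   // old fallthrough becomes the default case
  return SI;
}
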
382 void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) { in ResolveBranchFixups() argument
383 assert(Block && "resolving a null target block"); in ResolveBranchFixups()
393 // Skip this fixup if its destination doesn't match. in ResolveBranchFixups()
394 BranchFixup &Fixup = EHStack.getBranchFixup(I); in ResolveBranchFixups() local
395 if (Fixup.Destination != Block) continue; in ResolveBranchFixups()
397 Fixup.Destination = nullptr; in ResolveBranchFixups()
400 // If it doesn't have an optimistic branch block, LatestBranch is in ResolveBranchFixups()
402 llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock; in ResolveBranchFixups()
406 // Don't process the same optimistic branch block twice. in ResolveBranchFixups()
413 Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block); in ResolveBranchFixups()
438 /// cleanups from the given savepoint in the lifetime-extended cleanups stack.
475 /// Attempts to reduce a cleanup's entry block to a fallthrough. This
479 /// Returns the new block, whatever it is.
482 llvm::BasicBlock *Pred = Entry->getSinglePredecessor(); in SimplifyCleanupEntry()
485 llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator()); in SimplifyCleanupEntry()
486 if (!Br || Br->isConditional()) return Entry; in SimplifyCleanupEntry()
487 assert(Br->getSuccessor(0) == Entry); in SimplifyCleanupEntry()
490 // block, we'll need to continue inserting at the end of the in SimplifyCleanupEntry()
493 assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end()); in SimplifyCleanupEntry()
496 Br->eraseFromParent(); in SimplifyCleanupEntry()
500 Entry->replaceAllUsesWith(Pred); in SimplifyCleanupEntry()
503 Pred->getInstList().splice(Pred->end(), Entry->getInstList()); in SimplifyCleanupEntry()
505 // Kill the entry block. in SimplifyCleanupEntry()
506 Entry->eraseFromParent(); in SimplifyCleanupEntry()
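
SimplifyCleanupEntry(), excerpted above, folds a cleanup's entry block into its lone predecessor when that predecessor ends in an unconditional branch to it: the branch is erased, remaining uses of the entry are redirected to the predecessor, the entry's instructions are spliced onto the predecessor, and the entry block is deleted. A simplified, self-contained sketch of the same fold follows; Inst and SimpleBlock are stand-in types, and the terminator check is reduced to a comment.

#include <list>
#include <vector>

struct Inst { int Opcode = 0; };   // stand-in for llvm::Instruction

struct SimpleBlock {
  std::list<Inst> Insts;           // stand-in instruction list
  std::vector<SimpleBlock *> Preds;

  SimpleBlock *getSinglePredecessor() const {
    return Preds.size() == 1 ? Preds.front() : nullptr;
  }
};

// Returns the block that code should continue appending to: the entry
// itself when no fold is possible, otherwise its (former) predecessor.
SimpleBlock *simplifyEntry(SimpleBlock *Entry) {
  SimpleBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;
  // The real code also verifies that Pred's terminator is an unconditional
  // branch whose successor is Entry before doing anything.
  Pred->Insts.pop_back();                               // erase the branch
  Pred->Insts.splice(Pred->Insts.end(), Entry->Insts);  // move Entry's body over
  Entry->Preds.clear();                                 // Entry is now dead
  return Pred;
}
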
531 Fn->Emit(CGF, flags); in EmitCleanup()
534 // Emit the continuation block if there was an active flag. in EmitCleanup()
542 // Exit is the exit block of a cleanup, so it always terminates in in ForwardPrebranchedFallthrough()
544 llvm::TerminatorInst *Term = Exit->getTerminator(); in ForwardPrebranchedFallthrough()
547 assert(Br->isUnconditional() && Br->getSuccessor(0) == From); in ForwardPrebranchedFallthrough()
548 Br->setSuccessor(0, To); in ForwardPrebranchedFallthrough()
551 for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I) in ForwardPrebranchedFallthrough()
552 if (Switch->getSuccessor(I) == From) in ForwardPrebranchedFallthrough()
553 Switch->setSuccessor(I, To); in ForwardPrebranchedFallthrough()
557 /// We don't need a normal entry block for the given cleanup.
558 /// Optimistic fixup branches can cause these blocks to come into
571 i = entry->use_begin(), e = entry->use_end(); i != e; ) { in destroyOptimisticNormalEntry()
577 // The only uses should be fixup switches. in destroyOptimisticNormalEntry()
579 if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) { in destroyOptimisticNormalEntry()
581 llvm::BranchInst::Create(si->case_begin().getCaseSuccessor(), si); in destroyOptimisticNormalEntry()
583 // The switch operand is a load from the cleanup-dest alloca. in destroyOptimisticNormalEntry()
584 llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition()); in destroyOptimisticNormalEntry()
587 si->eraseFromParent(); in destroyOptimisticNormalEntry()
590 assert(condition->getOperand(0) == CGF.NormalCleanupDest); in destroyOptimisticNormalEntry()
591 assert(condition->use_empty()); in destroyOptimisticNormalEntry()
592 condition->eraseFromParent(); in destroyOptimisticNormalEntry()
596 assert(entry->use_empty()); in destroyOptimisticNormalEntry()
600 /// Pops a cleanup block. If the block includes a normal cleanup, the
619 // generated a lazy EH cleanup block. in PopCleanupBlock()
627 // - whether there are branch fix-ups through this cleanup in PopCleanupBlock()
631 // - whether there are branch-throughs or branch-afters in PopCleanupBlock()
634 // - whether there's a fallthrough in PopCleanupBlock()
638 // Branch-through fall-throughs leave the insertion point set to the in PopCleanupBlock()
643 (FallthroughSource && FallthroughSource->getTerminator()); in PopCleanupBlock()
650 FallthroughSource->getTerminator()->getSuccessor(0) in PopCleanupBlock()
665 // cleanup, just forward it to the next block, leaving the in PopCleanupBlock()
666 // insertion point in the prebranched block. in PopCleanupBlock()
671 // Otherwise, we need to make a new block. If the normal cleanup in PopCleanupBlock()
673 // entry block, but this is simpler, and it avoids conflicts with in PopCleanupBlock()
674 // dead optimistic fixup branches. in PopCleanupBlock()
676 prebranchDest = createBasicBlock("forwarded-prebranch"); in PopCleanupBlock()
681 assert(normalEntry && !normalEntry->use_empty()); in PopCleanupBlock()
737 // the cleanup block and then try to clean up after ourselves. in PopCleanupBlock()
739 // Force the entry block to exist. in PopCleanupBlock()
747 // destination index. For fall-throughs this is always zero. in PopCleanupBlock()
759 // II. Emit the entry block. This implicitly branches to it if in PopCleanupBlock()
770 // Compute the branch-through dest if we need it: in PopCleanupBlock()
771 // - if there are branch-throughs threaded through the scope in PopCleanupBlock()
772 // - if fall-through is a branch-through in PopCleanupBlock()
773 // - if there are fixups that will be optimistically forwarded in PopCleanupBlock()
787 // If there's exactly one branch-after and no other threads, in PopCleanupBlock()
796 if (NormalCleanupDestSlot->hasOneUse()) { in PopCleanupBlock()
797 NormalCleanupDestSlot->user_back()->eraseFromParent(); in PopCleanupBlock()
798 NormalCleanupDestSlot->eraseFromParent(); in PopCleanupBlock()
805 // Build a switch-out if we need it: in PopCleanupBlock()
806 // - if there are branch-afters threaded through the scope in PopCleanupBlock()
807 // - if fall-through is a branch-after in PopCleanupBlock()
808 // - if there are fixups that have nowhere left to go and in PopCleanupBlock()
817 // TODO: base this on the number of branch-afters and fixups in PopCleanupBlock()
829 // Branch-after fallthrough. in PopCleanupBlock()
833 Switch->addCase(Builder.getInt32(0), FallthroughDest); in PopCleanupBlock()
837 Switch->addCase(Scope.getBranchAfterIndex(I), in PopCleanupBlock()
846 // We should always have a branch-through destination in this case. in PopCleanupBlock()
860 NormalExit->getInstList().push_back(InstsToAppend[I]); in PopCleanupBlock()
865 BranchFixup &Fixup = EHStack.getBranchFixup(I); in PopCleanupBlock() local
866 if (!Fixup.Destination) continue; in PopCleanupBlock()
867 if (!Fixup.OptimisticBranchBlock) { in PopCleanupBlock()
868 createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex), in PopCleanupBlock()
870 Fixup.InitialBranch); in PopCleanupBlock()
871 Fixup.InitialBranch->setSuccessor(0, NormalEntry); in PopCleanupBlock()
873 Fixup.OptimisticBranchBlock = NormalExit; in PopCleanupBlock()
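
The PopCleanupBlock() fixup loop above forwards every still-pending fixup through the cleanup being popped: a fixup that has not yet been routed into any cleanup gets its initial branch pointed at this cleanup's normal entry, and every pending fixup records this cleanup's exit as its new optimistic branch block. With the stand-in BranchFixup from the earlier sketch, that step looks roughly like:

// Thread pending fixups through the cleanup being popped (stand-in types
// from the earlier BranchFixup sketch). In the real code the destination
// index is also stored to the cleanup-dest slot before the initial branch.
void threadFixupsThroughCleanup(std::vector<BranchFixup> &Fixups,
                                Block *NormalEntry, Block *NormalExit) {
  for (BranchFixup &Fixup : Fixups) {
    if (!Fixup.Destination) continue;            // already resolved
    if (!Fixup.OptimisticBranchBlock)
      Fixup.InitialBranch->Succ = NormalEntry;   // first cleanup on this path
    Fixup.OptimisticBranchBlock = NormalExit;    // remember how far it got
  }
}
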
882 // Non-prebranched fallthrough doesn't need to be forwarded. in PopCleanupBlock()
907 // This might invalidate (non-IR) pointers to NormalEntry. in PopCleanupBlock()
969 /// isObviouslyBranchWithoutCleanups - Return true if a branch to the
992 /// Terminate the current block by emitting a branch which might leave
993 /// the current cleanup-protected scope. The target scope may not yet
994 /// be known, in which case this will require a fixup.
996 /// As a side-effect, this method clears the insertion point.
1021 // to the current cleanup scope as a branch fixup. in EmitBranchThroughCleanup()
1023 BranchFixup &Fixup = EHStack.addBranchFixup(); in EmitBranchThroughCleanup() local
1024 Fixup.Destination = Dest.getBlock(); in EmitBranchThroughCleanup()
1025 Fixup.DestinationIndex = Dest.getDestIndex(); in EmitBranchThroughCleanup()
1026 Fixup.InitialBranch = BI; in EmitBranchThroughCleanup()
1027 Fixup.OptimisticBranchBlock = nullptr; in EmitBranchThroughCleanup()
1039 // Adjust BI to point to the first cleanup block. in EmitBranchThroughCleanup()
1043 BI->setSuccessor(0, CreateNormalEntry(*this, Scope)); in EmitBranchThroughCleanup()
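
EmitBranchThroughCleanup(), excerpted above, is where fixups originate: when a branch must leave a cleanup-protected scope but its threading cannot be resolved yet, a BranchFixup is recorded with a null OptimisticBranchBlock and the branch is pointed at the innermost normal cleanup's entry for now. Using the same stand-in types as before, the recording step is roughly:

// Record a new fixup for a branch that must leave the current
// cleanup-protected scope (stand-in types from the earlier sketch).
BranchFixup &addBranchFixup(std::vector<BranchFixup> &Fixups, Block *Dest,
                            int DestIndex, Branch *BI,
                            Block *InnermostNormalCleanupEntry) {
  Fixups.push_back({Dest, DestIndex, BI, /*OptimisticBranchBlock=*/nullptr});
  // Optimistically send the branch into the innermost cleanup; cleanups
  // adjust it further as they are popped.
  BI->Succ = InnermostNormalCleanupEntry;
  return Fixups.back();
}
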
1075 // If we needed a normal block for any reason, that counts. in IsUsedAsNormalCleanup()
1094 // If we needed an EH block for any reason, that counts. in IsUsedAsEHCleanup()
1095 if (EHStack.find(cleanup)->hasEHBranches()) in IsUsedAsEHCleanup()
1118 /// The given cleanup block is changing activation state. Configure a
1122 /// extra uses *after* the change-over point.
1139 // - as a normal cleanup in SetupCleanupBlockActivation()
1146 // - as an EH cleanup in SetupCleanupBlockActivation()
1168 // If we're in a conditional block, ignore the dominating IP and in SetupCleanupBlockActivation()