From 777d6e56ad9b1fed9866daf3ee6486d85c5b7d32 Mon Sep 17 00:00:00 2001
From: John McCall
Date: Thu, 11 Aug 2011 02:22:43 +0000
Subject: [PATCH] Simplify EH control flow by observing that EH scopes form a
 simple hierarchy of delegation, and that EH selector values are meaningful
 function-wide (good thing, too, or inlining wouldn't work).

This removes the need for routing edges through EH cleanups, since a
cleanup simply always branches to its enclosing scope.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@137293 91177308-0d34-0410-b5e6-96231b3b80d8
---
 lib/CodeGen/CGCleanup.cpp               | 245 ++++--------
 lib/CodeGen/CGCleanup.h                 | 321 ++++++++--------
 lib/CodeGen/CGException.cpp             | 479 +++++++++++++-----------
 lib/CodeGen/CGObjCMac.cpp               |   1 -
 lib/CodeGen/CGObjCRuntime.cpp           |   2 +-
 lib/CodeGen/CGStmt.cpp                  |  17 +
 lib/CodeGen/CodeGenFunction.cpp         |   6 +-
 lib/CodeGen/CodeGenFunction.h           |  88 ++---
 test/CodeGenCXX/destructors.cpp         |   1 -
 test/CodeGenCXX/eh.cpp                  |  30 +-
 test/CodeGenCXX/exceptions.cpp          |   2 -
 test/CodeGenCXX/goto.cpp                |   1 -
 test/CodeGenCXX/partial-destruction.cpp |   3 -
 13 files changed, 540 insertions(+), 656 deletions(-)

diff --git a/lib/CodeGen/CGCleanup.cpp b/lib/CodeGen/CGCleanup.cpp
index 2da10ca006..b2d0786cb6 100644
--- a/lib/CodeGen/CGCleanup.cpp
+++ b/lib/CodeGen/CGCleanup.cpp
@@ -119,16 +119,30 @@ char *EHScopeStack::allocate(size_t Size) {
 }
 
 EHScopeStack::stable_iterator
-EHScopeStack::getEnclosingEHCleanup(iterator it) const {
-  assert(it != end());
-  do {
-    if (isa<EHCleanupScope>(*it)) {
-      if (cast<EHCleanupScope>(*it).isEHCleanup())
-        return stabilize(it);
-      return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
+EHScopeStack::getInnermostActiveNormalCleanup() const {
+  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
+         si != se; ) {
+    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
+    if (cleanup.isActive()) return si;
+    si = cleanup.getEnclosingNormalCleanup();
+  }
+  return stable_end();
+}
+
+EHScopeStack::stable_iterator EHScopeStack::getInnermostActiveEHScope() const {
+  for (stable_iterator si = getInnermostEHScope(), se = stable_end();
+         si != se; ) {
+    // Skip over inactive cleanups.
+    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*find(si));
+    if (cleanup && !cleanup->isActive()) {
+      si = cleanup->getEnclosingEHScope();
+      continue;
     }
-    ++it;
-  } while (it != end());
+
+    // All other scopes are always active.
+    return si;
+  }
 
   return stable_end();
 }
@@ -146,11 +160,11 @@ void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
                                 Size,
                                 BranchFixups.size(),
                                 InnermostNormalCleanup,
-                                InnermostEHCleanup);
+                                InnermostEHScope);
   if (IsNormalCleanup)
     InnermostNormalCleanup = stable_begin();
   if (IsEHCleanup)
-    InnermostEHCleanup = stable_begin();
+    InnermostEHScope = stable_begin();
 
   return Scope->getCleanupBuffer();
 }
@@ -161,11 +175,9 @@ void EHScopeStack::popCleanup() {
 
   assert(isa<EHCleanupScope>(*begin()));
   EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
   InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
-  InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
+  InnermostEHScope = Cleanup.getEnclosingEHScope();
   StartOfData += Cleanup.getAllocatedSize();
 
-  if (empty()) NextEHDestIndex = FirstEHDestIndex;
-
   // Destroy the cleanup.
Cleanup.~EHCleanupScope(); @@ -182,37 +194,35 @@ void EHScopeStack::popCleanup() { } } -EHFilterScope *EHScopeStack::pushFilter(unsigned NumFilters) { - char *Buffer = allocate(EHFilterScope::getSizeForNumFilters(NumFilters)); - CatchDepth++; - return new (Buffer) EHFilterScope(NumFilters); +EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) { + assert(getInnermostEHScope() == stable_end()); + char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters)); + EHFilterScope *filter = new (buffer) EHFilterScope(numFilters); + InnermostEHScope = stable_begin(); + return filter; } void EHScopeStack::popFilter() { assert(!empty() && "popping exception stack when not empty"); - EHFilterScope &Filter = cast(*begin()); - StartOfData += EHFilterScope::getSizeForNumFilters(Filter.getNumFilters()); - - if (empty()) NextEHDestIndex = FirstEHDestIndex; + EHFilterScope &filter = cast(*begin()); + StartOfData += EHFilterScope::getSizeForNumFilters(filter.getNumFilters()); - assert(CatchDepth > 0 && "mismatched filter push/pop"); - CatchDepth--; + InnermostEHScope = filter.getEnclosingEHScope(); } -EHCatchScope *EHScopeStack::pushCatch(unsigned NumHandlers) { - char *Buffer = allocate(EHCatchScope::getSizeForNumHandlers(NumHandlers)); - CatchDepth++; - EHCatchScope *Scope = new (Buffer) EHCatchScope(NumHandlers); - for (unsigned I = 0; I != NumHandlers; ++I) - Scope->getHandlers()[I].Index = getNextEHDestIndex(); - return Scope; +EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) { + char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers)); + EHCatchScope *scope = + new (buffer) EHCatchScope(numHandlers, InnermostEHScope); + InnermostEHScope = stable_begin(); + return scope; } void EHScopeStack::pushTerminate() { char *Buffer = allocate(EHTerminateScope::getSize()); - CatchDepth++; - new (Buffer) EHTerminateScope(getNextEHDestIndex()); + new (Buffer) EHTerminateScope(InnermostEHScope); + InnermostEHScope = stable_begin(); } /// Remove any 'null' fixups on the stack. However, we can't pop more @@ -384,17 +394,6 @@ static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF, return Entry; } -static llvm::BasicBlock *CreateEHEntry(CodeGenFunction &CGF, - EHCleanupScope &Scope) { - assert(Scope.isEHCleanup()); - llvm::BasicBlock *Entry = Scope.getEHBlock(); - if (!Entry) { - Entry = CGF.createBasicBlock("eh.cleanup"); - Scope.setEHBlock(Entry); - } - return Entry; -} - /// Attempts to reduce a cleanup's entry block to a fallthrough. This /// is basically llvm::MergeBlockIntoPredecessor, except /// simplified/optimized for the tighter constraints on cleanup blocks. @@ -544,7 +543,10 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) { // Check whether we need an EH cleanup. This is only true if we've // generated a lazy EH cleanup block. 
- bool RequiresEHCleanup = Scope.hasEHBranches(); + llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock(); + assert(Scope.hasEHBranches() == (EHEntry != 0)); + bool RequiresEHCleanup = (EHEntry != 0); + EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope(); // Check the three conditions which might require a normal cleanup: @@ -580,12 +582,6 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) { RequiresNormalCleanup = true; } - EHScopeStack::Cleanup::Flags cleanupFlags; - if (Scope.isNormalCleanup()) - cleanupFlags.setIsNormalCleanupKind(); - if (Scope.isEHCleanup()) - cleanupFlags.setIsEHCleanupKind(); - // If we have a prebranched fallthrough into an inactive normal // cleanup, rewrite it so that it leads to the appropriate place. if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) { @@ -634,61 +630,11 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) { EHScopeStack::Cleanup *Fn = reinterpret_cast(CleanupBuffer.data()); - // We want to emit the EH cleanup after the normal cleanup, but go - // ahead and do the setup for the EH cleanup while the scope is still - // alive. - llvm::BasicBlock *EHEntry = 0; - SmallVector EHInstsToAppend; - if (RequiresEHCleanup) { - EHEntry = CreateEHEntry(*this, Scope); - - // Figure out the branch-through dest if necessary. - llvm::BasicBlock *EHBranchThroughDest = 0; - if (Scope.hasEHBranchThroughs()) { - assert(Scope.getEnclosingEHCleanup() != EHStack.stable_end()); - EHScope &S = *EHStack.find(Scope.getEnclosingEHCleanup()); - EHBranchThroughDest = CreateEHEntry(*this, cast(S)); - } - - // If we have exactly one branch-after and no branch-throughs, we - // can dispatch it without a switch. - if (!Scope.hasEHBranchThroughs() && - Scope.getNumEHBranchAfters() == 1) { - assert(!EHBranchThroughDest); - - // TODO: remove the spurious eh.cleanup.dest stores if this edge - // never went through any switches. - llvm::BasicBlock *BranchAfterDest = Scope.getEHBranchAfterBlock(0); - EHInstsToAppend.push_back(llvm::BranchInst::Create(BranchAfterDest)); - - // Otherwise, if we have any branch-afters, we need a switch. - } else if (Scope.getNumEHBranchAfters()) { - // The default of the switch belongs to the branch-throughs if - // they exist. - llvm::BasicBlock *Default = - (EHBranchThroughDest ? EHBranchThroughDest : getUnreachableBlock()); - - const unsigned SwitchCapacity = Scope.getNumEHBranchAfters(); - - llvm::LoadInst *Load = - new llvm::LoadInst(getEHCleanupDestSlot(), "cleanup.dest"); - llvm::SwitchInst *Switch = - llvm::SwitchInst::Create(Load, Default, SwitchCapacity); - - EHInstsToAppend.push_back(Load); - EHInstsToAppend.push_back(Switch); - - for (unsigned I = 0, E = Scope.getNumEHBranchAfters(); I != E; ++I) - Switch->addCase(Scope.getEHBranchAfterIndex(I), - Scope.getEHBranchAfterBlock(I)); - - // Otherwise, we have only branch-throughs; jump to the next EH - // cleanup. 
- } else { - assert(EHBranchThroughDest); - EHInstsToAppend.push_back(llvm::BranchInst::Create(EHBranchThroughDest)); - } - } + EHScopeStack::Cleanup::Flags cleanupFlags; + if (Scope.isNormalCleanup()) + cleanupFlags.setIsNormalCleanupKind(); + if (Scope.isEHCleanup()) + cleanupFlags.setIsEHCleanupKind(); if (!RequiresNormalCleanup) { destroyOptimisticNormalEntry(*this, Scope); @@ -890,10 +836,7 @@ void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) { cleanupFlags.setIsForEHCleanup(); EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag); - // Append the prepared cleanup prologue from above. - llvm::BasicBlock *EHExit = Builder.GetInsertBlock(); - for (unsigned I = 0, E = EHInstsToAppend.size(); I != E; ++I) - EHExit->getInstList().push_back(EHInstsToAppend[I]); + Builder.CreateBr(getEHDispatchBlock(EHParent)); Builder.restoreIP(SavedIP); @@ -1005,64 +948,6 @@ void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) { Builder.ClearInsertionPoint(); } -void CodeGenFunction::EmitBranchThroughEHCleanup(UnwindDest Dest) { - // We should never get invalid scope depths for an UnwindDest; that - // implies that the destination wasn't set up correctly. - assert(Dest.getScopeDepth().isValid() && "invalid scope depth on EH dest?"); - - if (!HaveInsertPoint()) - return; - - // Create the branch. - llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock()); - - // Calculate the innermost active cleanup. - EHScopeStack::stable_iterator - InnermostCleanup = EHStack.getInnermostActiveEHCleanup(); - - // If the destination is in the same EH cleanup scope as us, we - // don't need to thread through anything. - if (InnermostCleanup.encloses(Dest.getScopeDepth())) { - Builder.ClearInsertionPoint(); - return; - } - assert(InnermostCleanup != EHStack.stable_end()); - - // Store the index at the start. - llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex()); - new llvm::StoreInst(Index, getEHCleanupDestSlot(), BI); - - // Adjust BI to point to the first cleanup block. - { - EHCleanupScope &Scope = - cast(*EHStack.find(InnermostCleanup)); - BI->setSuccessor(0, CreateEHEntry(*this, Scope)); - } - - // Add this destination to all the scopes involved. - for (EHScopeStack::stable_iterator - I = InnermostCleanup, E = Dest.getScopeDepth(); ; ) { - assert(E.strictlyEncloses(I)); - EHCleanupScope &Scope = cast(*EHStack.find(I)); - assert(Scope.isEHCleanup()); - I = Scope.getEnclosingEHCleanup(); - - // If this is the last cleanup we're propagating through, add this - // as a branch-after. - if (I == E) { - Scope.addEHBranchAfter(Index, Dest.getBlock()); - break; - } - - // Otherwise, add it as a branch-through. If this isn't new - // information, all the rest of the work has been done before. - if (!Scope.addEHBranchThrough(Dest.getBlock())) - break; - } - - Builder.ClearInsertionPoint(); -} - static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack, EHScopeStack::stable_iterator C) { // If we needed a normal block for any reason, that counts. @@ -1083,18 +968,21 @@ static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack, } static bool IsUsedAsEHCleanup(EHScopeStack &EHStack, - EHScopeStack::stable_iterator C) { + EHScopeStack::stable_iterator cleanup) { // If we needed an EH block for any reason, that counts. - if (cast(*EHStack.find(C)).getEHBlock()) + if (EHStack.find(cleanup)->hasEHBranches()) return true; // Check whether any enclosed cleanups were needed. 
for (EHScopeStack::stable_iterator - I = EHStack.getInnermostEHCleanup(); I != C; ) { - assert(C.strictlyEncloses(I)); - EHCleanupScope &S = cast(*EHStack.find(I)); - if (S.getEHBlock()) return true; - I = S.getEnclosingEHCleanup(); + i = EHStack.getInnermostEHScope(); i != cleanup; ) { + assert(cleanup.strictlyEncloses(i)); + + EHScope &scope = *EHStack.find(i); + if (scope.hasEHBranches()) + return true; + + i = scope.getEnclosingEHScope(); } return false; @@ -1189,10 +1077,3 @@ llvm::Value *CodeGenFunction::getNormalCleanupDestSlot() { CreateTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot"); return NormalCleanupDest; } - -llvm::Value *CodeGenFunction::getEHCleanupDestSlot() { - if (!EHCleanupDest) - EHCleanupDest = - CreateTempAlloca(Builder.getInt32Ty(), "eh.cleanup.dest.slot"); - return EHCleanupDest; -} diff --git a/lib/CodeGen/CGCleanup.h b/lib/CodeGen/CGCleanup.h index 9cf04bf79a..7726e442c0 100644 --- a/lib/CodeGen/CGCleanup.h +++ b/lib/CodeGen/CGCleanup.h @@ -29,25 +29,102 @@ namespace CodeGen { /// A protected scope for zero-cost EH handling. class EHScope { llvm::BasicBlock *CachedLandingPad; + llvm::BasicBlock *CachedEHDispatchBlock; - unsigned K : 2; + EHScopeStack::stable_iterator EnclosingEHScope; + + class CommonBitFields { + friend class EHScope; + unsigned Kind : 2; + }; + enum { NumCommonBits = 2 }; protected: - enum { BitsRemaining = 30 }; + class CatchBitFields { + friend class EHCatchScope; + unsigned : NumCommonBits; + + unsigned NumHandlers : 32 - NumCommonBits; + }; + + class CleanupBitFields { + friend class EHCleanupScope; + unsigned : NumCommonBits; + + /// Whether this cleanup needs to be run along normal edges. + unsigned IsNormalCleanup : 1; + + /// Whether this cleanup needs to be run along exception edges. + unsigned IsEHCleanup : 1; + + /// Whether this cleanup is currently active. + unsigned IsActive : 1; + + /// Whether the normal cleanup should test the activation flag. + unsigned TestFlagInNormalCleanup : 1; + + /// Whether the EH cleanup should test the activation flag. + unsigned TestFlagInEHCleanup : 1; + + /// The amount of extra storage needed by the Cleanup. + /// Always a multiple of the scope-stack alignment. + unsigned CleanupSize : 12; + + /// The number of fixups required by enclosing scopes (not including + /// this one). If this is the top cleanup scope, all the fixups + /// from this index onwards belong to this scope. 
+ unsigned FixupDepth : 32 - 17 - NumCommonBits; // currently 13 + }; + + class FilterBitFields { + friend class EHFilterScope; + unsigned : NumCommonBits; + + unsigned NumFilters : 32 - NumCommonBits; + }; + + union { + CommonBitFields CommonBits; + CatchBitFields CatchBits; + CleanupBitFields CleanupBits; + FilterBitFields FilterBits; + }; public: enum Kind { Cleanup, Catch, Terminate, Filter }; - EHScope(Kind K) : CachedLandingPad(0), K(K) {} + EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope) + : CachedLandingPad(0), CachedEHDispatchBlock(0), + EnclosingEHScope(enclosingEHScope) { + CommonBits.Kind = kind; + } - Kind getKind() const { return static_cast(K); } + Kind getKind() const { return static_cast(CommonBits.Kind); } llvm::BasicBlock *getCachedLandingPad() const { return CachedLandingPad; } - void setCachedLandingPad(llvm::BasicBlock *Block) { - CachedLandingPad = Block; + void setCachedLandingPad(llvm::BasicBlock *block) { + CachedLandingPad = block; + } + + llvm::BasicBlock *getCachedEHDispatchBlock() const { + return CachedEHDispatchBlock; + } + + void setCachedEHDispatchBlock(llvm::BasicBlock *block) { + CachedEHDispatchBlock = block; + } + + bool hasEHBranches() const { + if (llvm::BasicBlock *block = getCachedEHDispatchBlock()) + return !block->use_empty(); + return false; + } + + EHScopeStack::stable_iterator getEnclosingEHScope() const { + return EnclosingEHScope; } }; @@ -57,8 +134,6 @@ public: /// Objective C @finally blocks are represented using a cleanup scope /// after the catch scope. class EHCatchScope : public EHScope { - unsigned NumHandlers : BitsRemaining; - // In effect, we have a flexible array member // Handler Handlers[0]; // But that's only standard in C99, not C++, so we have to do @@ -73,8 +148,7 @@ public: /// The catch handler for this type. llvm::BasicBlock *Block; - /// The unwind destination index for this handler. - unsigned Index; + bool isCatchAll() const { return Type == 0; } }; private: @@ -93,12 +167,14 @@ public: return sizeof(EHCatchScope) + N * sizeof(Handler); } - EHCatchScope(unsigned NumHandlers) - : EHScope(Catch), NumHandlers(NumHandlers) { + EHCatchScope(unsigned numHandlers, + EHScopeStack::stable_iterator enclosingEHScope) + : EHScope(Catch, enclosingEHScope) { + CatchBits.NumHandlers = numHandlers; } unsigned getNumHandlers() const { - return NumHandlers; + return CatchBits.NumHandlers; } void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) { @@ -127,44 +203,16 @@ public: /// A cleanup scope which generates the cleanup blocks lazily. class EHCleanupScope : public EHScope { - /// Whether this cleanup needs to be run along normal edges. - bool IsNormalCleanup : 1; - - /// Whether this cleanup needs to be run along exception edges. - bool IsEHCleanup : 1; - - /// Whether this cleanup is currently active. - bool IsActive : 1; - - /// Whether the normal cleanup should test the activation flag. - bool TestFlagInNormalCleanup : 1; - - /// Whether the EH cleanup should test the activation flag. - bool TestFlagInEHCleanup : 1; - - /// The amount of extra storage needed by the Cleanup. - /// Always a multiple of the scope-stack alignment. - unsigned CleanupSize : 12; - - /// The number of fixups required by enclosing scopes (not including - /// this one). If this is the top cleanup scope, all the fixups - /// from this index onwards belong to this scope. - unsigned FixupDepth : BitsRemaining - 17; // currently 13 - /// The nearest normal cleanup scope enclosing this one. 
EHScopeStack::stable_iterator EnclosingNormal; - /// The nearest EH cleanup scope enclosing this one. + /// The nearest EH scope enclosing this one. EHScopeStack::stable_iterator EnclosingEH; /// The dual entry/exit block along the normal edge. This is lazily /// created if needed before the cleanup is popped. llvm::BasicBlock *NormalBlock; - /// The dual entry/exit block along the EH edge. This is lazily - /// created if needed before the cleanup is popped. - llvm::BasicBlock *EHBlock; - /// An optional i1 variable indicating whether this cleanup has been /// activated yet. llvm::AllocaInst *ActiveFlag; @@ -180,15 +228,6 @@ class EHCleanupScope : public EHScope { /// Normal branch-afters. SmallVector, 4> BranchAfters; - - /// The destinations of EH branch-afters and branch-throughs. - /// TODO: optimize for the extremely common case of a single - /// branch-through. - llvm::SmallPtrSet EHBranches; - - /// EH branch-afters. - SmallVector, 4> - EHBranchAfters; }; mutable struct ExtInfo *ExtInfo; @@ -210,56 +249,64 @@ public: } size_t getAllocatedSize() const { - return sizeof(EHCleanupScope) + CleanupSize; + return sizeof(EHCleanupScope) + CleanupBits.CleanupSize; } - EHCleanupScope(bool IsNormal, bool IsEH, bool IsActive, - unsigned CleanupSize, unsigned FixupDepth, - EHScopeStack::stable_iterator EnclosingNormal, - EHScopeStack::stable_iterator EnclosingEH) - : EHScope(EHScope::Cleanup), - IsNormalCleanup(IsNormal), IsEHCleanup(IsEH), IsActive(IsActive), - TestFlagInNormalCleanup(false), TestFlagInEHCleanup(false), - CleanupSize(CleanupSize), FixupDepth(FixupDepth), - EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH), - NormalBlock(0), EHBlock(0), ActiveFlag(0), ExtInfo(0) - { - assert(this->CleanupSize == CleanupSize && "cleanup size overflow"); + EHCleanupScope(bool isNormal, bool isEH, bool isActive, + unsigned cleanupSize, unsigned fixupDepth, + EHScopeStack::stable_iterator enclosingNormal, + EHScopeStack::stable_iterator enclosingEH) + : EHScope(EHScope::Cleanup, enclosingEH), EnclosingNormal(enclosingNormal), + NormalBlock(0), ActiveFlag(0), ExtInfo(0) { + CleanupBits.IsNormalCleanup = isNormal; + CleanupBits.IsEHCleanup = isEH; + CleanupBits.IsActive = isActive; + CleanupBits.TestFlagInNormalCleanup = false; + CleanupBits.TestFlagInEHCleanup = false; + CleanupBits.CleanupSize = cleanupSize; + CleanupBits.FixupDepth = fixupDepth; + + assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow"); } ~EHCleanupScope() { delete ExtInfo; } - bool isNormalCleanup() const { return IsNormalCleanup; } + bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; } llvm::BasicBlock *getNormalBlock() const { return NormalBlock; } void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; } - bool isEHCleanup() const { return IsEHCleanup; } - llvm::BasicBlock *getEHBlock() const { return EHBlock; } - void setEHBlock(llvm::BasicBlock *BB) { EHBlock = BB; } + bool isEHCleanup() const { return CleanupBits.IsEHCleanup; } + llvm::BasicBlock *getEHBlock() const { return getCachedEHDispatchBlock(); } + void setEHBlock(llvm::BasicBlock *BB) { setCachedEHDispatchBlock(BB); } - bool isActive() const { return IsActive; } - void setActive(bool A) { IsActive = A; } + bool isActive() const { return CleanupBits.IsActive; } + void setActive(bool A) { CleanupBits.IsActive = A; } llvm::AllocaInst *getActiveFlag() const { return ActiveFlag; } void setActiveFlag(llvm::AllocaInst *Var) { ActiveFlag = Var; } - void setTestFlagInNormalCleanup() { TestFlagInNormalCleanup = true; } - 
bool shouldTestFlagInNormalCleanup() const { return TestFlagInNormalCleanup; } + void setTestFlagInNormalCleanup() { + CleanupBits.TestFlagInNormalCleanup = true; + } + bool shouldTestFlagInNormalCleanup() const { + return CleanupBits.TestFlagInNormalCleanup; + } - void setTestFlagInEHCleanup() { TestFlagInEHCleanup = true; } - bool shouldTestFlagInEHCleanup() const { return TestFlagInEHCleanup; } + void setTestFlagInEHCleanup() { + CleanupBits.TestFlagInEHCleanup = true; + } + bool shouldTestFlagInEHCleanup() const { + return CleanupBits.TestFlagInEHCleanup; + } - unsigned getFixupDepth() const { return FixupDepth; } + unsigned getFixupDepth() const { return CleanupBits.FixupDepth; } EHScopeStack::stable_iterator getEnclosingNormalCleanup() const { return EnclosingNormal; } - EHScopeStack::stable_iterator getEnclosingEHCleanup() const { - return EnclosingEH; - } - size_t getCleanupSize() const { return CleanupSize; } + size_t getCleanupSize() const { return CleanupBits.CleanupSize; } void *getCleanupBuffer() { return this + 1; } EHScopeStack::Cleanup *getCleanup() { @@ -327,41 +374,6 @@ public: return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size()); } - // Same stuff, only for EH branches instead of normal branches. - // It's quite possible that we could find a better representation - // for this. - - bool hasEHBranches() const { return ExtInfo && !ExtInfo->EHBranches.empty(); } - void addEHBranchAfter(llvm::ConstantInt *Index, - llvm::BasicBlock *Block) { - struct ExtInfo &ExtInfo = getExtInfo(); - if (ExtInfo.EHBranches.insert(Block)) - ExtInfo.EHBranchAfters.push_back(std::make_pair(Block, Index)); - } - - unsigned getNumEHBranchAfters() const { - return ExtInfo ? ExtInfo->EHBranchAfters.size() : 0; - } - - llvm::BasicBlock *getEHBranchAfterBlock(unsigned I) const { - assert(I < getNumEHBranchAfters()); - return ExtInfo->EHBranchAfters[I].first; - } - - llvm::ConstantInt *getEHBranchAfterIndex(unsigned I) const { - assert(I < getNumEHBranchAfters()); - return ExtInfo->EHBranchAfters[I].second; - } - - bool addEHBranchThrough(llvm::BasicBlock *Block) { - return getExtInfo().EHBranches.insert(Block); - } - - bool hasEHBranchThroughs() const { - if (!ExtInfo) return false; - return (ExtInfo->EHBranchAfters.size() != ExtInfo->EHBranches.size()); - } - static bool classof(const EHScope *Scope) { return (Scope->getKind() == Cleanup); } @@ -373,8 +385,6 @@ public: /// /// This is used to implement C++ exception specifications. 
class EHFilterScope : public EHScope { - unsigned NumFilters : BitsRemaining; - // Essentially ends in a flexible array member: // llvm::Value *FilterTypes[0]; @@ -387,42 +397,42 @@ class EHFilterScope : public EHScope { } public: - EHFilterScope(unsigned NumFilters) : - EHScope(Filter), NumFilters(NumFilters) {} + EHFilterScope(unsigned numFilters) + : EHScope(Filter, EHScopeStack::stable_end()) { + FilterBits.NumFilters = numFilters; + } - static size_t getSizeForNumFilters(unsigned NumFilters) { - return sizeof(EHFilterScope) + NumFilters * sizeof(llvm::Value*); + static size_t getSizeForNumFilters(unsigned numFilters) { + return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*); } - unsigned getNumFilters() const { return NumFilters; } + unsigned getNumFilters() const { return FilterBits.NumFilters; } - void setFilter(unsigned I, llvm::Value *FilterValue) { - assert(I < getNumFilters()); - getFilters()[I] = FilterValue; + void setFilter(unsigned i, llvm::Value *filterValue) { + assert(i < getNumFilters()); + getFilters()[i] = filterValue; } - llvm::Value *getFilter(unsigned I) const { - assert(I < getNumFilters()); - return getFilters()[I]; + llvm::Value *getFilter(unsigned i) const { + assert(i < getNumFilters()); + return getFilters()[i]; } - static bool classof(const EHScope *Scope) { - return Scope->getKind() == Filter; + static bool classof(const EHScope *scope) { + return scope->getKind() == Filter; } }; /// An exceptions scope which calls std::terminate if any exception /// reaches it. class EHTerminateScope : public EHScope { - unsigned DestIndex : BitsRemaining; public: - EHTerminateScope(unsigned Index) : EHScope(Terminate), DestIndex(Index) {} + EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope) + : EHScope(Terminate, enclosingEHScope) {} static size_t getSize() { return sizeof(EHTerminateScope); } - unsigned getDestIndex() const { return DestIndex; } - - static bool classof(const EHScope *Scope) { - return Scope->getKind() == Terminate; + static bool classof(const EHScope *scope) { + return scope->getKind() == Terminate; } }; @@ -498,26 +508,17 @@ inline EHScopeStack::iterator EHScopeStack::end() const { inline void EHScopeStack::popCatch() { assert(!empty() && "popping exception stack when not empty"); - assert(isa(*begin())); - StartOfData += EHCatchScope::getSizeForNumHandlers( - cast(*begin()).getNumHandlers()); - - if (empty()) NextEHDestIndex = FirstEHDestIndex; - - assert(CatchDepth > 0 && "mismatched catch/terminate push/pop"); - CatchDepth--; + EHCatchScope &scope = cast(*begin()); + InnermostEHScope = scope.getEnclosingEHScope(); + StartOfData += EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers()); } inline void EHScopeStack::popTerminate() { assert(!empty() && "popping exception stack when not empty"); - assert(isa(*begin())); + EHTerminateScope &scope = cast(*begin()); + InnermostEHScope = scope.getEnclosingEHScope(); StartOfData += EHTerminateScope::getSize(); - - if (empty()) NextEHDestIndex = FirstEHDestIndex; - - assert(CatchDepth > 0 && "mismatched catch/terminate push/pop"); - CatchDepth--; } inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const { @@ -532,28 +533,6 @@ EHScopeStack::stabilize(iterator ir) const { return stable_iterator(EndOfBuffer - ir.Ptr); } -inline EHScopeStack::stable_iterator -EHScopeStack::getInnermostActiveNormalCleanup() const { - for (EHScopeStack::stable_iterator - I = getInnermostNormalCleanup(), E = stable_end(); I != E; ) { - EHCleanupScope &S = cast(*find(I)); - if 
(S.isActive()) return I; - I = S.getEnclosingNormalCleanup(); - } - return stable_end(); -} - -inline EHScopeStack::stable_iterator -EHScopeStack::getInnermostActiveEHCleanup() const { - for (EHScopeStack::stable_iterator - I = getInnermostEHCleanup(), E = stable_end(); I != E; ) { - EHCleanupScope &S = cast(*find(I)); - if (S.isActive()) return I; - I = S.getEnclosingEHCleanup(); - } - return stable_end(); -} - } } diff --git a/lib/CodeGen/CGException.cpp b/lib/CodeGen/CGException.cpp index 46d127cf80..61c1739eab 100644 --- a/lib/CodeGen/CGException.cpp +++ b/lib/CodeGen/CGException.cpp @@ -467,6 +467,46 @@ void CodeGenFunction::EmitStartEHSpec(const Decl *D) { } } +/// Emit the dispatch block for a filter scope if necessary. +static void emitFilterDispatchBlock(CodeGenFunction &CGF, + EHFilterScope &filterScope) { + llvm::BasicBlock *dispatchBlock = filterScope.getCachedEHDispatchBlock(); + if (!dispatchBlock) return; + if (dispatchBlock->use_empty()) { + delete dispatchBlock; + return; + } + + CGBuilderTy::InsertPoint savedIP = CGF.Builder.saveIP(); + CGF.EmitBlockAfterUses(dispatchBlock); + + // If this isn't a catch-all filter, we need to check whether we got + // here because the filter triggered. + if (filterScope.getNumFilters()) { + // Load the selector value. + llvm::Value *selector = + CGF.Builder.CreateLoad(CGF.getEHSelectorSlot(), "selector"); + + llvm::BasicBlock *unexpectedBB = CGF.createBasicBlock("ehspec.unexpected"); + + llvm::Value *zero = CGF.Builder.getInt32(0); + llvm::Value *failsFilter = + CGF.Builder.CreateICmpSLT(selector, zero, "ehspec.fails"); + CGF.Builder.CreateCondBr(failsFilter, unexpectedBB, CGF.getEHResumeBlock()); + + CGF.EmitBlock(unexpectedBB); + } + + // Call __cxa_call_unexpected. This doesn't need to be an invoke + // because __cxa_call_unexpected magically filters exceptions + // according to the last landing pad the exception was thrown + // into. Seriously. + llvm::Value *exn = CGF.Builder.CreateLoad(CGF.getExceptionSlot()); + CGF.Builder.CreateCall(getUnexpectedFn(CGF), exn) + ->setDoesNotReturn(); + CGF.Builder.CreateUnreachable(); +} + void CodeGenFunction::EmitEndEHSpec(const Decl *D) { if (!CGM.getLangOptions().CXXExceptions) return; @@ -484,6 +524,8 @@ void CodeGenFunction::EmitEndEHSpec(const Decl *D) { EHStack.popTerminate(); } } else if (EST == EST_Dynamic || EST == EST_DynamicNone) { + EHFilterScope &filterScope = cast(*EHStack.begin()); + emitFilterDispatchBlock(*this, filterScope); EHStack.popFilter(); } } @@ -525,6 +567,50 @@ void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) { } } +llvm::BasicBlock * +CodeGenFunction::getEHDispatchBlock(EHScopeStack::stable_iterator si) { + // The dispatch block for the end of the scope chain is a block that + // just resumes unwinding. + if (si == EHStack.stable_end()) + return getEHResumeBlock(); + + // Otherwise, we should look at the actual scope. + EHScope &scope = *EHStack.find(si); + + llvm::BasicBlock *dispatchBlock = scope.getCachedEHDispatchBlock(); + if (!dispatchBlock) { + switch (scope.getKind()) { + case EHScope::Catch: { + // Apply a special case to a single catch-all. + EHCatchScope &catchScope = cast(scope); + if (catchScope.getNumHandlers() == 1 && + catchScope.getHandler(0).isCatchAll()) { + dispatchBlock = catchScope.getHandler(0).Block; + + // Otherwise, make a dispatch block. 
+ } else { + dispatchBlock = createBasicBlock("catch.dispatch"); + } + break; + } + + case EHScope::Cleanup: + dispatchBlock = createBasicBlock("ehcleanup"); + break; + + case EHScope::Filter: + dispatchBlock = createBasicBlock("filter.dispatch"); + break; + + case EHScope::Terminate: + dispatchBlock = getTerminateHandler(); + break; + } + scope.setCachedEHDispatchBlock(dispatchBlock); + } + return dispatchBlock; +} + /// Check whether this is a non-EH scope, i.e. a scope which doesn't /// affect exception handling. Currently, the only non-EH scopes are /// normal-only cleanup scopes. @@ -621,280 +707,148 @@ const CleanupHackLevel_t CleanupHackLevel = CHL_MandatoryCleanup; llvm::BasicBlock *CodeGenFunction::EmitLandingPad() { assert(EHStack.requiresLandingPad()); - for (EHScopeStack::iterator ir = EHStack.begin(); ; ) { - assert(ir != EHStack.end() && - "stack requiring landing pad is nothing but non-EH scopes?"); - - // If this is a terminate scope, just use the singleton terminate - // landing pad. - if (isa(*ir)) - return getTerminateLandingPad(); - - // If this isn't an EH scope, iterate; otherwise break out. - if (!isNonEHScope(*ir)) break; - ++ir; + EHScope &innermostEHScope = *EHStack.find(EHStack.getInnermostEHScope()); + switch (innermostEHScope.getKind()) { + case EHScope::Terminate: + return getTerminateLandingPad(); - // We haven't checked this scope for a cached landing pad yet. - if (llvm::BasicBlock *LP = ir->getCachedLandingPad()) - return LP; + case EHScope::Catch: + case EHScope::Cleanup: + case EHScope::Filter: + if (llvm::BasicBlock *lpad = innermostEHScope.getCachedLandingPad()) + return lpad; } // Save the current IR generation state. - CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP(); + CGBuilderTy::InsertPoint savedIP = Builder.saveAndClearIP(); - const EHPersonality &Personality = EHPersonality::get(getLangOptions()); + const EHPersonality &personality = EHPersonality::get(getLangOptions()); // Create and configure the landing pad. - llvm::BasicBlock *LP = createBasicBlock("lpad"); - EmitBlock(LP); + llvm::BasicBlock *lpad = createBasicBlock("lpad"); + EmitBlock(lpad); // Save the exception pointer. It's safe to use a single exception // pointer per function because EH cleanups can never have nested // try/catches. - llvm::CallInst *Exn = + llvm::CallInst *exn = Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_exception), "exn"); - Exn->setDoesNotThrow(); - Builder.CreateStore(Exn, getExceptionSlot()); + exn->setDoesNotThrow(); // Build the selector arguments. - SmallVector EHSelector; - EHSelector.push_back(Exn); - EHSelector.push_back(getOpaquePersonalityFn(CGM, Personality)); + SmallVector selector; + selector.push_back(exn); + selector.push_back(getOpaquePersonalityFn(CGM, personality)); // Accumulate all the handlers in scope. - llvm::DenseMap EHHandlers; - UnwindDest CatchAll; - bool HasEHCleanup = false; - bool HasEHFilter = false; - SmallVector EHFilters; + bool hasCatchAll = false; + bool hasCleanup = false; + bool hasFilter = false; + SmallVector filterTypes; + llvm::SmallPtrSet catchTypes; for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.end(); I != E; ++I) { switch (I->getKind()) { case EHScope::Cleanup: - if (!HasEHCleanup) - HasEHCleanup = cast(*I).isEHCleanup(); - // We otherwise don't care about cleanups. + // If we have a cleanup, remember that. 
+ hasCleanup = (hasCleanup || cast(*I).isEHCleanup()); continue; case EHScope::Filter: { assert(I.next() == EHStack.end() && "EH filter is not end of EH stack"); - assert(!CatchAll.isValid() && "EH filter reached after catch-all"); + assert(!hasCatchAll && "EH filter reached after catch-all"); // Filter scopes get added to the selector in weird ways. - EHFilterScope &Filter = cast(*I); - HasEHFilter = true; + EHFilterScope &filter = cast(*I); + hasFilter = true; // Add all the filter values which we aren't already explicitly // catching. - for (unsigned I = 0, E = Filter.getNumFilters(); I != E; ++I) { - llvm::Value *FV = Filter.getFilter(I); - if (!EHHandlers.count(FV)) - EHFilters.push_back(FV); + for (unsigned i = 0, e = filter.getNumFilters(); i != e; ++i) { + llvm::Value *filterType = filter.getFilter(i); + if (!catchTypes.count(filterType)) + filterTypes.push_back(filterType); } goto done; } case EHScope::Terminate: // Terminate scopes are basically catch-alls. - assert(!CatchAll.isValid()); - CatchAll = UnwindDest(getTerminateHandler(), - EHStack.getEnclosingEHCleanup(I), - cast(*I).getDestIndex()); + assert(!hasCatchAll); + hasCatchAll = true; goto done; case EHScope::Catch: break; } - EHCatchScope &Catch = cast(*I); - for (unsigned HI = 0, HE = Catch.getNumHandlers(); HI != HE; ++HI) { - EHCatchScope::Handler Handler = Catch.getHandler(HI); - - // Catch-all. We should only have one of these per catch. - if (!Handler.Type) { - assert(!CatchAll.isValid()); - CatchAll = UnwindDest(Handler.Block, - EHStack.getEnclosingEHCleanup(I), - Handler.Index); - continue; + EHCatchScope &catchScope = cast(*I); + for (unsigned hi = 0, he = catchScope.getNumHandlers(); hi != he; ++hi) { + EHCatchScope::Handler handler = catchScope.getHandler(hi); + + // If this is a catch-all, register that and abort. + if (!handler.Type) { + assert(!hasCatchAll); + hasCatchAll = true; + goto done; } // Check whether we already have a handler for this type. - UnwindDest &Dest = EHHandlers[Handler.Type]; - if (Dest.isValid()) continue; - - EHSelector.push_back(Handler.Type); - Dest = UnwindDest(Handler.Block, - EHStack.getEnclosingEHCleanup(I), - Handler.Index); + if (catchTypes.insert(handler.Type)) { + // If not, add it directly to the selector. + selector.push_back(handler.Type); + } } - - // Stop if we found a catch-all. - if (CatchAll.isValid()) break; } done: - unsigned LastToEmitInLoop = EHSelector.size(); - // If we have a catch-all, add null to the selector. - if (CatchAll.isValid()) { - EHSelector.push_back(getCatchAllValue(*this)); + assert(!(hasCatchAll && hasFilter)); + if (hasCatchAll) { + selector.push_back(getCatchAllValue(*this)); // If we have an EH filter, we need to add those handlers in the // right place in the selector, which is to say, at the end. - } else if (HasEHFilter) { + } else if (hasFilter) { // Create a filter expression: an integer constant saying how many // filters there are (+1 to avoid ambiguity with 0 for cleanup), // followed by the filter types. The personality routine only // lands here if the filter doesn't match. - EHSelector.push_back(llvm::ConstantInt::get(Builder.getInt32Ty(), - EHFilters.size() + 1)); - EHSelector.append(EHFilters.begin(), EHFilters.end()); + selector.push_back(Builder.getInt32(filterTypes.size() + 1)); + selector.append(filterTypes.begin(), filterTypes.end()); // Also check whether we need a cleanup. 
- if (CleanupHackLevel == CHL_MandatoryCatchall || HasEHCleanup) - EHSelector.push_back(CleanupHackLevel == CHL_MandatoryCatchall + if (CleanupHackLevel == CHL_MandatoryCatchall || hasCleanup) + selector.push_back(CleanupHackLevel == CHL_MandatoryCatchall ? getCatchAllValue(*this) : getCleanupValue(*this)); // Otherwise, signal that we at least have cleanups. - } else if (CleanupHackLevel == CHL_MandatoryCatchall || HasEHCleanup) { - EHSelector.push_back(CleanupHackLevel == CHL_MandatoryCatchall + } else if (CleanupHackLevel == CHL_MandatoryCatchall || hasCleanup) { + selector.push_back(CleanupHackLevel == CHL_MandatoryCatchall ? getCatchAllValue(*this) : getCleanupValue(*this)); - - // At the MandatoryCleanup hack level, we don't need to actually - // spuriously tell the unwinder that we have cleanups, but we do - // need to always be prepared to handle cleanups. - } else if (CleanupHackLevel == CHL_MandatoryCleanup) { - // Just don't decrement LastToEmitInLoop. - - } else { - assert(LastToEmitInLoop > 2); - LastToEmitInLoop--; } - assert(EHSelector.size() >= 3 && "selector call has only two arguments!"); + assert(selector.size() >= 3 && "selector call has only two arguments!"); // Tell the backend how to generate the landing pad. - llvm::CallInst *Selection = + llvm::CallInst *selectorCall = Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_selector), - EHSelector, "eh.selector"); - Selection->setDoesNotThrow(); + selector, "eh.selector"); + selectorCall->setDoesNotThrow(); - // Save the selector value in mandatory-cleanup mode. - if (CleanupHackLevel == CHL_MandatoryCleanup) - Builder.CreateStore(Selection, getEHSelectorSlot()); - - // Select the right handler. - llvm::Value *llvm_eh_typeid_for = - CGM.getIntrinsic(llvm::Intrinsic::eh_typeid_for); - - // The results of llvm_eh_typeid_for aren't reliable --- at least - // not locally --- so we basically have to do this as an 'if' chain. - // We walk through the first N-1 catch clauses, testing and chaining, - // and then fall into the final clause (which is either a cleanup, a - // filter (possibly with a cleanup), a catch-all, or another catch). - for (unsigned I = 2; I != LastToEmitInLoop; ++I) { - llvm::Value *Type = EHSelector[I]; - UnwindDest Dest = EHHandlers[Type]; - assert(Dest.isValid() && "no handler entry for value in selector?"); - - // Figure out where to branch on a match. As a debug code-size - // optimization, if the scope depth matches the innermost cleanup, - // we branch directly to the catch handler. - llvm::BasicBlock *Match = Dest.getBlock(); - bool MatchNeedsCleanup = - Dest.getScopeDepth() != EHStack.getInnermostEHCleanup(); - if (MatchNeedsCleanup) - Match = createBasicBlock("eh.match"); - - llvm::BasicBlock *Next = createBasicBlock("eh.next"); - - // Check whether the exception matches. - llvm::CallInst *Id - = Builder.CreateCall(llvm_eh_typeid_for, - Builder.CreateBitCast(Type, Int8PtrTy)); - Id->setDoesNotThrow(); - Builder.CreateCondBr(Builder.CreateICmpEQ(Selection, Id), - Match, Next); - - // Emit match code if necessary. - if (MatchNeedsCleanup) { - EmitBlock(Match); - EmitBranchThroughEHCleanup(Dest); - } + // Save the selector and exception pointer. + Builder.CreateStore(exn, getExceptionSlot()); + Builder.CreateStore(selectorCall, getEHSelectorSlot()); - // Continue to the next match. - EmitBlock(Next); - } - - // Emit the final case in the selector. - // This might be a catch-all.... 
- if (CatchAll.isValid()) { - assert(isa(EHSelector.back())); - EmitBranchThroughEHCleanup(CatchAll); - - // ...or an EH filter... - } else if (HasEHFilter) { - llvm::Value *SavedSelection = Selection; - - // First, unwind out to the outermost scope if necessary. - if (EHStack.hasEHCleanups()) { - // The end here might not dominate the beginning, so we might need to - // save the selector if we need it. - llvm::AllocaInst *SelectorVar = 0; - if (HasEHCleanup) { - SelectorVar = CreateTempAlloca(Builder.getInt32Ty(), "selector.var"); - Builder.CreateStore(Selection, SelectorVar); - } - - llvm::BasicBlock *CleanupContBB = createBasicBlock("ehspec.cleanup.cont"); - EmitBranchThroughEHCleanup(UnwindDest(CleanupContBB, EHStack.stable_end(), - EHStack.getNextEHDestIndex())); - EmitBlock(CleanupContBB); - - if (HasEHCleanup) - SavedSelection = Builder.CreateLoad(SelectorVar, "ehspec.saved-selector"); - } - - // If there was a cleanup, we'll need to actually check whether we - // landed here because the filter triggered. - if (CleanupHackLevel != CHL_Ideal || HasEHCleanup) { - llvm::BasicBlock *UnexpectedBB = createBasicBlock("ehspec.unexpected"); - - llvm::Constant *Zero = llvm::ConstantInt::get(Int32Ty, 0); - llvm::Value *FailsFilter = - Builder.CreateICmpSLT(SavedSelection, Zero, "ehspec.fails"); - Builder.CreateCondBr(FailsFilter, UnexpectedBB, getRethrowDest().getBlock()); - - EmitBlock(UnexpectedBB); - } - - // Call __cxa_call_unexpected. This doesn't need to be an invoke - // because __cxa_call_unexpected magically filters exceptions - // according to the last landing pad the exception was thrown - // into. Seriously. - Builder.CreateCall(getUnexpectedFn(*this), - Builder.CreateLoad(getExceptionSlot())) - ->setDoesNotReturn(); - Builder.CreateUnreachable(); - - // ...or a normal catch handler... - } else if (CleanupHackLevel == CHL_Ideal && !HasEHCleanup) { - llvm::Value *Type = EHSelector.back(); - EmitBranchThroughEHCleanup(EHHandlers[Type]); - - // ...or a cleanup. - } else { - EmitBranchThroughEHCleanup(getRethrowDest()); - } + Builder.CreateBr(getEHDispatchBlock(EHStack.getInnermostEHScope())); // Restore the old IR generation state. - Builder.restoreIP(SavedIP); + Builder.restoreIP(savedIP); - return LP; + return lpad; } namespace { @@ -1138,16 +1092,113 @@ namespace { }; } +/// Emit the structure of the dispatch block for the given catch scope. +/// It is an invariant that the dispatch block already exists. +static void emitCatchDispatchBlock(CodeGenFunction &CGF, + EHCatchScope &catchScope) { + llvm::BasicBlock *dispatchBlock = catchScope.getCachedEHDispatchBlock(); + assert(dispatchBlock); + + // If there's only a single catch-all, getEHDispatchBlock returned + // that catch-all as the dispatch block. + if (catchScope.getNumHandlers() == 1 && + catchScope.getHandler(0).isCatchAll()) { + assert(dispatchBlock == catchScope.getHandler(0).Block); + return; + } + + CGBuilderTy::InsertPoint savedIP = CGF.Builder.saveIP(); + CGF.EmitBlockAfterUses(dispatchBlock); + + // Select the right handler. + llvm::Value *llvm_eh_typeid_for = + CGF.CGM.getIntrinsic(llvm::Intrinsic::eh_typeid_for); + + // Load the selector value. + llvm::Value *selector = + CGF.Builder.CreateLoad(CGF.getEHSelectorSlot(), "selector"); + + // Test against each of the exception types we claim to catch. 
+ for (unsigned i = 0, e = catchScope.getNumHandlers(); ; ++i) { + assert(i < e && "ran off end of handlers!"); + const EHCatchScope::Handler &handler = catchScope.getHandler(i); + + llvm::Value *typeValue = handler.Type; + assert(typeValue && "fell into catch-all case!"); + typeValue = CGF.Builder.CreateBitCast(typeValue, CGF.Int8PtrTy); + + // Figure out the next block. + bool nextIsEnd; + llvm::BasicBlock *nextBlock; + + // If this is the last handler, we're at the end, and the next + // block is the block for the enclosing EH scope. + if (i + 1 == e) { + nextBlock = CGF.getEHDispatchBlock(catchScope.getEnclosingEHScope()); + nextIsEnd = true; + + // If the next handler is a catch-all, we're at the end, and the + // next block is that handler. + } else if (catchScope.getHandler(i+1).isCatchAll()) { + nextBlock = catchScope.getHandler(i+1).Block; + nextIsEnd = true; + + // Otherwise, we're not at the end and we need a new block. + } else { + nextBlock = CGF.createBasicBlock("catch.fallthrough"); + nextIsEnd = false; + } + + // Figure out the catch type's index in the LSDA's type table. + llvm::CallInst *typeIndex = + CGF.Builder.CreateCall(llvm_eh_typeid_for, typeValue); + typeIndex->setDoesNotThrow(); + + llvm::Value *matchesTypeIndex = + CGF.Builder.CreateICmpEQ(selector, typeIndex, "matches"); + CGF.Builder.CreateCondBr(matchesTypeIndex, handler.Block, nextBlock); + + // If the next handler is a catch-all, we're completely done. + if (nextIsEnd) { + CGF.Builder.restoreIP(savedIP); + return; + + // Otherwise we need to emit and continue at that block. + } else { + CGF.EmitBlock(nextBlock); + } + } + + llvm_unreachable("fell out of loop!"); +} + +void CodeGenFunction::popCatchScope() { + EHCatchScope &catchScope = cast(*EHStack.begin()); + if (catchScope.hasEHBranches()) + emitCatchDispatchBlock(*this, catchScope); + EHStack.popCatch(); +} + void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) { unsigned NumHandlers = S.getNumHandlers(); EHCatchScope &CatchScope = cast(*EHStack.begin()); assert(CatchScope.getNumHandlers() == NumHandlers); + // If the catch was not required, bail out now. + if (!CatchScope.hasEHBranches()) { + EHStack.popCatch(); + return; + } + + // Emit the structure of the EH dispatch for this catch. + emitCatchDispatchBlock(*this, CatchScope); + // Copy the handler blocks off before we pop the EH stack. Emitting // the handlers might scribble on this memory. SmallVector Handlers(NumHandlers); memcpy(Handlers.data(), CatchScope.begin(), NumHandlers * sizeof(EHCatchScope::Handler)); + EHStack.popCatch(); // The fall-through block. @@ -1163,12 +1214,19 @@ void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) { ImplicitRethrow = isa(CurCodeDecl) || isa(CurCodeDecl); - for (unsigned I = 0; I != NumHandlers; ++I) { - llvm::BasicBlock *CatchBlock = Handlers[I].Block; - EmitBlock(CatchBlock); + // Perversely, we emit the handlers backwards precisely because we + // want them to appear in source order. In all of these cases, the + // catch block will have exactly one predecessor, which will be a + // particular block in the catch dispatch. However, in the case of + // a catch-all, one of the dispatch blocks will branch to two + // different handlers, and EmitBlockAfterUses will cause the second + // handler to be moved before the first. + for (unsigned I = NumHandlers; I != 0; --I) { + llvm::BasicBlock *CatchBlock = Handlers[I-1].Block; + EmitBlockAfterUses(CatchBlock); // Catch the exception if this isn't a catch-all. 
- const CXXCatchStmt *C = S.getHandler(I); + const CXXCatchStmt *C = S.getHandler(I-1); // Enter a cleanup scope, including the catch variable and the // end-catch. @@ -1350,7 +1408,8 @@ void CodeGenFunction::FinallyInfo::exit(CodeGenFunction &CGF) { // Leave the finally catch-all. EHCatchScope &catchScope = cast(*CGF.EHStack.begin()); llvm::BasicBlock *catchBB = catchScope.getHandler(0).Block; - CGF.EHStack.popCatch(); + + CGF.popCatchScope(); // If there are any references to the catch-all block, emit it. if (catchBB->use_empty()) { @@ -1443,14 +1502,14 @@ llvm::BasicBlock *CodeGenFunction::getTerminateHandler() { return TerminateHandler; } -CodeGenFunction::UnwindDest CodeGenFunction::getRethrowDest() { - if (RethrowBlock.isValid()) return RethrowBlock; +llvm::BasicBlock *CodeGenFunction::getEHResumeBlock() { + if (EHResumeBlock) return EHResumeBlock; CGBuilderTy::InsertPoint SavedIP = Builder.saveIP(); // We emit a jump to a notional label at the outermost unwind state. - llvm::BasicBlock *Unwind = createBasicBlock("eh.resume"); - Builder.SetInsertPoint(Unwind); + EHResumeBlock = createBasicBlock("eh.resume"); + Builder.SetInsertPoint(EHResumeBlock); const EHPersonality &Personality = EHPersonality::get(CGM.getLangOptions()); @@ -1494,7 +1553,5 @@ CodeGenFunction::UnwindDest CodeGenFunction::getRethrowDest() { Builder.restoreIP(SavedIP); - RethrowBlock = UnwindDest(Unwind, EHStack.stable_end(), 0); - return RethrowBlock; + return EHResumeBlock; } - diff --git a/lib/CodeGen/CGObjCMac.cpp b/lib/CodeGen/CGObjCMac.cpp index 5c56e81adb..5553b20fa5 100644 --- a/lib/CodeGen/CGObjCMac.cpp +++ b/lib/CodeGen/CGObjCMac.cpp @@ -2753,7 +2753,6 @@ void FragileHazards::collectLocals() { llvm::DenseSet AllocasToIgnore; addIfPresent(AllocasToIgnore, CGF.ReturnValue); addIfPresent(AllocasToIgnore, CGF.NormalCleanupDest); - addIfPresent(AllocasToIgnore, CGF.EHCleanupDest); // Collect all the allocas currently in the function. This is // probably way too aggressive. diff --git a/lib/CodeGen/CGObjCRuntime.cpp b/lib/CodeGen/CGObjCRuntime.cpp index 6d9b5b3374..2f533b15de 100644 --- a/lib/CodeGen/CGObjCRuntime.cpp +++ b/lib/CodeGen/CGObjCRuntime.cpp @@ -211,7 +211,7 @@ void CGObjCRuntime::EmitTryCatchStmt(CodeGenFunction &CGF, // Leave the try. if (S.getNumCatchStmts()) - CGF.EHStack.popCatch(); + CGF.popCatchScope(); // Remember where we were. 
CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP(); diff --git a/lib/CodeGen/CGStmt.cpp b/lib/CodeGen/CGStmt.cpp index 9987fd900b..e200305c0d 100644 --- a/lib/CodeGen/CGStmt.cpp +++ b/lib/CodeGen/CGStmt.cpp @@ -286,6 +286,23 @@ void CodeGenFunction::EmitBranch(llvm::BasicBlock *Target) { Builder.ClearInsertionPoint(); } +void CodeGenFunction::EmitBlockAfterUses(llvm::BasicBlock *block) { + bool inserted = false; + for (llvm::BasicBlock::use_iterator + i = block->use_begin(), e = block->use_end(); i != e; ++i) { + if (llvm::Instruction *insn = dyn_cast(*i)) { + CurFn->getBasicBlockList().insertAfter(insn->getParent(), block); + inserted = true; + break; + } + } + + if (!inserted) + CurFn->getBasicBlockList().push_back(block); + + Builder.SetInsertPoint(block); +} + CodeGenFunction::JumpDest CodeGenFunction::getJumpDestForLabel(const LabelDecl *D) { JumpDest &Dest = LabelMap[D]; diff --git a/lib/CodeGen/CodeGenFunction.cpp b/lib/CodeGen/CodeGenFunction.cpp index bdad01344a..cd8f635ee5 100644 --- a/lib/CodeGen/CodeGenFunction.cpp +++ b/lib/CodeGen/CodeGenFunction.cpp @@ -32,8 +32,8 @@ CodeGenFunction::CodeGenFunction(CodeGenModule &cgm) : CodeGenTypeCache(cgm), CGM(cgm), Target(CGM.getContext().Target), Builder(cgm.getModule().getContext()), AutoreleaseResult(false), BlockInfo(0), BlockPointer(0), - NormalCleanupDest(0), EHCleanupDest(0), NextCleanupDestIndex(1), - ExceptionSlot(0), EHSelectorSlot(0), + NormalCleanupDest(0), NextCleanupDestIndex(1), + EHResumeBlock(0), ExceptionSlot(0), EHSelectorSlot(0), DebugInfo(0), DisableDebugInfo(false), DidCallStackSave(false), IndirectBranch(0), SwitchInsn(0), CaseRangeBlock(0), UnreachableBlock(0), CXXThisDecl(0), CXXThisValue(0), CXXVTTDecl(0), CXXVTTValue(0), @@ -189,7 +189,7 @@ void CodeGenFunction::FinishFunction(SourceLocation EndLoc) { } } - EmitIfUsed(*this, RethrowBlock.getBlock()); + EmitIfUsed(*this, EHResumeBlock); EmitIfUsed(*this, TerminateLandingPad); EmitIfUsed(*this, TerminateHandler); EmitIfUsed(*this, UnreachableBlock); diff --git a/lib/CodeGen/CodeGenFunction.h b/lib/CodeGen/CodeGenFunction.h index 42864ceb63..381842097f 100644 --- a/lib/CodeGen/CodeGenFunction.h +++ b/lib/CodeGen/CodeGenFunction.h @@ -325,16 +325,8 @@ private: /// The innermost normal cleanup on the stack. stable_iterator InnermostNormalCleanup; - /// The innermost EH cleanup on the stack. - stable_iterator InnermostEHCleanup; - - /// The number of catches on the stack. - unsigned CatchDepth; - - /// The current EH destination index. Reset to FirstCatchIndex - /// whenever the last EH cleanup is popped. - unsigned NextEHDestIndex; - enum { FirstEHDestIndex = 1 }; + /// The innermost EH scope on the stack. + stable_iterator InnermostEHScope; /// The current set of branch fixups. A branch fixup is a jump to /// an as-yet unemitted label, i.e. a label for which we don't yet @@ -362,8 +354,7 @@ private: public: EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0), InnermostNormalCleanup(stable_end()), - InnermostEHCleanup(stable_end()), - CatchDepth(0), NextEHDestIndex(FirstEHDestIndex) {} + InnermostEHScope(stable_end()) {} ~EHScopeStack() { delete[] StartOfBuffer; } // Variadic templates would make this not terrible. @@ -435,8 +426,7 @@ public: return new (Buffer) T(N, a0, a1, a2); } - /// Pops a cleanup scope off the stack. This should only be called - /// by CodeGenFunction::PopCleanupBlock. + /// Pops a cleanup scope off the stack. This is private to CGCleanup.cpp. void popCleanup(); /// Push a set of catch handlers on the stack. 
The catch is @@ -444,7 +434,7 @@ public: /// set on it. class EHCatchScope *pushCatch(unsigned NumHandlers); - /// Pops a catch scope off the stack. + /// Pops a catch scope off the stack. This is private to CGException.cpp. void popCatch(); /// Push an exceptions filter on the stack. @@ -463,7 +453,7 @@ public: bool empty() const { return StartOfData == EndOfBuffer; } bool requiresLandingPad() const { - return (CatchDepth || hasEHCleanups()); + return InnermostEHScope != stable_end(); } /// Determines whether there are any normal cleanups on the stack. @@ -476,19 +466,13 @@ public: stable_iterator getInnermostNormalCleanup() const { return InnermostNormalCleanup; } - stable_iterator getInnermostActiveNormalCleanup() const; // CGException.h + stable_iterator getInnermostActiveNormalCleanup() const; - /// Determines whether there are any EH cleanups on the stack. - bool hasEHCleanups() const { - return InnermostEHCleanup != stable_end(); + stable_iterator getInnermostEHScope() const { + return InnermostEHScope; } - /// Returns the innermost EH cleanup on the stack, or stable_end() - /// if there are no EH cleanups. - stable_iterator getInnermostEHCleanup() const { - return InnermostEHCleanup; - } - stable_iterator getInnermostActiveEHCleanup() const; // CGException.h + stable_iterator getInnermostActiveEHScope() const; /// An unstable reference to a scope-stack depth. Invalidated by /// pushes but not pops. @@ -515,10 +499,6 @@ public: /// Translates an iterator into a stable_iterator. stable_iterator stabilize(iterator it) const; - /// Finds the nearest cleanup enclosing the given iterator. - /// Returns stable_iterator::invalid() if there are no such cleanups. - stable_iterator getEnclosingEHCleanup(iterator it) const; - /// Turn a stable reference to a scope depth into a unstable pointer /// to the EH stack. iterator find(stable_iterator save) const; @@ -547,9 +527,6 @@ public: /// Clears the branch-fixups list. This should only be called by /// ResolveAllBranchFixups. void clearFixups() { BranchFixups.clear(); } - - /// Gets the next EH destination index. - unsigned getNextEHDestIndex() { return NextEHDestIndex++; } }; /// CodeGenFunction - This class organizes the per-function state that is used @@ -580,26 +557,6 @@ public: unsigned Index; }; - /// An unwind destination is an abstract label, branching to which - /// may require a jump out through EH cleanups. - struct UnwindDest { - UnwindDest() : Block(0), ScopeDepth(), Index(0) {} - UnwindDest(llvm::BasicBlock *Block, - EHScopeStack::stable_iterator Depth, - unsigned Index) - : Block(Block), ScopeDepth(Depth), Index(Index) {} - - bool isValid() const { return Block != 0; } - llvm::BasicBlock *getBlock() const { return Block; } - EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; } - unsigned getDestIndex() const { return Index; } - - private: - llvm::BasicBlock *Block; - EHScopeStack::stable_iterator ScopeDepth; - unsigned Index; - }; - CodeGenModule &CGM; // Per-module state. const TargetInfo &Target; @@ -629,9 +586,6 @@ public: /// iff the function has no return value. llvm::Value *ReturnValue; - /// RethrowBlock - Unified rethrow block. - UnwindDest RethrowBlock; - /// AllocaInsertPoint - This is an instruction in the entry block before which /// we prefer to insert allocas. llvm::AssertingVH AllocaInsertPt; @@ -652,10 +606,12 @@ public: /// i32s containing the indexes of the cleanup destinations. 
   llvm::AllocaInst *NormalCleanupDest;
-  llvm::AllocaInst *EHCleanupDest;
 
   unsigned NextCleanupDestIndex;
 
+  /// EHResumeBlock - Unified block containing a call to llvm.eh.resume.
+  llvm::BasicBlock *EHResumeBlock;
+
   /// The exception slot. All landing pads write the current
   /// exception pointer into this alloca.
   llvm::Value *ExceptionSlot;
@@ -886,14 +842,13 @@ public:
   /// a conservatively correct answer for this method.
   bool isObviouslyBranchWithoutCleanups(JumpDest Dest) const;
 
-  /// EmitBranchThroughEHCleanup - Emit a branch from the current
-  /// insert block through the EH cleanup handling code (if any) and
-  /// then on to \arg Dest.
-  void EmitBranchThroughEHCleanup(UnwindDest Dest);
+  /// popCatchScope - Pops the catch scope at the top of the EHScope
+  /// stack, emitting any required code (other than the catch handlers
+  /// themselves).
+  void popCatchScope();
 
-  /// getRethrowDest - Returns the unified outermost-scope rethrow
-  /// destination.
-  UnwindDest getRethrowDest();
+  llvm::BasicBlock *getEHResumeBlock();
+  llvm::BasicBlock *getEHDispatchBlock(EHScopeStack::stable_iterator scope);
 
   /// An object to manage conditionally-evaluated expressions.
   class ConditionalEvaluation {
@@ -1167,7 +1122,6 @@ public:
   llvm::Value *getEHSelectorSlot();
 
   llvm::Value *getNormalCleanupDestSlot();
-  llvm::Value *getEHCleanupDestSlot();
 
   llvm::BasicBlock *getUnreachableBlock() {
     if (!UnreachableBlock) {
@@ -1444,6 +1398,10 @@ public:
   /// means the block can be ignored if it is unreachable.
   void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false);
 
+  /// EmitBlockAfterUses - Emit the given block somewhere hopefully
+  /// near its uses, and leave the insertion point in it.
+  void EmitBlockAfterUses(llvm::BasicBlock *BB);
+
   /// EmitBranch - Emit a branch to the specified basic block from the current
   /// insert block, taking care to avoid creation of branches from dummy
   /// blocks. It is legal to call this function even if there is no current
diff --git a/test/CodeGenCXX/destructors.cpp b/test/CodeGenCXX/destructors.cpp
index bfdf93c5a2..dc24551a59 100644
--- a/test/CodeGenCXX/destructors.cpp
+++ b/test/CodeGenCXX/destructors.cpp
@@ -237,7 +237,6 @@ namespace test5 {
   // CHECK: [[ELEMS:%.*]] = alloca [5 x [[A:%.*]]], align
   // CHECK-NEXT: [[EXN:%.*]] = alloca i8*
   // CHECK-NEXT: [[SEL:%.*]] = alloca i32
-  // CHECK-NEXT: [[EHCLEANUP:%.*]] = alloca i32
   // CHECK-NEXT: [[BEGIN:%.*]] = getelementptr inbounds [5 x [[A]]]* [[ELEMS]], i32 0, i32 0
   // CHECK-NEXT: [[END:%.*]] = getelementptr inbounds [[A]]* [[BEGIN]], i64 5
   // CHECK-NEXT: br label
diff --git a/test/CodeGenCXX/eh.cpp b/test/CodeGenCXX/eh.cpp
index 736b124ab4..24807d92d7 100644
--- a/test/CodeGenCXX/eh.cpp
+++ b/test/CodeGenCXX/eh.cpp
@@ -32,7 +32,6 @@ void test2() {
 // CHECK: define void @_Z5test2v()
 // CHECK: [[EXNVAR:%.*]] = alloca i8*
 // CHECK-NEXT: [[SELECTORVAR:%.*]] = alloca i32
-// CHECK-NEXT: [[CLEANUPDESTVAR:%.*]] = alloca i32
 // CHECK-NEXT: [[EXNOBJ:%.*]] = call i8* @__cxa_allocate_exception(i64 16)
 // CHECK-NEXT: [[EXN:%.*]] = bitcast i8* [[EXNOBJ]] to [[DSTAR:%[^*]*\*]]
 // CHECK-NEXT: invoke void @_ZN7test2_DC1ERKS_([[DSTAR]] [[EXN]], [[DSTAR]] @d2)
@@ -107,7 +106,6 @@ namespace test7 {
 // CHECK: [[CAUGHTEXNVAR:%.*]] = alloca i8*
 // CHECK-NEXT: [[SELECTORVAR:%.*]] = alloca i32
 // CHECK-NEXT: [[INTCATCHVAR:%.*]] = alloca i32
-// CHECK-NEXT: [[EHCLEANUPDESTVAR:%.*]] = alloca i32
     try {
       try {
 // CHECK-NEXT: [[EXNALLOC:%.*]] = call i8* @__cxa_allocate_exception
@@ -118,24 +116,28 @@ namespace test7 {
       }
 
 // CHECK: [[CAUGHTEXN:%.*]] = call i8* @llvm.eh.exception()
-// CHECK-NEXT: store i8* [[CAUGHTEXN]], i8** [[CAUGHTEXNVAR]]
 // CHECK-NEXT: [[SELECTOR:%.*]] = call i32 (i8*, i8*, ...)* @llvm.eh.selector(i8* [[CAUGHTEXN]], i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*), i8* bitcast (i8** @_ZTIi to i8*), i8* null)
+// CHECK-NEXT: store i8* [[CAUGHTEXN]], i8** [[CAUGHTEXNVAR]]
 // CHECK-NEXT: store i32 [[SELECTOR]], i32* [[SELECTORVAR]]
-// CHECK-NEXT: call i32 @llvm.eh.typeid.for(i8* bitcast (i8** @_ZTIi to i8*))
-// CHECK-NEXT: icmp eq
+// CHECK-NEXT: br label
+// CHECK: [[SELECTOR:%.*]] = load i32* [[SELECTORVAR]]
+// CHECK-NEXT: [[T0:%.*]] = call i32 @llvm.eh.typeid.for(i8* bitcast (i8** @_ZTIi to i8*))
+// CHECK-NEXT: icmp eq i32 [[SELECTOR]], [[T0]]
 // CHECK-NEXT: br i1
-// CHECK: load i8** [[CAUGHTEXNVAR]]
-// CHECK-NEXT: call i8* @__cxa_begin_catch
-// CHECK: invoke void @__cxa_rethrow
+// CHECK: [[T0:%.*]] = load i8** [[CAUGHTEXNVAR]]
+// CHECK-NEXT: [[T1:%.*]] = call i8* @__cxa_begin_catch(i8* [[T0]])
+// CHECK-NEXT: [[T2:%.*]] = bitcast i8* [[T1]] to i32*
+// CHECK-NEXT: [[T3:%.*]] = load i32* [[T2]]
+// CHECK-NEXT: store i32 [[T3]], i32* {{%.*}}, align 4
+// CHECK-NEXT: invoke void @__cxa_rethrow
       catch (int) {
        throw;
      }
    }
 // CHECK: [[CAUGHTEXN:%.*]] = call i8* @llvm.eh.exception()
-// CHECK-NEXT: store i8* [[CAUGHTEXN]], i8** [[CAUGHTEXNVAR]]
 // CHECK-NEXT: [[SELECTOR:%.*]] = call i32 (i8*, i8*, ...)* @llvm.eh.selector(i8* [[CAUGHTEXN]], i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*), i8* null)
+// CHECK-NEXT: store i8* [[CAUGHTEXN]], i8** [[CAUGHTEXNVAR]]
 // CHECK-NEXT: store i32 [[SELECTOR]], i32* [[SELECTORVAR]]
-// CHECK-NEXT: store i32 1, i32* [[EHCLEANUPDESTVAR]]
 // CHECK-NEXT: call void @__cxa_end_catch()
 // CHECK-NEXT: br label
 // CHECK: load i8** [[CAUGHTEXNVAR]]
@@ -186,15 +188,14 @@ namespace test9 {
 // CHECK: invoke void @_ZN5test96opaqueEv()
     opaque();
   } catch (int x) {
+    // CHECK: call i8* @llvm.eh.exception
+    // CHECK: call i32 (i8*, i8*, ...)* @llvm.eh.selector(i8* {{.*}}, i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*), i8* bitcast (i8** @_ZTIi to i8*))
+    // CHECK: call i8* @__cxa_begin_catch
 
 // CHECK: invoke void @_ZN5test96opaqueEv()
 // CHECK: invoke void @__cxa_rethrow()
     opaque();
   }
-
-  // landing pad from first call to invoke
-  // CHECK: call i8* @llvm.eh.exception
-  // CHECK: call i32 (i8*, i8*, ...)* @llvm.eh.selector(i8* {{.*}}, i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*), i8* bitcast (i8** @_ZTIi to i8*))
 }
 
 // __cxa_end_catch can throw for some kinds of caught exceptions.
@@ -406,7 +407,6 @@ namespace test16 {
   // CHECK-NEXT: [[TEMP:%.*]] = alloca [[A:%.*]],
   // CHECK-NEXT: [[EXNSLOT:%.*]] = alloca i8*
   // CHECK-NEXT: [[SELECTORSLOT:%.*]] = alloca i32
-  // CHECK-NEXT: [[EHDEST:%.*]] = alloca i32
   // CHECK-NEXT: [[TEMP_ACTIVE:%.*]] = alloca i1
 
     cond() ? throw B(A()) : foo();
diff --git a/test/CodeGenCXX/exceptions.cpp b/test/CodeGenCXX/exceptions.cpp
index b32b90bf74..0ce9679a34 100644
--- a/test/CodeGenCXX/exceptions.cpp
+++ b/test/CodeGenCXX/exceptions.cpp
@@ -276,7 +276,6 @@ namespace test5 {
   // CHECK-NEXT: [[SELECTORSLOT:%.*]] = alloca i32
   // CHECK-NEXT: [[A:%.*]] = alloca [[A_T:%.*]], align 1
   // CHECK-NEXT: [[T:%.*]] = alloca [[T_T:%.*]], align 1
-  // CHECK-NEXT: alloca i32
   // CHECK-NEXT: invoke void @_ZN5test53fooEv()
   // CHECK: [[EXN:%.*]] = load i8** [[EXNSLOT]]
   // CHECK-NEXT: [[ADJ:%.*]] = call i8* @__cxa_get_exception_ptr(i8* [[EXN]])
@@ -325,7 +324,6 @@ namespace test7 {
   // CHECK-NEXT: alloca [[A:%.*]],
   // CHECK-NEXT: alloca i8*
   // CHECK-NEXT: alloca i32
-  // CHECK-NEXT: alloca i32
   // CHECK-NEXT: [[OUTER_A:%.*]] = alloca i1
   // CHECK-NEXT: alloca i8*
   // CHECK-NEXT: [[INNER_NEW:%.*]] = alloca i1
diff --git a/test/CodeGenCXX/goto.cpp b/test/CodeGenCXX/goto.cpp
index 9a12a91253..f32847d122 100644
--- a/test/CodeGenCXX/goto.cpp
+++ b/test/CodeGenCXX/goto.cpp
@@ -13,7 +13,6 @@ namespace test0 {
   // CHECK-NEXT: [[Z:%.*]] = alloca [[A]]
   // CHECK-NEXT: [[EXN:%.*]] = alloca i8*
   // CHECK-NEXT: [[SEL:%.*]] = alloca i32
-  // CHECK-NEXT: alloca i32
   // CHECK-NEXT: [[V:%.*]] = alloca [[V:%.*]]*,
   // CHECK-NEXT: [[TMP:%.*]] = alloca [[A]]
   // CHECK-NEXT: [[CLEANUPACTIVE:%.*]] = alloca i1
diff --git a/test/CodeGenCXX/partial-destruction.cpp b/test/CodeGenCXX/partial-destruction.cpp
index 82deca06cf..b28c1384b8 100644
--- a/test/CodeGenCXX/partial-destruction.cpp
+++ b/test/CodeGenCXX/partial-destruction.cpp
@@ -16,7 +16,6 @@ namespace test0 {
   // CHECK-NEXT: [[ENDVAR:%.*]] = alloca [[A]]*
   // CHECK-NEXT: [[EXN:%.*]] = alloca i8*
   // CHECK-NEXT: [[SEL:%.*]] = alloca i32
-  // CHECK-NEXT: [[CLEANUP:%.*]] = alloca i32
 
   // Initialize.
   // CHECK-NEXT: [[E_BEGIN:%.*]] = getelementptr inbounds [10 x [[A]]]* [[AS]], i64 0, i64 0
@@ -100,7 +99,6 @@ namespace test1 {
   // CHECK: [[V:%.*]] = alloca [[B:%.*]], align 4
   // CHECK-NEXT: alloca i8*
   // CHECK-NEXT: alloca i32
-  // CHECK-NEXT: alloca i32
   // CHECK-NEXT: [[X:%.*]] = getelementptr inbounds [[B]]* [[V]], i32 0, i32 0
   // CHECK-NEXT: call void @_ZN5test11AC1Ei([[A:%.*]]* [[X]], i32 5)
   // CHECK-NEXT: [[Y:%.*]] = getelementptr inbounds [[B]]* [[V]], i32 0, i32 1
@@ -129,7 +127,6 @@ namespace test2 {
   // CHECK: [[V:%.*]] = alloca [4 x [7 x [[A:%.*]]]], align 1
   // CHECK-NEXT: alloca i8*
   // CHECK-NEXT: alloca i32
-  // CHECK-NEXT: alloca i32
 
   // Main initialization loop.
   // CHECK-NEXT: [[BEGIN:%.*]] = getelementptr inbounds [4 x [7 x [[A]]]]* [[V]], i32 0, i32 0, i32 0
-- 
2.40.0
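The CodeGenFunction.h hunks above drop the per-cleanup EH destination machinery (NextEHDestIndex, UnwindDest, RethrowBlock) in favor of a single InnermostEHScope link, so every EH scope knows the scope that encloses it and dispatch is resolved by walking that chain. Below is a minimal standalone sketch of that delegation idea, using hypothetical types and an index-based "stack" rather than Clang's actual EHScopeStack, just to show how an innermost-active-EH-scope query falls out of the per-scope enclosing link:

// Standalone illustration only: hypothetical Scope type, not Clang's
// EHScopeStack. Each scope records its enclosing EH scope, so finding the
// innermost *active* EH scope is a short walk up the delegation chain.
#include <cstddef>
#include <iostream>
#include <vector>

constexpr std::size_t npos = static_cast<std::size_t>(-1);

struct Scope {
  bool isCleanup;               // only cleanups can be deactivated
  bool active;
  std::size_t enclosingEHScope; // index of the enclosing EH scope, or npos
};

// Walk the chain of enclosing EH scopes, skipping inactive cleanups.
std::size_t innermostActiveEHScope(const std::vector<Scope> &scopes,
                                   std::size_t innermost) {
  for (std::size_t i = innermost; i != npos; i = scopes[i].enclosingEHScope)
    if (!scopes[i].isCleanup || scopes[i].active)
      return i;
  return npos; // nothing on the chain: no landing pad required
}

int main() {
  // Outermost first: a catch scope, an inactive cleanup, an active cleanup.
  std::vector<Scope> scopes = {
      {/*isCleanup=*/false, /*active=*/true, npos}, // 0: catch, outermost
      {/*isCleanup=*/true, /*active=*/false, 0},    // 1: delegates to 0
      {/*isCleanup=*/true, /*active=*/true, 1},     // 2: delegates to 1
  };
  std::cout << innermostActiveEHScope(scopes, 2) << "\n"; // prints 2
  std::cout << innermostActiveEHScope(scopes, 1) << "\n"; // prints 0
}

Because each scope carries its own enclosing link, popping a scope is just restoring that link, which is also why requiresLandingPad() in the patch reduces to the single test InnermostEHScope != stable_end().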