From: John McCall
Date: Tue, 13 Jul 2010 20:32:21 +0000 (+0000)
Subject: Teach IR generation how to lazily emit cleanups. This has a lot of advantages,
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=da65ea86482bc116906edfb9ba1d7124f76cc867;p=clang

Teach IR generation how to lazily emit cleanups. This has a lot of advantages,
mostly in avoiding unnecessary work at compile time, but also in producing
more sensible block orderings.

Move the destructor cleanups for local variables over to use lazy cleanups.
Eventually all cleanups will do this; for now we have some awkward code
duplication.

Tell IR generation just to never produce landing pads in -fno-exceptions.
This is a much more comprehensive solution to a problem which was previously
only half-solved by checks in most of the cleanup-generation spots.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@108270 91177308-0d34-0410-b5e6-96231b3b80d8
---

diff --git a/lib/CodeGen/CGClass.cpp b/lib/CodeGen/CGClass.cpp
index df5ea18c0c..c50fe90f8a 100644
--- a/lib/CodeGen/CGClass.cpp
+++ b/lib/CodeGen/CGClass.cpp
@@ -340,7 +340,7 @@ static void EmitBaseInitializer(CodeGenFunction &CGF,
   if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor()) {
     // FIXME: Is this OK for C++0x delegating constructors?
-    CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup);
+    CodeGenFunction::CleanupBlock Cleanup(CGF, EHCleanup);
 
     CXXDestructorDecl *DD = BaseClassDecl->getDestructor();
     CGF.EmitCXXDestructorCall(DD, Dtor_Base, isBaseVirtual, V);
@@ -534,7 +534,7 @@ static void EmitMemberInitializer(CodeGenFunction &CGF,
       CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
       if (!RD->hasTrivialDestructor()) {
         // FIXME: Is this OK for C++0x delegating constructors?
-        CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup);
+        CodeGenFunction::CleanupBlock Cleanup(CGF, EHCleanup);
 
         llvm::Value *ThisPtr = CGF.LoadCXXThis();
         LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);
diff --git a/lib/CodeGen/CGDecl.cpp b/lib/CodeGen/CGDecl.cpp
index 959a9ae483..1a62ea9555 100644
--- a/lib/CodeGen/CGDecl.cpp
+++ b/lib/CodeGen/CGDecl.cpp
@@ -388,6 +388,58 @@ const llvm::Type *CodeGenFunction::BuildByRefType(const ValueDecl *D) {
   return Info.first;
 }
 
+namespace {
+  struct CallArrayDtor : EHScopeStack::LazyCleanup {
+    CallArrayDtor(const CXXDestructorDecl *Dtor,
+                  const ConstantArrayType *Type,
+                  llvm::Value *Loc)
+      : Dtor(Dtor), Type(Type), Loc(Loc) {}
+
+    const CXXDestructorDecl *Dtor;
+    const ConstantArrayType *Type;
+    llvm::Value *Loc;
+
+    void Emit(CodeGenFunction &CGF, bool IsForEH) {
+      QualType BaseElementTy = CGF.getContext().getBaseElementType(Type);
+      const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
+      BasePtr = llvm::PointerType::getUnqual(BasePtr);
+      llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(Loc, BasePtr);
+      CGF.EmitCXXAggrDestructorCall(Dtor, Type, BaseAddrPtr);
+    }
+  };
+
+  struct CallVarDtor : EHScopeStack::LazyCleanup {
+    CallVarDtor(const CXXDestructorDecl *Dtor,
+                llvm::Value *NRVOFlag,
+                llvm::Value *Loc)
+      : Dtor(Dtor), NRVOFlag(NRVOFlag), Loc(Loc) {}
+
+    const CXXDestructorDecl *Dtor;
+    llvm::Value *NRVOFlag;
+    llvm::Value *Loc;
+
+    void Emit(CodeGenFunction &CGF, bool IsForEH) {
+      // Along the exceptions path we always execute the dtor.
+      bool NRVO = !IsForEH && NRVOFlag;
+
+      llvm::BasicBlock *SkipDtorBB = 0;
+      if (NRVO) {
+        // If we exited via NRVO, we skip the destructor call.
+        llvm::BasicBlock *RunDtorBB = CGF.createBasicBlock("nrvo.unused");
+        SkipDtorBB = CGF.createBasicBlock("nrvo.skipdtor");
+        llvm::Value *DidNRVO = CGF.Builder.CreateLoad(NRVOFlag, "nrvo.val");
+        CGF.Builder.CreateCondBr(DidNRVO, SkipDtorBB, RunDtorBB);
+        CGF.EmitBlock(RunDtorBB);
+      }
+
+      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
+                                /*ForVirtualBase=*/false, Loc);
+
+      if (NRVO) CGF.EmitBlock(SkipDtorBB);
+    }
+  };
+}
+
 /// EmitLocalBlockVarDecl - Emit code and set up an entry in LocalDeclMap for a
 /// variable declaration with auto, register, or no storage class specifier.
 /// These turn into simple stack objects, or GlobalValues depending on target.
@@ -686,53 +738,11 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
 
       if (const ConstantArrayType *Array =
             getContext().getAsConstantArrayType(Ty)) {
-        CleanupBlock Scope(*this, NormalCleanup);
-
-        QualType BaseElementTy = getContext().getBaseElementType(Array);
-        const llvm::Type *BasePtr = ConvertType(BaseElementTy);
-        BasePtr = llvm::PointerType::getUnqual(BasePtr);
-        llvm::Value *BaseAddrPtr =
-          Builder.CreateBitCast(Loc, BasePtr);
-        EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
-
-        if (Exceptions) {
-          Scope.beginEHCleanup();
-
-          QualType BaseElementTy = getContext().getBaseElementType(Array);
-          const llvm::Type *BasePtr = ConvertType(BaseElementTy);
-          BasePtr = llvm::PointerType::getUnqual(BasePtr);
-          llvm::Value *BaseAddrPtr =
-            Builder.CreateBitCast(Loc, BasePtr);
-          EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
-        }
+        EHStack.pushLazyCleanup<CallArrayDtor>(NormalAndEHCleanup,
+                                               D, Array, Loc);
       } else {
-        // Normal destruction.
-        CleanupBlock Scope(*this, NormalCleanup);
-
-        llvm::BasicBlock *SkipDtor = 0;
-        if (NRVO) {
-          // If we exited via NRVO, we skip the destructor call.
-          llvm::BasicBlock *NoNRVO = createBasicBlock("nrvo.unused");
-          SkipDtor = createBasicBlock("nrvo.skipdtor");
-          Builder.CreateCondBr(Builder.CreateLoad(NRVOFlag, "nrvo.val"),
-                               SkipDtor,
-                               NoNRVO);
-          EmitBlock(NoNRVO);
-        }
-
-        // We don't call the destructor along the normal edge if we're
-        // applying the NRVO.
-        EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
-                              Loc);
-
-        if (NRVO) EmitBlock(SkipDtor);
-
-        // Along the exceptions path we always execute the dtor.
-        if (Exceptions) {
-          Scope.beginEHCleanup();
-          EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
-                                Loc);
-        }
+        EHStack.pushLazyCleanup<CallVarDtor>(NormalAndEHCleanup,
+                                             D, NRVOFlag, Loc);
       }
     }
   }
diff --git a/lib/CodeGen/CGException.cpp b/lib/CodeGen/CGException.cpp
index 085dddd95d..b10e5ae6f6 100644
--- a/lib/CodeGen/CGException.cpp
+++ b/lib/CodeGen/CGException.cpp
@@ -62,12 +62,37 @@ EHScopeStack::getEnclosingEHCleanup(iterator it) const {
         return stabilize(it);
       return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
     }
+    if (isa<EHLazyCleanupScope>(*it)) {
+      if (cast<EHLazyCleanupScope>(*it).isEHCleanup())
+        return stabilize(it);
+      return cast<EHLazyCleanupScope>(*it).getEnclosingEHCleanup();
+    }
     ++it;
   } while (it != end());
   return stable_end();
 }
 
+void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
+  assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
+  char *Buffer = allocate(EHLazyCleanupScope::getSizeForCleanupSize(Size));
+  bool IsNormalCleanup = Kind != EHCleanup;
+  bool IsEHCleanup = Kind != NormalCleanup;
+  EHLazyCleanupScope *Scope =
+    new (Buffer) EHLazyCleanupScope(IsNormalCleanup,
+                                    IsEHCleanup,
+                                    Size,
+                                    BranchFixups.size(),
+                                    InnermostNormalCleanup,
+                                    InnermostEHCleanup);
+  if (IsNormalCleanup)
+    InnermostNormalCleanup = stable_begin();
+  if (IsEHCleanup)
+    InnermostEHCleanup = stable_begin();
+
+  return Scope->getCleanupBuffer();
+}
+
 void EHScopeStack::pushCleanup(llvm::BasicBlock *NormalEntry,
                                llvm::BasicBlock *NormalExit,
                                llvm::BasicBlock *EHEntry,
@@ -86,11 +111,18 @@ void EHScopeStack::pushCleanup(llvm::BasicBlock *NormalEntry,
 
 void EHScopeStack::popCleanup() {
   assert(!empty() && "popping exception stack when not empty");
-  assert(isa<EHCleanupScope>(*begin()));
-  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
-  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
-  InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
-  StartOfData += EHCleanupScope::getSize();
+  if (isa<EHLazyCleanupScope>(*begin())) {
+    EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin());
+    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
+    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
+    StartOfData += Cleanup.getAllocatedSize();
+  } else {
+    assert(isa<EHCleanupScope>(*begin()));
+    EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
+    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
+    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
+    StartOfData += EHCleanupScope::getSize();
+  }
 
   // Check whether we can shrink the branch-fixups stack.
   if (!BranchFixups.empty()) {
@@ -144,7 +176,11 @@ void EHScopeStack::popNullFixups() {
   assert(hasNormalCleanups());
 
   EHScopeStack::iterator it = find(InnermostNormalCleanup);
-  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
+  unsigned MinSize;
+  if (isa<EHCleanupScope>(*it))
+    MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
+  else
+    MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth();
   assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
 
   while (BranchFixups.size() > MinSize &&
@@ -391,7 +427,7 @@ static void EmitAnyExprToExn(CodeGenFunction &CGF, const Expr *E,
   // FIXME: StmtExprs probably force this to include a non-EH
   // handler.
   {
-    CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup);
+    CodeGenFunction::CleanupBlock Cleanup(CGF, EHCleanup);
 
     llvm::BasicBlock *FreeBB = CGF.createBasicBlock("free-exnobj");
     llvm::BasicBlock *DoneBB = CGF.createBasicBlock("free-exnobj.done");
@@ -598,13 +634,28 @@ void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
 /// affect exception handling.  Currently, the only non-EH scopes are
 /// normal-only cleanup scopes.
 static bool isNonEHScope(const EHScope &S) {
-  return isa<EHCleanupScope>(S) && !cast<EHCleanupScope>(S).isEHCleanup();
+  switch (S.getKind()) {
+  case EHScope::Cleanup:
+    return !cast<EHCleanupScope>(S).isEHCleanup();
+  case EHScope::LazyCleanup:
+    return !cast<EHLazyCleanupScope>(S).isEHCleanup();
+  case EHScope::Filter:
+  case EHScope::Catch:
+  case EHScope::Terminate:
+    return false;
+  }
+
+  // Suppress warning.
+  return false;
 }
 
 llvm::BasicBlock *CodeGenFunction::getInvokeDestImpl() {
   assert(EHStack.requiresLandingPad());
   assert(!EHStack.empty());
 
+  if (!Exceptions)
+    return 0;
+
   // Check the innermost scope for a cached landing pad.  If this is
   // a non-EH cleanup, we'll check enclosing scopes in EmitLandingPad.
   llvm::BasicBlock *LP = EHStack.begin()->getCachedLandingPad();
@@ -713,6 +764,12 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
        I != E; ++I) {
 
     switch (I->getKind()) {
+    case EHScope::LazyCleanup:
+      if (!HasEHCleanup)
+        HasEHCleanup = cast<EHLazyCleanupScope>(*I).isEHCleanup();
+      // We otherwise don't care about cleanups.
+      continue;
+
     case EHScope::Cleanup:
       if (!HasEHCleanup)
         HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup();
@@ -954,8 +1011,7 @@ static llvm::Value *CallBeginCatch(CodeGenFunction &CGF, llvm::Value *Exn) {
   Call->setDoesNotThrow();
 
   {
-    CodeGenFunction::CleanupBlock EndCatchCleanup(CGF,
-                                       CodeGenFunction::NormalAndEHCleanup);
+    CodeGenFunction::CleanupBlock EndCatchCleanup(CGF, NormalAndEHCleanup);
 
     // __cxa_end_catch never throws, so this can just be a call.
     CGF.Builder.CreateCall(getEndCatchFn(CGF))->setDoesNotThrow();
@@ -1213,13 +1269,11 @@ CodeGenFunction::EnterFinallyBlock(const Stmt *Body,
 
   // Enter a normal cleanup which will perform the @finally block.
   {
-    CodeGenFunction::CleanupBlock
-      NormalCleanup(*this, CodeGenFunction::NormalCleanup);
+    CodeGenFunction::CleanupBlock Cleanup(*this, NormalCleanup);
 
     // Enter a cleanup to call the end-catch function if one was provided.
     if (EndCatchFn) {
-      CodeGenFunction::CleanupBlock
-        FinallyExitCleanup(CGF, CodeGenFunction::NormalAndEHCleanup);
+      CodeGenFunction::CleanupBlock FinallyExitCleanup(CGF, NormalAndEHCleanup);
 
       llvm::BasicBlock *EndCatchBB = createBasicBlock("finally.endcatch");
       llvm::BasicBlock *CleanupContBB = createBasicBlock("finally.cleanup.cont");
@@ -1435,3 +1489,4 @@ CodeGenFunction::CleanupBlock::~CleanupBlock() {
 
   CGF.Builder.restoreIP(SavedIP);
 }
+void EHScopeStack::LazyCleanup::_anchor() {}
diff --git a/lib/CodeGen/CGException.h b/lib/CodeGen/CGException.h
index 8755dca2b2..80739cd8d7 100644
--- a/lib/CodeGen/CGException.h
+++ b/lib/CodeGen/CGException.h
@@ -31,13 +31,13 @@ namespace CodeGen {
 class EHScope {
   llvm::BasicBlock *CachedLandingPad;
 
-  unsigned K : 2;
+  unsigned K : 3;
 
 protected:
-  enum { BitsRemaining = 30 };
+  enum { BitsRemaining = 29 };
 
 public:
-  enum Kind { Cleanup, Catch, Terminate, Filter };
+  enum Kind { Cleanup, LazyCleanup, Catch, Terminate, Filter };
 
   EHScope(Kind K) : CachedLandingPad(0), K(K) {}
 
@@ -127,6 +127,87 @@ public:
   }
 };
 
+/// A cleanup scope which generates the cleanup blocks lazily.
+class EHLazyCleanupScope : public EHScope {
+  /// Whether this cleanup needs to be run along normal edges.
+  bool IsNormalCleanup : 1;
+
+  /// Whether this cleanup needs to be run along exception edges.
+  bool IsEHCleanup : 1;
+
+  /// The amount of extra storage needed by the LazyCleanup.
+  /// Always a multiple of the scope-stack alignment.
+  unsigned CleanupSize : 12;
+
+  /// The number of fixups required by enclosing scopes (not including
+  /// this one).  If this is the top cleanup scope, all the fixups
+  /// from this index onwards belong to this scope.
+  unsigned FixupDepth : BitsRemaining - 14;
+
+  /// The nearest normal cleanup scope enclosing this one.
+  EHScopeStack::stable_iterator EnclosingNormal;
+
+  /// The nearest EH cleanup scope enclosing this one.
+  EHScopeStack::stable_iterator EnclosingEH;
+
+  /// The dual entry/exit block along the normal edge.  This is lazily
+  /// created if needed before the cleanup is popped.
+  llvm::BasicBlock *NormalBlock;
+
+  /// The dual entry/exit block along the EH edge.  This is lazily
+  /// created if needed before the cleanup is popped.
+  llvm::BasicBlock *EHBlock;
+
+public:
+  /// Gets the size required for a lazy cleanup scope with the given
+  /// cleanup-data requirements.
+  static size_t getSizeForCleanupSize(size_t Size) {
+    return sizeof(EHLazyCleanupScope) + Size;
+  }
+
+  size_t getAllocatedSize() const {
+    return sizeof(EHLazyCleanupScope) + CleanupSize;
+  }
+
+  EHLazyCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize,
+                     unsigned FixupDepth,
+                     EHScopeStack::stable_iterator EnclosingNormal,
+                     EHScopeStack::stable_iterator EnclosingEH)
+    : EHScope(EHScope::LazyCleanup),
+      IsNormalCleanup(IsNormal), IsEHCleanup(IsEH),
+      CleanupSize(CleanupSize), FixupDepth(FixupDepth),
+      EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
+      NormalBlock(0), EHBlock(0)
+  {}
+
+  bool isNormalCleanup() const { return IsNormalCleanup; }
+  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
+  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }
+
+  bool isEHCleanup() const { return IsEHCleanup; }
+  llvm::BasicBlock *getEHBlock() const { return EHBlock; }
+  void setEHBlock(llvm::BasicBlock *BB) { EHBlock = BB; }
+
+  unsigned getFixupDepth() const { return FixupDepth; }
+  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
+    return EnclosingNormal;
+  }
+  EHScopeStack::stable_iterator getEnclosingEHCleanup() const {
+    return EnclosingEH;
+  }
+
+  size_t getCleanupSize() const { return CleanupSize; }
+  void *getCleanupBuffer() { return this + 1; }
+
+  EHScopeStack::LazyCleanup *getCleanup() {
+    return reinterpret_cast<EHScopeStack::LazyCleanup*>(getCleanupBuffer());
+  }
+
+  static bool classof(const EHScope *Scope) {
+    return (Scope->getKind() == LazyCleanup);
+  }
+};
+
 /// A scope which needs to execute some code if we try to unwind ---
 /// either normally, via the EH mechanism, or both --- through it.
 class EHCleanupScope : public EHScope {
@@ -267,6 +348,11 @@ public:
              static_cast<const EHFilterScope*>(get())->getNumFilters());
       break;
 
+    case EHScope::LazyCleanup:
+      Ptr += static_cast<const EHLazyCleanupScope*>(get())
+        ->getAllocatedSize();
+      break;
+
     case EHScope::Cleanup:
       Ptr += EHCleanupScope::getSize();
       break;
diff --git a/lib/CodeGen/CGObjCGNU.cpp b/lib/CodeGen/CGObjCGNU.cpp
index c9da348cbf..f3c80bcf08 100644
--- a/lib/CodeGen/CGObjCGNU.cpp
+++ b/lib/CodeGen/CGObjCGNU.cpp
@@ -1871,8 +1871,7 @@ void CGObjCGNU::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
 
   // Register an all-paths cleanup to release the lock.
   {
-    CodeGenFunction::CleanupBlock
-      ReleaseScope(CGF, CodeGenFunction::NormalAndEHCleanup);
+    CodeGenFunction::CleanupBlock ReleaseScope(CGF, NormalAndEHCleanup);
 
     llvm::Value *SyncExit = CGM.CreateRuntimeFunction(FTy, "objc_sync_exit");
     SyncArg = CGF.Builder.CreateBitCast(SyncArg, IdTy);
diff --git a/lib/CodeGen/CGObjCMac.cpp b/lib/CodeGen/CGObjCMac.cpp
index 0a766d5c82..7293537302 100644
--- a/lib/CodeGen/CGObjCMac.cpp
+++ b/lib/CodeGen/CGObjCMac.cpp
@@ -2697,8 +2697,7 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
 
   // Push a normal cleanup to leave the try scope.
   {
-    CodeGenFunction::CleanupBlock
-      FinallyScope(CGF, CodeGenFunction::NormalCleanup);
+    CodeGenFunction::CleanupBlock FinallyScope(CGF, NormalCleanup);
 
     // Check whether we need to call objc_exception_try_exit.
     // In optimized code, this branch will always be folded.
@@ -5693,8 +5692,7 @@ CGObjCNonFragileABIMac::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
 
   // Register an all-paths cleanup to release the lock.
   {
-    CodeGenFunction::CleanupBlock
-      ReleaseScope(CGF, CodeGenFunction::NormalAndEHCleanup);
+    CodeGenFunction::CleanupBlock ReleaseScope(CGF, NormalAndEHCleanup);
 
     CGF.Builder.CreateCall(ObjCTypes.getSyncExitFn(), SyncArg)
       ->setDoesNotThrow();
@@ -5804,8 +5802,7 @@ void CGObjCNonFragileABIMac::EmitTryStmt(CodeGen::CodeGenFunction &CGF,
 
     // Add a cleanup to leave the catch.
     {
-      CodeGenFunction::CleanupBlock
-        EndCatchBlock(CGF, CodeGenFunction::NormalAndEHCleanup);
+      CodeGenFunction::CleanupBlock EndCatchBlock(CGF, NormalAndEHCleanup);
 
       // __objc_end_catch never throws.
       CGF.Builder.CreateCall(ObjCTypes.getObjCEndCatchFn())
diff --git a/lib/CodeGen/CodeGenFunction.cpp b/lib/CodeGen/CodeGenFunction.cpp
index 5e505c2d82..eb6c4361be 100644
--- a/lib/CodeGen/CodeGenFunction.cpp
+++ b/lib/CodeGen/CodeGenFunction.cpp
@@ -825,11 +825,168 @@ static void SimplifyCleanupEdges(CodeGenFunction &CGF,
   SimplifyCleanupEntry(CGF, Entry);
 }
 
+static void EmitLazyCleanup(CodeGenFunction &CGF,
+                            EHScopeStack::LazyCleanup *Fn,
+                            bool ForEH) {
+  if (ForEH) CGF.EHStack.pushTerminate();
+  Fn->Emit(CGF, ForEH);
+  if (ForEH) CGF.EHStack.popTerminate();
+  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
+}
+
+static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
+                                    EHScopeStack::LazyCleanup *Fn,
+                                    bool ForEH,
+                                    llvm::BasicBlock *Entry) {
+  assert(Entry && "no entry block for cleanup");
+
+  // Remove the switch and load from the end of the entry block.
+  llvm::Instruction *Switch = &Entry->getInstList().back();
+  Entry->getInstList().remove(Switch);
+  assert(isa<llvm::SwitchInst>(Switch));
+  llvm::Instruction *Load = &Entry->getInstList().back();
+  Entry->getInstList().remove(Load);
+  assert(isa<llvm::LoadInst>(Load));
+
+  assert(Entry->getInstList().empty() &&
+         "lazy cleanup block not empty after removing load/switch pair?");
+
+  // Emit the actual cleanup at the end of the entry block.
+  CGF.Builder.SetInsertPoint(Entry);
+  EmitLazyCleanup(CGF, Fn, ForEH);
+
+  // Put the load and switch at the end of the exit block.
+  llvm::BasicBlock *Exit = CGF.Builder.GetInsertBlock();
+  Exit->getInstList().push_back(Load);
+  Exit->getInstList().push_back(Switch);
+
+  // Clean up the edges if possible.
+  SimplifyCleanupEdges(CGF, Entry, Exit);
+
+  CGF.Builder.ClearInsertionPoint();
+}
+
+static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
+  assert(isa<EHLazyCleanupScope>(*CGF.EHStack.begin()) && "top not a cleanup!");
+  EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*CGF.EHStack.begin());
+  assert(Scope.getFixupDepth() <= CGF.EHStack.getNumBranchFixups());
+
+  // Check whether we need an EH cleanup.  This is only true if we've
+  // generated a lazy EH cleanup block.
+  llvm::BasicBlock *EHEntry = Scope.getEHBlock();
+  bool RequiresEHCleanup = (EHEntry != 0);
+
+  // Check the three conditions which might require a normal cleanup:
+
+  //   - whether there are branch fix-ups through this cleanup
+  unsigned FixupDepth = Scope.getFixupDepth();
+  bool HasFixups = CGF.EHStack.getNumBranchFixups() != FixupDepth;
+
+  //   - whether control has already been threaded through this cleanup
+  llvm::BasicBlock *NormalEntry = Scope.getNormalBlock();
+  bool HasExistingBranches = (NormalEntry != 0);
+
+  //   - whether there's a fallthrough
+  llvm::BasicBlock *FallthroughSource = CGF.Builder.GetInsertBlock();
+  bool HasFallthrough = (FallthroughSource != 0);
+
+  bool RequiresNormalCleanup = false;
+  if (Scope.isNormalCleanup() &&
+      (HasFixups || HasExistingBranches || HasFallthrough)) {
+    RequiresNormalCleanup = true;
+  }
+
+  // If we don't need the cleanup at all, we're done.
+  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
+    CGF.EHStack.popCleanup();
+    assert(CGF.EHStack.getNumBranchFixups() == 0 ||
+           CGF.EHStack.hasNormalCleanups());
+    return;
+  }
+
+  // Copy the cleanup emission data out.  Note that SmallVector
+  // guarantees maximal alignment for its buffer regardless of its
+  // type parameter.
+  llvm::SmallVector<char, 8*sizeof(void*)> CleanupBuffer;
+  CleanupBuffer.reserve(Scope.getCleanupSize());
+  memcpy(CleanupBuffer.data(),
+         Scope.getCleanupBuffer(), Scope.getCleanupSize());
+  CleanupBuffer.set_size(Scope.getCleanupSize());
+  EHScopeStack::LazyCleanup *Fn =
+    reinterpret_cast<EHScopeStack::LazyCleanup*>(CleanupBuffer.data());
+
+  // We're done with the scope; pop it off so we can emit the cleanups.
+  CGF.EHStack.popCleanup();
+
+  if (RequiresNormalCleanup) {
+    // If we have a fallthrough and no other need for the cleanup,
+    // emit it directly.
+    if (HasFallthrough && !HasFixups && !HasExistingBranches) {
+      EmitLazyCleanup(CGF, Fn, /*ForEH*/ false);
+
+    // Otherwise, the best approach is to thread everything through
+    // the cleanup block and then try to clean up after ourselves.
+    } else {
+      // Force the entry block to exist.
+      if (!HasExistingBranches) {
+        NormalEntry = CGF.createBasicBlock("cleanup");
+        CreateCleanupSwitch(CGF, NormalEntry);
+      }
+
+      CGF.EmitBlock(NormalEntry);
+
+      // Thread the fallthrough edge through the (momentarily trivial)
+      // cleanup.
+      llvm::BasicBlock *FallthroughDestination = 0;
+      if (HasFallthrough) {
+        assert(isa<llvm::BranchInst>(FallthroughSource->getTerminator()));
+        FallthroughDestination = CGF.createBasicBlock("cleanup.cont");
+
+        BranchFixup Fix;
+        Fix.Destination = FallthroughDestination;
+        Fix.LatestBranch = FallthroughSource->getTerminator();
+        Fix.LatestBranchIndex = 0;
+        Fix.Origin = Fix.LatestBranch;
+
+        // Restore fixup invariant.  EmitBlock added a branch to the
+        // cleanup which we need to redirect to the destination.
+        cast<llvm::BranchInst>(Fix.LatestBranch)
+          ->setSuccessor(0, Fix.Destination);
+
+        ThreadFixupThroughCleanup(CGF, Fix, NormalEntry, NormalEntry);
+      }
+
+      // Thread any "real" fixups we need to thread.
+      for (unsigned I = FixupDepth, E = CGF.EHStack.getNumBranchFixups();
+           I != E; ++I)
+        if (CGF.EHStack.getBranchFixup(I).Destination)
+          ThreadFixupThroughCleanup(CGF, CGF.EHStack.getBranchFixup(I),
+                                    NormalEntry, NormalEntry);
+
+      SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ false, NormalEntry);
+
+      if (HasFallthrough)
+        CGF.EmitBlock(FallthroughDestination);
+    }
+  }
+
+  // Emit the EH cleanup if required.
+  if (RequiresEHCleanup) {
+    CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();
+    CGF.EmitBlock(EHEntry);
+    SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ true, EHEntry);
+    CGF.Builder.restoreIP(SavedIP);
+  }
+}
+
 /// Pops a cleanup block.  If the block includes a normal cleanup, the
 /// current insertion point is threaded through the cleanup, as are
 /// any branch fixups on the cleanup.
 void CodeGenFunction::PopCleanupBlock() {
   assert(!EHStack.empty() && "cleanup stack is empty!");
+  if (isa<EHLazyCleanupScope>(*EHStack.begin()))
+    return PopLazyCleanupBlock(*this);
+
   assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
   EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
   assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
@@ -1007,6 +1164,16 @@ void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
       if (Scope.isNormalCleanup())
         ThreadFixupThroughCleanup(*this, Fixup, Scope.getNormalEntry(),
                                   Scope.getNormalExit());
+    } else if (isa<EHLazyCleanupScope>(*I)) {
+      EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+      if (Scope.isNormalCleanup()) {
+        llvm::BasicBlock *Block = Scope.getNormalBlock();
+        if (!Block) {
+          Block = createBasicBlock("cleanup");
+          Scope.setNormalBlock(Block);
+        }
+        ThreadFixupThroughCleanup(*this, Fixup, Block, Block);
+      }
     }
   }
 
@@ -1046,6 +1213,16 @@ void CodeGenFunction::EmitBranchThroughEHCleanup(JumpDest Dest) {
       if (Scope.isEHCleanup())
        ThreadFixupThroughCleanup(*this, Fixup, Scope.getEHEntry(),
                                  Scope.getEHExit());
+    } else if (isa<EHLazyCleanupScope>(*I)) {
+      EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+      if (Scope.isEHCleanup()) {
+        llvm::BasicBlock *Block = Scope.getEHBlock();
+        if (!Block) {
+          Block = createBasicBlock("eh.cleanup");
+          Scope.setEHBlock(Block);
+        }
+        ThreadFixupThroughCleanup(*this, Fixup, Block, Block);
+      }
     }
   }
 
diff --git a/lib/CodeGen/CodeGenFunction.h b/lib/CodeGen/CodeGenFunction.h
index 26fb882e56..ce3453ae5e 100644
--- a/lib/CodeGen/CodeGenFunction.h
+++ b/lib/CodeGen/CodeGenFunction.h
@@ -95,6 +95,8 @@ struct BranchFixup {
   unsigned LatestBranchIndex;
 };
 
+enum CleanupKind { NormalAndEHCleanup, EHCleanup, NormalCleanup };
+
 /// A stack of scopes which respond to exceptions, including cleanups
 /// and catch blocks.
 class EHScopeStack {
@@ -123,6 +125,28 @@ public:
     }
   };
 
+  /// A lazy cleanup.  These will be allocated on the cleanup stack
+  /// and so must be trivially copyable.  We "enforce" this by
+  /// providing no virtual destructor so that subclasses will be
+  /// encouraged to contain no non-POD types.
+  ///
+  /// LazyCleanup implementations should generally be declared in an
+  /// anonymous namespace.
+  class LazyCleanup {
+    // Anchor the construction vtable.
+    virtual void _anchor();
+
+  public:
+    /// Emit the cleanup.  For normal cleanups, this is run in the
+    /// same EH context as when the cleanup was pushed, i.e. the
+    /// immediately-enclosing context of the cleanup scope.  For
+    /// EH cleanups, this is run in a terminate context.
+    ///
+    /// \param IsForEHCleanup true if this is for an EH cleanup, false
+    /// if for a normal cleanup.
+    virtual void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) = 0;
+  };
+
 private:
   // The implementation for this class is in CGException.h and
   // CGException.cpp; the definition is here because it's used as a
@@ -171,6 +195,8 @@ private:
 
   void popNullFixups();
 
+  void *pushLazyCleanup(CleanupKind K, size_t DataSize);
+
 public:
   EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
                    InnermostNormalCleanup(stable_end()),
@@ -178,6 +204,32 @@ public:
                    CatchDepth(0) {}
   ~EHScopeStack() { delete[] StartOfBuffer; }
 
+  // Variadic templates would make this not terrible.
+
+  /// Push a lazily-created cleanup on the stack.
+  template <class T, class A0, class A1>
+  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1) {
+    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
+    LazyCleanup *Obj = new(Buffer) T(a0, a1);
+    (void) Obj;
+  }
+
+  /// Push a lazily-created cleanup on the stack.
+  template <class T, class A0, class A1, class A2>
+  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
+    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
+    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2);
+    (void) Obj;
+  }
+
+  /// Push a lazily-created cleanup on the stack.
+  template <class T, class A0, class A1, class A2, class A3>
+  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
+    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
+    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
+    (void) Obj;
+  }
+
   /// Push a cleanup on the stack.
   void pushCleanup(llvm::BasicBlock *NormalEntry,
                    llvm::BasicBlock *NormalExit,
@@ -375,8 +427,6 @@ public:
                          llvm::Constant *RethrowFn);
   void ExitFinallyBlock(FinallyInfo &FinallyInfo);
 
-  enum CleanupKind { NormalAndEHCleanup, EHCleanup, NormalCleanup };
-
   /// PushDestructorCleanup - Push a cleanup to call the
   /// complete-object destructor of an object of the given type at the
   /// given address.  Does nothing if T is not a C++ class type with a
diff --git a/test/CodeGenCXX/condition.cpp b/test/CodeGenCXX/condition.cpp
index bbc6d2f73b..652e7c89c1 100644
--- a/test/CodeGenCXX/condition.cpp
+++ b/test/CodeGenCXX/condition.cpp
@@ -26,6 +26,7 @@ struct Y {
 
 X getX();
 
+// CHECK: define void @_Z11if_destructi(
 void if_destruct(int z) {
   // Verify that the condition variable is destroyed at the end of the
   // "if" statement.
diff --git a/test/CodeGenCXX/nrvo.cpp b/test/CodeGenCXX/nrvo.cpp
index 6181f0eee1..8d19b1effe 100644
--- a/test/CodeGenCXX/nrvo.cpp
+++ b/test/CodeGenCXX/nrvo.cpp
@@ -57,20 +57,15 @@ X test2(bool B) {
   // CHECK-EH:      call void @_ZN1XC1Ev
   // CHECK-EH-NEXT: invoke void @_ZN1XC1Ev
-  //   -> %invoke.cont1, %lpad
+  //   -> %invoke.cont, %lpad
 
-  // %invoke.cont1:
+  // %invoke.cont:
   // CHECK-EH: br i1
   //   -> %if.then, %if.end
 
   // %if.then: returning 'x'
   // CHECK-EH: invoke void @_ZN1XC1ERKS_
-  //   -> %cleanup, %lpad5
-
-  // %invoke.cont: rethrow block for %eh.cleanup.
-  // This really should be elsewhere in the function.
-  // CHECK-EH: call void @_Unwind_Resume_or_Rethrow
-  // CHECK-EH-NEXT: unreachable
+  //   -> %cleanup, %lpad1
 
   // %lpad: landing pad for ctor of 'y', dtor of 'y'
   // CHECK-EH: call i8* @llvm.eh.exception()
   // CHECK-EH: store
   // CHECK-EH-NEXT: br label
   //   -> %eh.cleanup
 
@@ -78,25 +73,30 @@ X test2(bool B) {
-  // %invoke.cont2: normal cleanup for 'x'
-  // CHECK-EH: call void @_ZN1XD1Ev
-  // CHECK-EH-NEXT: ret void
-
-  // %lpad5: landing pad for return copy ctors, EH cleanup for 'y'
+  // %lpad1: landing pad for return copy ctors, EH cleanup for 'y'
   // CHECK-EH: invoke void @_ZN1XD1Ev
   //   -> %eh.cleanup, %terminate.lpad
 
   // %if.end: returning 'y'
   // CHECK-EH: invoke void @_ZN1XC1ERKS_
-  //   -> %cleanup, %lpad5
+  //   -> %cleanup, %lpad1
 
   // %cleanup: normal cleanup for 'y'
   // CHECK-EH: invoke void @_ZN1XD1Ev
-  //   -> %invoke.cont2, %lpad
+  //   -> %invoke.cont11, %lpad
+
+  // %invoke.cont11: normal cleanup for 'x'
+  // CHECK-EH: call void @_ZN1XD1Ev
+  // CHECK-EH-NEXT: ret void
 
   // %eh.cleanup: EH cleanup for 'x'
   // CHECK-EH: invoke void @_ZN1XD1Ev
-  //   -> %invoke.cont, %terminate.lpad
+  //   -> %invoke.cont17, %terminate.lpad
+
+  // %invoke.cont17: rethrow block for %eh.cleanup.
+  // This really should be elsewhere in the function.
+  // CHECK-EH: call void @_Unwind_Resume_or_Rethrow
+  // CHECK-EH-NEXT: unreachable
 
   // %terminate.lpad: terminate landing pad.
   // CHECK-EH: call i8* @llvm.eh.exception()
diff --git a/test/CodeGenObjC/metadata_symbols.m b/test/CodeGenObjC/metadata_symbols.m
index 59441e523f..31639466e8 100644
--- a/test/CodeGenObjC/metadata_symbols.m
+++ b/test/CodeGenObjC/metadata_symbols.m
@@ -1,4 +1,4 @@
-// RUN: %clang_cc1 -triple x86_64-apple-darwin10 -fobjc-nonfragile-abi -emit-llvm -o %t %s
+// RUN: %clang_cc1 -triple x86_64-apple-darwin10 -fobjc-nonfragile-abi -emit-llvm -fexceptions -o %t %s
 // RUN: FileCheck -check-prefix=CHECK-X86_64 < %t %s
 // RUN: grep '@"OBJC_EHTYPE_$_EH3"' %t | count 3
 
diff --git a/test/CodeGenObjC/unwind-fn.m b/test/CodeGenObjC/unwind-fn.m
index 0aa8cde024..48217f07f9 100644
--- a/test/CodeGenObjC/unwind-fn.m
+++ b/test/CodeGenObjC/unwind-fn.m
@@ -1,5 +1,5 @@
-// RUN: %clang_cc1 -fobjc-nonfragile-abi -emit-llvm -o - %s | FileCheck --check-prefix=DEFAULT_EH %s
-// RUN: %clang_cc1 -fsjlj-exceptions -fobjc-nonfragile-abi -emit-llvm -o - %s | FileCheck --check-prefix=SJLJ_EH %s
+// RUN: %clang_cc1 -fobjc-nonfragile-abi -emit-llvm -fexceptions -o - %s | FileCheck --check-prefix=DEFAULT_EH %s
+// RUN: %clang_cc1 -fsjlj-exceptions -fobjc-nonfragile-abi -fexceptions -emit-llvm -o - %s | FileCheck --check-prefix=SJLJ_EH %s
 
 // DEFAULT_EH: declare void @_Unwind_Resume_or_Rethrow(i8*)
 // SJLJ_EH: declare void @_Unwind_SjLj_Resume(i8*)
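The heart of the patch is the storage scheme behind pushLazyCleanup: a cleanup
is a small, trivially-copyable object placement-new'd into a byte buffer owned
by the scope stack, and it is only lowered to IR when its scope is popped.
The following minimal, self-contained C++ sketch illustrates just that
allocation pattern in isolation. All names here (CleanupStack, PrintCleanup,
push, pop, Buffer, Offsets) are invented for illustration; the real
EHScopeStack additionally tracks normal vs. EH edges, branch fixups, and
cached cleanup blocks, none of which is modeled below.

// Standalone sketch (not Clang code) of the lazy-cleanup storage pattern.
#include <cassert>
#include <cstddef>
#include <cstdio>
#include <new>
#include <vector>

class CleanupStack {
public:
  // Like EHScopeStack::LazyCleanup: no virtual destructor, because the
  // object lives in raw buffer storage and must stay trivially copyable.
  struct LazyCleanup {
    virtual void Emit(bool ForEH) = 0;
  };

  // Like pushLazyCleanup<T>(Kind, a0, a1): construct T in place in our
  // storage instead of eagerly generating anything.
  template <class T, class A0, class A1>
  void push(A0 a0, A1 a1) {
    // Mirrors the commit's "cleanup type is misaligned" assertion.
    assert(sizeof(T) % sizeof(void*) == 0 && "cleanup type is misaligned");
    size_t Offset = Buffer.size();
    Buffer.resize(Offset + sizeof(T));
    new (&Buffer[Offset]) T(a0, a1);
    Offsets.push_back(Offset);
  }

  // Pop the innermost cleanup, "emitting" it only now (lazily).
  void pop(bool ForEH) {
    assert(!Offsets.empty() && "popping empty cleanup stack");
    size_t Offset = Offsets.back();
    Offsets.pop_back();
    // The real PopLazyCleanupBlock memcpys the record out of the stack
    // before running it; both rely on the trivially-copyable requirement.
    reinterpret_cast<LazyCleanup*>(&Buffer[Offset])->Emit(ForEH);
    Buffer.resize(Offset);
  }

private:
  std::vector<char> Buffer;    // variable-size cleanup records
  std::vector<size_t> Offsets; // start of each record
};

// An example cleanup, loosely analogous to CallVarDtor but just printing.
struct PrintCleanup : CleanupStack::LazyCleanup {
  const char *Name;
  long Id;
  PrintCleanup(const char *Name, long Id) : Name(Name), Id(Id) {}
  void Emit(bool ForEH) {
    std::printf("cleanup '%s' (#%ld) on %s path\n", Name, Id,
                ForEH ? "EH" : "normal");
  }
};

int main() {
  CleanupStack Stack;
  Stack.push<PrintCleanup>("outer", 1L);
  Stack.push<PrintCleanup>("inner", 2L);
  Stack.pop(false); // "inner" runs first: cleanups are LIFO
  Stack.pop(false);
  return 0;
}

The design point the sketch makes concrete is why laziness pays off: pushing a
cleanup is now just a buffer append, and the three-way decision in
PopLazyCleanupBlock (fallthrough only, threaded-through, or not needed at all)
can skip code generation entirely when no edge ever reaches the cleanup,
instead of eagerly emitting blocks that a later pass must clean up.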