granicus.if.org Git - clang/commitdiff
Teach IR generation how to lazily emit cleanups. This has a lot of advantages,
authorJohn McCall <rjmccall@apple.com>
Tue, 13 Jul 2010 20:32:21 +0000 (20:32 +0000)
committerJohn McCall <rjmccall@apple.com>
Tue, 13 Jul 2010 20:32:21 +0000 (20:32 +0000)
mostly in avoiding unnecessary work at compile time but also in producing more
sensible block orderings.

Move the destructor cleanups for local variables over to use lazy cleanups.
Eventually all cleanups will do this; for now we have some awkward code
duplication.

Tell IR generation just to never produce landing pads in -fno-exceptions.
This is a much more comprehensive solution to a problem which previously was
half-solved by checks in most cleanup-generation spots.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@108270 91177308-0d34-0410-b5e6-96231b3b80d8

12 files changed:
lib/CodeGen/CGClass.cpp
lib/CodeGen/CGDecl.cpp
lib/CodeGen/CGException.cpp
lib/CodeGen/CGException.h
lib/CodeGen/CGObjCGNU.cpp
lib/CodeGen/CGObjCMac.cpp
lib/CodeGen/CodeGenFunction.cpp
lib/CodeGen/CodeGenFunction.h
test/CodeGenCXX/condition.cpp
test/CodeGenCXX/nrvo.cpp
test/CodeGenObjC/metadata_symbols.m
test/CodeGenObjC/unwind-fn.m

index df5ea18c0c20da257479688ce0c0888cf15769d2..c50fe90f8a81c05459f526d8bf25e427c33b6e25 100644 (file)
@@ -340,7 +340,7 @@ static void EmitBaseInitializer(CodeGenFunction &CGF,
   
   if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor()) {
     // FIXME: Is this OK for C++0x delegating constructors?
-    CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup);
+    CodeGenFunction::CleanupBlock Cleanup(CGF, EHCleanup);
 
     CXXDestructorDecl *DD = BaseClassDecl->getDestructor();
     CGF.EmitCXXDestructorCall(DD, Dtor_Base, isBaseVirtual, V);
@@ -534,7 +534,7 @@ static void EmitMemberInitializer(CodeGenFunction &CGF,
     CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
     if (!RD->hasTrivialDestructor()) {
       // FIXME: Is this OK for C++0x delegating constructors?
-      CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup);
+      CodeGenFunction::CleanupBlock Cleanup(CGF, EHCleanup);
       
       llvm::Value *ThisPtr = CGF.LoadCXXThis();
       LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);
index 959a9ae483b026961f45fd84be15fbe0b44738cf..1a62ea95555d8e3da874ecfd6c5a57f5d030c58b 100644 (file)
@@ -388,6 +388,58 @@ const llvm::Type *CodeGenFunction::BuildByRefType(const ValueDecl *D) {
   return Info.first;
 }
 
+namespace {
+  struct CallArrayDtor : EHScopeStack::LazyCleanup {
+    CallArrayDtor(const CXXDestructorDecl *Dtor, 
+                  const ConstantArrayType *Type,
+                  llvm::Value *Loc)
+      : Dtor(Dtor), Type(Type), Loc(Loc) {}
+
+    const CXXDestructorDecl *Dtor;
+    const ConstantArrayType *Type;
+    llvm::Value *Loc;
+
+    void Emit(CodeGenFunction &CGF, bool IsForEH) {
+      QualType BaseElementTy = CGF.getContext().getBaseElementType(Type);
+      const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
+      BasePtr = llvm::PointerType::getUnqual(BasePtr);
+      llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(Loc, BasePtr);
+      CGF.EmitCXXAggrDestructorCall(Dtor, Type, BaseAddrPtr);
+    }
+  };
+
+  struct CallVarDtor : EHScopeStack::LazyCleanup {
+    CallVarDtor(const CXXDestructorDecl *Dtor,
+                llvm::Value *NRVOFlag,
+                llvm::Value *Loc)
+      : Dtor(Dtor), NRVOFlag(NRVOFlag), Loc(Loc) {}
+
+    const CXXDestructorDecl *Dtor;
+    llvm::Value *NRVOFlag;
+    llvm::Value *Loc;
+
+    void Emit(CodeGenFunction &CGF, bool IsForEH) {
+      // Along the exceptions path we always execute the dtor.
+      bool NRVO = !IsForEH && NRVOFlag;
+
+      llvm::BasicBlock *SkipDtorBB = 0;
+      if (NRVO) {
+        // If we exited via NRVO, we skip the destructor call.
+        llvm::BasicBlock *RunDtorBB = CGF.createBasicBlock("nrvo.unused");
+        SkipDtorBB = CGF.createBasicBlock("nrvo.skipdtor");
+        llvm::Value *DidNRVO = CGF.Builder.CreateLoad(NRVOFlag, "nrvo.val");
+        CGF.Builder.CreateCondBr(DidNRVO, SkipDtorBB, RunDtorBB);
+        CGF.EmitBlock(RunDtorBB);
+      }
+          
+      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
+                                /*ForVirtualBase=*/false, Loc);
+
+      if (NRVO) CGF.EmitBlock(SkipDtorBB);
+    }
+  };
+}
+
 /// EmitLocalBlockVarDecl - Emit code and set up an entry in LocalDeclMap for a
 /// variable declaration with auto, register, or no storage class specifier.
 /// These turn into simple stack objects, or GlobalValues depending on target.
@@ -686,53 +738,11 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
         
         if (const ConstantArrayType *Array = 
               getContext().getAsConstantArrayType(Ty)) {
-          CleanupBlock Scope(*this, NormalCleanup);
-
-          QualType BaseElementTy = getContext().getBaseElementType(Array);
-          const llvm::Type *BasePtr = ConvertType(BaseElementTy);
-          BasePtr = llvm::PointerType::getUnqual(BasePtr);
-          llvm::Value *BaseAddrPtr =
-            Builder.CreateBitCast(Loc, BasePtr);
-          EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
-
-          if (Exceptions) {
-            Scope.beginEHCleanup();
-
-            QualType BaseElementTy = getContext().getBaseElementType(Array);
-            const llvm::Type *BasePtr = ConvertType(BaseElementTy);
-            BasePtr = llvm::PointerType::getUnqual(BasePtr);
-            llvm::Value *BaseAddrPtr =
-              Builder.CreateBitCast(Loc, BasePtr);
-            EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
-          }
+          EHStack.pushLazyCleanup<CallArrayDtor>(NormalAndEHCleanup,
+                                                 D, Array, Loc);
         } else {
-          // Normal destruction. 
-          CleanupBlock Scope(*this, NormalCleanup);
-
-          llvm::BasicBlock *SkipDtor = 0;
-          if (NRVO) {
-            // If we exited via NRVO, we skip the destructor call.
-            llvm::BasicBlock *NoNRVO = createBasicBlock("nrvo.unused");
-            SkipDtor = createBasicBlock("nrvo.skipdtor");
-            Builder.CreateCondBr(Builder.CreateLoad(NRVOFlag, "nrvo.val"),
-                                 SkipDtor,
-                                 NoNRVO);
-            EmitBlock(NoNRVO);
-          }
-          
-          // We don't call the destructor along the normal edge if we're
-          // applying the NRVO.
-          EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
-                                Loc);
-
-          if (NRVO) EmitBlock(SkipDtor);
-
-          // Along the exceptions path we always execute the dtor.
-          if (Exceptions) {
-            Scope.beginEHCleanup();
-            EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
-                                  Loc);
-          }
+          EHStack.pushLazyCleanup<CallVarDtor>(NormalAndEHCleanup,
+                                               D, NRVOFlag, Loc);
         }
       }
   }
index 085dddd95d4f5d70ba278423e90c12757b78fd7f..b10e5ae6f6a54f323ddaed47e8cc45e0b4aa4d3f 100644 (file)
@@ -62,12 +62,37 @@ EHScopeStack::getEnclosingEHCleanup(iterator it) const {
         return stabilize(it);
       return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
     }
+    if (isa<EHLazyCleanupScope>(*it)) {
+      if (cast<EHLazyCleanupScope>(*it).isEHCleanup())
+        return stabilize(it);
+      return cast<EHLazyCleanupScope>(*it).getEnclosingEHCleanup();
+    }
     ++it;
   } while (it != end());
   return stable_end();
 }
 
 
+void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
+  assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
+  char *Buffer = allocate(EHLazyCleanupScope::getSizeForCleanupSize(Size));
+  bool IsNormalCleanup = Kind != EHCleanup;
+  bool IsEHCleanup = Kind != NormalCleanup;
+  EHLazyCleanupScope *Scope =
+    new (Buffer) EHLazyCleanupScope(IsNormalCleanup,
+                                    IsEHCleanup,
+                                    Size,
+                                    BranchFixups.size(),
+                                    InnermostNormalCleanup,
+                                    InnermostEHCleanup);
+  if (IsNormalCleanup)
+    InnermostNormalCleanup = stable_begin();
+  if (IsEHCleanup)
+    InnermostEHCleanup = stable_begin();
+
+  return Scope->getCleanupBuffer();
+}
+
 void EHScopeStack::pushCleanup(llvm::BasicBlock *NormalEntry,
                                llvm::BasicBlock *NormalExit,
                                llvm::BasicBlock *EHEntry,
@@ -86,11 +111,18 @@ void EHScopeStack::pushCleanup(llvm::BasicBlock *NormalEntry,
 void EHScopeStack::popCleanup() {
   assert(!empty() && "popping exception stack when not empty");
 
-  assert(isa<EHCleanupScope>(*begin()));
-  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
-  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
-  InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
-  StartOfData += EHCleanupScope::getSize();
+  if (isa<EHLazyCleanupScope>(*begin())) {
+    EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin());
+    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
+    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
+    StartOfData += Cleanup.getAllocatedSize();
+  } else {
+    assert(isa<EHCleanupScope>(*begin()));
+    EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
+    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
+    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
+    StartOfData += EHCleanupScope::getSize();
+  }
 
   // Check whether we can shrink the branch-fixups stack.
   if (!BranchFixups.empty()) {
@@ -144,7 +176,11 @@ void EHScopeStack::popNullFixups() {
   assert(hasNormalCleanups());
 
   EHScopeStack::iterator it = find(InnermostNormalCleanup);
-  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
+  unsigned MinSize;
+  if (isa<EHCleanupScope>(*it))
+    MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
+  else
+    MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth();
   assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
 
   while (BranchFixups.size() > MinSize &&
@@ -391,7 +427,7 @@ static void EmitAnyExprToExn(CodeGenFunction &CGF, const Expr *E,
   // FIXME: StmtExprs probably force this to include a non-EH
   // handler.
   {
-    CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup);
+    CodeGenFunction::CleanupBlock Cleanup(CGF, EHCleanup);
     llvm::BasicBlock *FreeBB = CGF.createBasicBlock("free-exnobj");
     llvm::BasicBlock *DoneBB = CGF.createBasicBlock("free-exnobj.done");
 
@@ -598,13 +634,28 @@ void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
 /// affect exception handling.  Currently, the only non-EH scopes are
 /// normal-only cleanup scopes.
 static bool isNonEHScope(const EHScope &S) {
-  return isa<EHCleanupScope>(S) && !cast<EHCleanupScope>(S).isEHCleanup();
+  switch (S.getKind()) {
+  case EHScope::Cleanup:
+    return !cast<EHCleanupScope>(S).isEHCleanup();
+  case EHScope::LazyCleanup:
+    return !cast<EHLazyCleanupScope>(S).isEHCleanup();
+  case EHScope::Filter:
+  case EHScope::Catch:
+  case EHScope::Terminate:
+    return false;
+  }
+
+  // Suppress warning.
+  return false;
 }
 
 llvm::BasicBlock *CodeGenFunction::getInvokeDestImpl() {
   assert(EHStack.requiresLandingPad());
   assert(!EHStack.empty());
 
+  if (!Exceptions)
+    return 0;
+
   // Check the innermost scope for a cached landing pad.  If this is
   // a non-EH cleanup, we'll check enclosing scopes in EmitLandingPad.
   llvm::BasicBlock *LP = EHStack.begin()->getCachedLandingPad();
@@ -713,6 +764,12 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
          I != E; ++I) {
 
     switch (I->getKind()) {
+    case EHScope::LazyCleanup:
+      if (!HasEHCleanup)
+        HasEHCleanup = cast<EHLazyCleanupScope>(*I).isEHCleanup();
+      // We otherwise don't care about cleanups.
+      continue;
+
     case EHScope::Cleanup:
       if (!HasEHCleanup)
         HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup();
@@ -954,8 +1011,7 @@ static llvm::Value *CallBeginCatch(CodeGenFunction &CGF, llvm::Value *Exn) {
   Call->setDoesNotThrow();
 
   {
-    CodeGenFunction::CleanupBlock EndCatchCleanup(CGF,
-                                  CodeGenFunction::NormalAndEHCleanup);
+    CodeGenFunction::CleanupBlock EndCatchCleanup(CGF, NormalAndEHCleanup);
 
     // __cxa_end_catch never throws, so this can just be a call.
     CGF.Builder.CreateCall(getEndCatchFn(CGF))->setDoesNotThrow();
@@ -1213,13 +1269,11 @@ CodeGenFunction::EnterFinallyBlock(const Stmt *Body,
 
   // Enter a normal cleanup which will perform the @finally block.
   {
-    CodeGenFunction::CleanupBlock
-      NormalCleanup(*this, CodeGenFunction::NormalCleanup);
+    CodeGenFunction::CleanupBlock Cleanup(*this, NormalCleanup);
 
     // Enter a cleanup to call the end-catch function if one was provided.
     if (EndCatchFn) {
-      CodeGenFunction::CleanupBlock
-        FinallyExitCleanup(CGF, CodeGenFunction::NormalAndEHCleanup);
+      CodeGenFunction::CleanupBlock FinallyExitCleanup(CGF, NormalAndEHCleanup);
 
       llvm::BasicBlock *EndCatchBB = createBasicBlock("finally.endcatch");
       llvm::BasicBlock *CleanupContBB = createBasicBlock("finally.cleanup.cont");
@@ -1435,3 +1489,4 @@ CodeGenFunction::CleanupBlock::~CleanupBlock() {
   CGF.Builder.restoreIP(SavedIP);
 }
 
+void EHScopeStack::LazyCleanup::_anchor() {}
index 8755dca2b223dd76131127b630a8f290ba5467ea..80739cd8d73ef55ff03af8070c6eb94e758760e0 100644 (file)
@@ -31,13 +31,13 @@ namespace CodeGen {
 class EHScope {
   llvm::BasicBlock *CachedLandingPad;
 
-  unsigned K : 2;
+  unsigned K : 3;
 
 protected:
-  enum { BitsRemaining = 30 };
+  enum { BitsRemaining = 29 };
 
 public:
-  enum Kind { Cleanup, Catch, Terminate, Filter };
+  enum Kind { Cleanup, LazyCleanup, Catch, Terminate, Filter };
 
   EHScope(Kind K) : CachedLandingPad(0), K(K) {}
 
@@ -127,6 +127,87 @@ public:
   }
 };
 
+/// A cleanup scope which generates the cleanup blocks lazily.
+class EHLazyCleanupScope : public EHScope {
+  /// Whether this cleanup needs to be run along normal edges.
+  bool IsNormalCleanup : 1;
+
+  /// Whether this cleanup needs to be run along exception edges.
+  bool IsEHCleanup : 1;
+
+  /// The amount of extra storage needed by the LazyCleanup.
+  /// Always a multiple of the scope-stack alignment.
+  unsigned CleanupSize : 12;
+
+  /// The number of fixups required by enclosing scopes (not including
+  /// this one).  If this is the top cleanup scope, all the fixups
+  /// from this index onwards belong to this scope.
+  unsigned FixupDepth : BitsRemaining - 14;
+
+  /// The nearest normal cleanup scope enclosing this one.
+  EHScopeStack::stable_iterator EnclosingNormal;
+
+  /// The nearest EH cleanup scope enclosing this one.
+  EHScopeStack::stable_iterator EnclosingEH;
+
+  /// The dual entry/exit block along the normal edge.  This is lazily
+  /// created if needed before the cleanup is popped.
+  llvm::BasicBlock *NormalBlock;
+
+  /// The dual entry/exit block along the EH edge.  This is lazily
+  /// created if needed before the cleanup is popped.
+  llvm::BasicBlock *EHBlock;
+
+public:
+  /// Gets the size required for a lazy cleanup scope with the given
+  /// cleanup-data requirements.
+  static size_t getSizeForCleanupSize(size_t Size) {
+    return sizeof(EHLazyCleanupScope) + Size;
+  }
+
+  size_t getAllocatedSize() const {
+    return sizeof(EHLazyCleanupScope) + CleanupSize;
+  }
+
+  EHLazyCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize,
+                     unsigned FixupDepth,
+                     EHScopeStack::stable_iterator EnclosingNormal,
+                     EHScopeStack::stable_iterator EnclosingEH)
+    : EHScope(EHScope::LazyCleanup),
+      IsNormalCleanup(IsNormal), IsEHCleanup(IsEH),
+      CleanupSize(CleanupSize), FixupDepth(FixupDepth),
+      EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
+      NormalBlock(0), EHBlock(0)
+  {}
+
+  bool isNormalCleanup() const { return IsNormalCleanup; }
+  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
+  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }
+
+  bool isEHCleanup() const { return IsEHCleanup; }
+  llvm::BasicBlock *getEHBlock() const { return EHBlock; }
+  void setEHBlock(llvm::BasicBlock *BB) { EHBlock = BB; }
+
+  unsigned getFixupDepth() const { return FixupDepth; }
+  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
+    return EnclosingNormal;
+  }
+  EHScopeStack::stable_iterator getEnclosingEHCleanup() const {
+    return EnclosingEH;
+  }
+
+  size_t getCleanupSize() const { return CleanupSize; }
+  void *getCleanupBuffer() { return this + 1; }
+
+  EHScopeStack::LazyCleanup *getCleanup() {
+    return reinterpret_cast<EHScopeStack::LazyCleanup*>(getCleanupBuffer());
+  }
+
+  static bool classof(const EHScope *Scope) {
+    return (Scope->getKind() == LazyCleanup);
+  }
+};
+
 /// A scope which needs to execute some code if we try to unwind ---
 /// either normally, via the EH mechanism, or both --- through it.
 class EHCleanupScope : public EHScope {
@@ -267,6 +348,11 @@ public:
           static_cast<const EHFilterScope*>(get())->getNumFilters());
       break;
 
+    case EHScope::LazyCleanup:
+      Ptr += static_cast<const EHLazyCleanupScope*>(get())
+        ->getAllocatedSize();
+      break;
+
     case EHScope::Cleanup:
       Ptr += EHCleanupScope::getSize();
       break;
index c9da348cbfe3f47847cfaaaca58adddcb03c64a7..f3c80bcf08e3eea6bb7962b88cab582419033e0f 100644 (file)
@@ -1871,8 +1871,7 @@ void CGObjCGNU::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
 
   // Register an all-paths cleanup to release the lock.
   {
-    CodeGenFunction::CleanupBlock
-      ReleaseScope(CGF, CodeGenFunction::NormalAndEHCleanup);
+    CodeGenFunction::CleanupBlock ReleaseScope(CGF, NormalAndEHCleanup);
 
     llvm::Value *SyncExit = CGM.CreateRuntimeFunction(FTy, "objc_sync_exit");
     SyncArg = CGF.Builder.CreateBitCast(SyncArg, IdTy);
index 0a766d5c821fc0de6fe5293dabdbee258beacee1..72935373027241de3427405ec34f03f85b140a3d 100644 (file)
@@ -2697,8 +2697,7 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
 
   // Push a normal cleanup to leave the try scope.
   {
-    CodeGenFunction::CleanupBlock
-      FinallyScope(CGF, CodeGenFunction::NormalCleanup);
+    CodeGenFunction::CleanupBlock FinallyScope(CGF, NormalCleanup);
 
     // Check whether we need to call objc_exception_try_exit.
     // In optimized code, this branch will always be folded.
@@ -5693,8 +5692,7 @@ CGObjCNonFragileABIMac::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
 
   // Register an all-paths cleanup to release the lock.
   {
-    CodeGenFunction::CleanupBlock
-      ReleaseScope(CGF, CodeGenFunction::NormalAndEHCleanup);
+    CodeGenFunction::CleanupBlock ReleaseScope(CGF, NormalAndEHCleanup);
 
     CGF.Builder.CreateCall(ObjCTypes.getSyncExitFn(), SyncArg)
       ->setDoesNotThrow();
@@ -5804,8 +5802,7 @@ void CGObjCNonFragileABIMac::EmitTryStmt(CodeGen::CodeGenFunction &CGF,
 
     // Add a cleanup to leave the catch.
     {
-      CodeGenFunction::CleanupBlock
-        EndCatchBlock(CGF, CodeGenFunction::NormalAndEHCleanup);
+      CodeGenFunction::CleanupBlock EndCatchBlock(CGF, NormalAndEHCleanup);
 
       // __objc_end_catch never throws.
       CGF.Builder.CreateCall(ObjCTypes.getObjCEndCatchFn())
index 5e505c2d82128668bc0f1f6320023605f651468c..eb6c4361be89c4f696872a029ed56c8df39f45cd 100644 (file)
@@ -825,11 +825,168 @@ static void SimplifyCleanupEdges(CodeGenFunction &CGF,
   SimplifyCleanupEntry(CGF, Entry);  
 }
 
+static void EmitLazyCleanup(CodeGenFunction &CGF,
+                            EHScopeStack::LazyCleanup *Fn,
+                            bool ForEH) {
+  if (ForEH) CGF.EHStack.pushTerminate();
+  Fn->Emit(CGF, ForEH);
+  if (ForEH) CGF.EHStack.popTerminate();
+  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
+}
+
+static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
+                                    EHScopeStack::LazyCleanup *Fn,
+                                    bool ForEH,
+                                    llvm::BasicBlock *Entry) {
+  assert(Entry && "no entry block for cleanup");
+
+  // Remove the switch and load from the end of the entry block.
+  llvm::Instruction *Switch = &Entry->getInstList().back();
+  Entry->getInstList().remove(Switch);
+  assert(isa<llvm::SwitchInst>(Switch));
+  llvm::Instruction *Load = &Entry->getInstList().back();
+  Entry->getInstList().remove(Load);
+  assert(isa<llvm::LoadInst>(Load));
+
+  assert(Entry->getInstList().empty() &&
+         "lazy cleanup block not empty after removing load/switch pair?");
+
+  // Emit the actual cleanup at the end of the entry block.
+  CGF.Builder.SetInsertPoint(Entry);
+  EmitLazyCleanup(CGF, Fn, ForEH);
+
+  // Put the load and switch at the end of the exit block.
+  llvm::BasicBlock *Exit = CGF.Builder.GetInsertBlock();
+  Exit->getInstList().push_back(Load);
+  Exit->getInstList().push_back(Switch);
+
+  // Clean up the edges if possible.
+  SimplifyCleanupEdges(CGF, Entry, Exit);
+
+  CGF.Builder.ClearInsertionPoint();
+}
+
+static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
+  assert(isa<EHLazyCleanupScope>(*CGF.EHStack.begin()) && "top not a cleanup!");
+  EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*CGF.EHStack.begin());
+  assert(Scope.getFixupDepth() <= CGF.EHStack.getNumBranchFixups());
+
+  // Check whether we need an EH cleanup.  This is only true if we've
+  // generated a lazy EH cleanup block.
+  llvm::BasicBlock *EHEntry = Scope.getEHBlock();
+  bool RequiresEHCleanup = (EHEntry != 0);
+
+  // Check the three conditions which might require a normal cleanup:
+
+  // - whether there are branch fix-ups through this cleanup
+  unsigned FixupDepth = Scope.getFixupDepth();
+  bool HasFixups = CGF.EHStack.getNumBranchFixups() != FixupDepth;
+
+  // - whether control has already been threaded through this cleanup
+  llvm::BasicBlock *NormalEntry = Scope.getNormalBlock();
+  bool HasExistingBranches = (NormalEntry != 0);
+
+  // - whether there's a fallthrough
+  llvm::BasicBlock *FallthroughSource = CGF.Builder.GetInsertBlock();
+  bool HasFallthrough = (FallthroughSource != 0);
+
+  bool RequiresNormalCleanup = false;
+  if (Scope.isNormalCleanup() &&
+      (HasFixups || HasExistingBranches || HasFallthrough)) {
+    RequiresNormalCleanup = true;
+  }
+
+  // If we don't need the cleanup at all, we're done.
+  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
+    CGF.EHStack.popCleanup();
+    assert(CGF.EHStack.getNumBranchFixups() == 0 ||
+           CGF.EHStack.hasNormalCleanups());
+    return;
+  }
+
+  // Copy the cleanup emission data out.  Note that SmallVector
+  // guarantees maximal alignment for its buffer regardless of its
+  // type parameter.
+  llvm::SmallVector<char, 8*sizeof(void*)> CleanupBuffer;
+  CleanupBuffer.reserve(Scope.getCleanupSize());
+  memcpy(CleanupBuffer.data(),
+         Scope.getCleanupBuffer(), Scope.getCleanupSize());
+  CleanupBuffer.set_size(Scope.getCleanupSize());
+  EHScopeStack::LazyCleanup *Fn =
+    reinterpret_cast<EHScopeStack::LazyCleanup*>(CleanupBuffer.data());
+
+  // We're done with the scope; pop it off so we can emit the cleanups.
+  CGF.EHStack.popCleanup();
+
+  if (RequiresNormalCleanup) {
+    // If we have a fallthrough and no other need for the cleanup,
+    // emit it directly.
+    if (HasFallthrough && !HasFixups && !HasExistingBranches) {
+      EmitLazyCleanup(CGF, Fn, /*ForEH*/ false);
+
+    // Otherwise, the best approach is to thread everything through
+    // the cleanup block and then try to clean up after ourselves.
+    } else {
+      // Force the entry block to exist.
+      if (!HasExistingBranches) {
+        NormalEntry = CGF.createBasicBlock("cleanup");
+        CreateCleanupSwitch(CGF, NormalEntry);
+      }
+
+      CGF.EmitBlock(NormalEntry);
+
+      // Thread the fallthrough edge through the (momentarily trivial)
+      // cleanup.
+      llvm::BasicBlock *FallthroughDestination = 0;
+      if (HasFallthrough) {
+        assert(isa<llvm::BranchInst>(FallthroughSource->getTerminator()));
+        FallthroughDestination = CGF.createBasicBlock("cleanup.cont");
+
+        BranchFixup Fix;
+        Fix.Destination = FallthroughDestination;
+        Fix.LatestBranch = FallthroughSource->getTerminator();
+        Fix.LatestBranchIndex = 0;
+        Fix.Origin = Fix.LatestBranch;
+
+        // Restore fixup invariant.  EmitBlock added a branch to the
+        // cleanup which we need to redirect to the destination.
+        cast<llvm::BranchInst>(Fix.LatestBranch)
+          ->setSuccessor(0, Fix.Destination);
+
+        ThreadFixupThroughCleanup(CGF, Fix, NormalEntry, NormalEntry);
+      }
+
+      // Thread any "real" fixups we need to thread.
+      for (unsigned I = FixupDepth, E = CGF.EHStack.getNumBranchFixups();
+           I != E; ++I)
+        if (CGF.EHStack.getBranchFixup(I).Destination)
+          ThreadFixupThroughCleanup(CGF, CGF.EHStack.getBranchFixup(I),
+                                    NormalEntry, NormalEntry);
+
+      SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ false, NormalEntry);
+
+      if (HasFallthrough)
+        CGF.EmitBlock(FallthroughDestination);
+    }
+  }
+
+  // Emit the EH cleanup if required.
+  if (RequiresEHCleanup) {
+    CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();
+    CGF.EmitBlock(EHEntry);
+    SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ true, EHEntry);
+    CGF.Builder.restoreIP(SavedIP);
+  }
+}
+
 /// Pops a cleanup block.  If the block includes a normal cleanup, the
 /// current insertion point is threaded through the cleanup, as are
 /// any branch fixups on the cleanup.
 void CodeGenFunction::PopCleanupBlock() {
   assert(!EHStack.empty() && "cleanup stack is empty!");
+  if (isa<EHLazyCleanupScope>(*EHStack.begin()))
+    return PopLazyCleanupBlock(*this);
+
   assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
   EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
   assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
@@ -1007,6 +1164,16 @@ void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
       if (Scope.isNormalCleanup())
         ThreadFixupThroughCleanup(*this, Fixup, Scope.getNormalEntry(),
                                   Scope.getNormalExit());
+    } else if (isa<EHLazyCleanupScope>(*I)) {
+      EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+      if (Scope.isNormalCleanup()) {
+        llvm::BasicBlock *Block = Scope.getNormalBlock();
+        if (!Block) {
+          Block = createBasicBlock("cleanup");
+          Scope.setNormalBlock(Block);
+        }
+        ThreadFixupThroughCleanup(*this, Fixup, Block, Block);
+      }
     }
   }
   
@@ -1046,6 +1213,16 @@ void CodeGenFunction::EmitBranchThroughEHCleanup(JumpDest Dest) {
       if (Scope.isEHCleanup())
         ThreadFixupThroughCleanup(*this, Fixup, Scope.getEHEntry(),
                                   Scope.getEHExit());
+    } else if (isa<EHLazyCleanupScope>(*I)) {
+      EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+      if (Scope.isEHCleanup()) {
+        llvm::BasicBlock *Block = Scope.getEHBlock();
+        if (!Block) {
+          Block = createBasicBlock("eh.cleanup");
+          Scope.setEHBlock(Block);
+        }
+        ThreadFixupThroughCleanup(*this, Fixup, Block, Block);
+      }
     }
   }
   
index 26fb882e56f93d6255aac49a387b0e010c4e62b1..ce3453ae5e803bc0f83c9879c431ee705bf8a4fc 100644 (file)
@@ -95,6 +95,8 @@ struct BranchFixup {
   unsigned LatestBranchIndex;
 };
 
+enum CleanupKind { NormalAndEHCleanup, EHCleanup, NormalCleanup };
+
 /// A stack of scopes which respond to exceptions, including cleanups
 /// and catch blocks.
 class EHScopeStack {
@@ -123,6 +125,28 @@ public:
     }
   };
 
+  /// A lazy cleanup.  These will be allocated on the cleanup stack
+  /// and so must be trivially copyable.  We "enforce" this by
+  /// providing no virtual destructor so that subclasses will be
+  /// encouraged to contain no non-POD types.
+  ///
+  /// LazyCleanup implementations should generally be declared in an
+  /// anonymous namespace.
+  class LazyCleanup {
+    // Anchor the construction vtable.
+    virtual void _anchor();
+
+  public:
+    /// Emit the cleanup.  For normal cleanups, this is run in the
+    /// same EH context as when the cleanup was pushed, i.e. the
+    /// immediately-enclosing context of the cleanup scope.  For
+    /// EH cleanups, this is run in a terminate context.
+    ///
+    // \param IsForEHCleanup true if this is for an EH cleanup, false
+    ///  if for a normal cleanup.
+    virtual void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) = 0;
+  };
+
 private:
   // The implementation for this class is in CGException.h and
   // CGException.cpp; the definition is here because it's used as a
@@ -171,6 +195,8 @@ private:
 
   void popNullFixups();
 
+  void *pushLazyCleanup(CleanupKind K, size_t DataSize);
+
 public:
   EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
                    InnermostNormalCleanup(stable_end()),
@@ -178,6 +204,32 @@ public:
                    CatchDepth(0) {}
   ~EHScopeStack() { delete[] StartOfBuffer; }
 
+  // Variadic templates would make this not terrible.
+
+  /// Push a lazily-created cleanup on the stack.
+  template <class T, class A0, class A1>
+  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1) {
+    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
+    LazyCleanup *Obj = new(Buffer) T(a0, a1);
+    (void) Obj;
+  }
+
+  /// Push a lazily-created cleanup on the stack.
+  template <class T, class A0, class A1, class A2>
+  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
+    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
+    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2);
+    (void) Obj;
+  }
+
+  /// Push a lazily-created cleanup on the stack.
+  template <class T, class A0, class A1, class A2, class A3>
+  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
+    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
+    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
+    (void) Obj;
+  }
+
   /// Push a cleanup on the stack.
   void pushCleanup(llvm::BasicBlock *NormalEntry,
                    llvm::BasicBlock *NormalExit,
@@ -375,8 +427,6 @@ public:
                                 llvm::Constant *RethrowFn);
   void ExitFinallyBlock(FinallyInfo &FinallyInfo);
 
-  enum CleanupKind { NormalAndEHCleanup, EHCleanup, NormalCleanup };
-
   /// PushDestructorCleanup - Push a cleanup to call the
   /// complete-object destructor of an object of the given type at the
   /// given address.  Does nothing if T is not a C++ class type with a
index bbc6d2f73b855a58d927f6a26f38a747d22118d4..652e7c89c157d652900b90cb3f907b19157b0c8e 100644 (file)
@@ -26,6 +26,7 @@ struct Y {
 
 X getX();
 
+// CHECK: define void @_Z11if_destructi(
 void if_destruct(int z) {
   // Verify that the condition variable is destroyed at the end of the
   // "if" statement.
index 6181f0eee131efeba7706d683dd78d99ee22a8ca..8d19b1effe956633b154022ca4c99e72bf112f11 100644 (file)
@@ -57,20 +57,15 @@ X test2(bool B) {
 
   // CHECK-EH:      call void @_ZN1XC1Ev
   // CHECK-EH-NEXT: invoke void @_ZN1XC1Ev
-  // -> %invoke.cont1, %lpad
+  // -> %invoke.cont, %lpad
 
-  // %invoke.cont1:
+  // %invoke.cont:
   // CHECK-EH:      br i1
   // -> %if.then, %if.end
 
   // %if.then: returning 'x'
   // CHECK-EH:      invoke void @_ZN1XC1ERKS_
-  // -> %cleanup, %lpad5
-
-  // %invoke.cont: rethrow block for %eh.cleanup.
-  // This really should be elsewhere in the function.
-  // CHECK-EH:      call void @_Unwind_Resume_or_Rethrow
-  // CHECK-EH-NEXT: unreachable
+  // -> %cleanup, %lpad1
 
   // %lpad: landing pad for ctor of 'y', dtor of 'y'
   // CHECK-EH:      call i8* @llvm.eh.exception()
@@ -78,25 +73,30 @@ X test2(bool B) {
   // CHECK-EH-NEXT: br label
   // -> %eh.cleanup
 
-  // %invoke.cont2: normal cleanup for 'x'
-  // CHECK-EH:      call void @_ZN1XD1Ev
-  // CHECK-EH-NEXT: ret void
-
-  // %lpad5: landing pad for return copy ctors, EH cleanup for 'y'
+  // %lpad1: landing pad for return copy ctors, EH cleanup for 'y'
   // CHECK-EH: invoke void @_ZN1XD1Ev
   // -> %eh.cleanup, %terminate.lpad
 
   // %if.end: returning 'y'
   // CHECK-EH: invoke void @_ZN1XC1ERKS_
-  // -> %cleanup, %lpad5
+  // -> %cleanup, %lpad1
 
   // %cleanup: normal cleanup for 'y'
   // CHECK-EH: invoke void @_ZN1XD1Ev
-  // -> %invoke.cont2, %lpad
+  // -> %invoke.cont11, %lpad
+
+  // %invoke.cont11: normal cleanup for 'x'
+  // CHECK-EH:      call void @_ZN1XD1Ev
+  // CHECK-EH-NEXT: ret void
 
   // %eh.cleanup:  EH cleanup for 'x'
   // CHECK-EH: invoke void @_ZN1XD1Ev
-  // -> %invoke.cont, %terminate.lpad
+  // -> %invoke.cont17, %terminate.lpad
+
+  // %invoke.cont17: rethrow block for %eh.cleanup.
+  // This really should be elsewhere in the function.
+  // CHECK-EH:      call void @_Unwind_Resume_or_Rethrow
+  // CHECK-EH-NEXT: unreachable
 
   // %terminate.lpad: terminate landing pad.
   // CHECK-EH:      call i8* @llvm.eh.exception()
index 59441e523fd6de7deefdbff81076fc0918f3ac09..31639466e8bb5d10dc26b983a80d75f67a6913bf 100644 (file)
@@ -1,4 +1,4 @@
-// RUN: %clang_cc1 -triple x86_64-apple-darwin10 -fobjc-nonfragile-abi -emit-llvm -o %t %s
+// RUN: %clang_cc1 -triple x86_64-apple-darwin10 -fobjc-nonfragile-abi -emit-llvm -fexceptions -o %t %s
 // RUN: FileCheck -check-prefix=CHECK-X86_64 < %t %s
 // RUN: grep '@"OBJC_EHTYPE_$_EH3"' %t | count 3
 
index 0aa8cde024daf03180d60f4916096d887ea97bde..48217f07f97469725954df043d147c589c0eec4d 100644 (file)
@@ -1,5 +1,5 @@
-// RUN: %clang_cc1 -fobjc-nonfragile-abi -emit-llvm -o - %s | FileCheck --check-prefix=DEFAULT_EH %s
-// RUN: %clang_cc1 -fsjlj-exceptions -fobjc-nonfragile-abi -emit-llvm -o - %s | FileCheck --check-prefix=SJLJ_EH %s
+// RUN: %clang_cc1 -fobjc-nonfragile-abi -emit-llvm -fexceptions -o - %s | FileCheck --check-prefix=DEFAULT_EH %s
+// RUN: %clang_cc1 -fsjlj-exceptions -fobjc-nonfragile-abi -fexceptions -emit-llvm -o - %s | FileCheck --check-prefix=SJLJ_EH %s
 
 // DEFAULT_EH: declare void @_Unwind_Resume_or_Rethrow(i8*)
 // SJLJ_EH: declare void @_Unwind_SjLj_Resume(i8*)