namespace {
/// Call the destructor for a direct base class.
- struct CallBaseDtor : EHScopeStack::LazyCleanup {
+ struct CallBaseDtor : EHScopeStack::Cleanup {
const CXXRecordDecl *BaseClass;
bool BaseIsVirtual;
CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
CGF.EmitAggExpr(BaseInit->getInit(), V, false, false, true);
if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor())
- CGF.EHStack.pushLazyCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
- isBaseVirtual);
+ CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
+ isBaseVirtual);
}
static void EmitAggMemberInitializer(CodeGenFunction &CGF,
}
namespace {
- struct CallMemberDtor : EHScopeStack::LazyCleanup {
+ struct CallMemberDtor : EHScopeStack::Cleanup {
FieldDecl *Field;
CXXDestructorDecl *Dtor;
CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
if (!RD->hasTrivialDestructor())
- CGF.EHStack.pushLazyCleanup<CallMemberDtor>(EHCleanup, Field,
- RD->getDestructor());
+ CGF.EHStack.pushCleanup<CallMemberDtor>(EHCleanup, Field,
+ RD->getDestructor());
}
}
namespace {
/// Call the operator delete associated with the current destructor.
- struct CallDtorDelete : EHScopeStack::LazyCleanup {
+ struct CallDtorDelete : EHScopeStack::Cleanup {
CallDtorDelete() {}
void Emit(CodeGenFunction &CGF, bool IsForEH) {
}
};
- struct CallArrayFieldDtor : EHScopeStack::LazyCleanup {
+ struct CallArrayFieldDtor : EHScopeStack::Cleanup {
const FieldDecl *Field;
CallArrayFieldDtor(const FieldDecl *Field) : Field(Field) {}
}
};
- struct CallFieldDtor : EHScopeStack::LazyCleanup {
+ struct CallFieldDtor : EHScopeStack::Cleanup {
const FieldDecl *Field;
CallFieldDtor(const FieldDecl *Field) : Field(Field) {}
if (DtorType == Dtor_Deleting) {
assert(DD->getOperatorDelete() &&
"operator delete missing - EmitDtorEpilogue");
- EHStack.pushLazyCleanup<CallDtorDelete>(NormalAndEHCleanup);
+ EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
return;
}
if (BaseClassDecl->hasTrivialDestructor())
continue;
- EHStack.pushLazyCleanup<CallBaseDtor>(NormalAndEHCleanup,
- BaseClassDecl,
- /*BaseIsVirtual*/ true);
+ EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
+ BaseClassDecl,
+ /*BaseIsVirtual*/ true);
}
return;
if (BaseClassDecl->hasTrivialDestructor())
continue;
- EHStack.pushLazyCleanup<CallBaseDtor>(NormalAndEHCleanup,
- BaseClassDecl,
- /*BaseIsVirtual*/ false);
+ EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
+ BaseClassDecl,
+ /*BaseIsVirtual*/ false);
}
// Destroy direct fields.
continue;
if (Array)
- EHStack.pushLazyCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
+ EHStack.pushCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
else
- EHStack.pushLazyCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
+ EHStack.pushCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
}
}
}
namespace {
- struct CallLocalDtor : EHScopeStack::LazyCleanup {
+ struct CallLocalDtor : EHScopeStack::Cleanup {
const CXXDestructorDecl *Dtor;
llvm::Value *Addr;
void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
llvm::Value *Addr) {
- EHStack.pushLazyCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
+ EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}
void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
}
namespace {
- struct CallArrayDtor : EHScopeStack::LazyCleanup {
+ struct CallArrayDtor : EHScopeStack::Cleanup {
CallArrayDtor(const CXXDestructorDecl *Dtor,
const ConstantArrayType *Type,
llvm::Value *Loc)
}
};
- struct CallVarDtor : EHScopeStack::LazyCleanup {
+ struct CallVarDtor : EHScopeStack::Cleanup {
CallVarDtor(const CXXDestructorDecl *Dtor,
llvm::Value *NRVOFlag,
llvm::Value *Loc)
}
namespace {
- struct CallStackRestore : EHScopeStack::LazyCleanup {
+ struct CallStackRestore : EHScopeStack::Cleanup {
llvm::Value *Stack;
CallStackRestore(llvm::Value *Stack) : Stack(Stack) {}
void Emit(CodeGenFunction &CGF, bool IsForEH) {
}
};
- struct CallCleanupFunction : EHScopeStack::LazyCleanup {
+ struct CallCleanupFunction : EHScopeStack::Cleanup {
llvm::Constant *CleanupFn;
const CGFunctionInfo &FnInfo;
llvm::Value *Addr;
}
};
- struct CallBlockRelease : EHScopeStack::LazyCleanup {
+ struct CallBlockRelease : EHScopeStack::Cleanup {
llvm::Value *Addr;
CallBlockRelease(llvm::Value *Addr) : Addr(Addr) {}
DidCallStackSave = true;
// Push a cleanup block and restore the stack there.
- EHStack.pushLazyCleanup<CallStackRestore>(NormalCleanup, Stack);
+ EHStack.pushCleanup<CallStackRestore>(NormalCleanup, Stack);
}
// Get the element type.
if (const ConstantArrayType *Array =
getContext().getAsConstantArrayType(Ty)) {
- EHStack.pushLazyCleanup<CallArrayDtor>(NormalAndEHCleanup,
- D, Array, Loc);
+ EHStack.pushCleanup<CallArrayDtor>(NormalAndEHCleanup,
+ D, Array, Loc);
} else {
- EHStack.pushLazyCleanup<CallVarDtor>(NormalAndEHCleanup,
- D, NRVOFlag, Loc);
+ EHStack.pushCleanup<CallVarDtor>(NormalAndEHCleanup,
+ D, NRVOFlag, Loc);
}
}
}
assert(F && "Could not find function!");
const CGFunctionInfo &Info = CGM.getTypes().getFunctionInfo(FD);
- EHStack.pushLazyCleanup<CallCleanupFunction>(NormalAndEHCleanup,
- F, &Info, DeclPtr, &D);
+ EHStack.pushCleanup<CallCleanupFunction>(NormalAndEHCleanup,
+ F, &Info, DeclPtr, &D);
}
if (needsDispose && CGM.getLangOptions().getGCMode() != LangOptions::GCOnly)
- EHStack.pushLazyCleanup<CallBlockRelease>(NormalAndEHCleanup, DeclPtr);
+ EHStack.pushCleanup<CallBlockRelease>(NormalAndEHCleanup, DeclPtr);
}
/// Emit an alloca (or GlobalValue depending on target)
}
namespace {
- struct CallGuardAbort : EHScopeStack::LazyCleanup {
+ struct CallGuardAbort : EHScopeStack::Cleanup {
llvm::GlobalVariable *Guard;
CallGuardAbort(llvm::GlobalVariable *Guard) : Guard(Guard) {}
// Call __cxa_guard_abort along the exceptional edge.
if (Exceptions)
- EHStack.pushLazyCleanup<CallGuardAbort>(EHCleanup, GuardVariable);
+ EHStack.pushCleanup<CallGuardAbort>(EHCleanup, GuardVariable);
EmitBlock(InitBlock);
}
EHScopeStack::getEnclosingEHCleanup(iterator it) const {
assert(it != end());
do {
- if (isa<EHLazyCleanupScope>(*it)) {
- if (cast<EHLazyCleanupScope>(*it).isEHCleanup())
+ if (isa<EHCleanupScope>(*it)) {
+ if (cast<EHCleanupScope>(*it).isEHCleanup())
return stabilize(it);
- return cast<EHLazyCleanupScope>(*it).getEnclosingEHCleanup();
+ return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
}
++it;
} while (it != end());
}
-void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
+void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
- char *Buffer = allocate(EHLazyCleanupScope::getSizeForCleanupSize(Size));
+ char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
bool IsNormalCleanup = Kind != EHCleanup;
bool IsEHCleanup = Kind != NormalCleanup;
- EHLazyCleanupScope *Scope =
- new (Buffer) EHLazyCleanupScope(IsNormalCleanup,
- IsEHCleanup,
- Size,
- BranchFixups.size(),
- InnermostNormalCleanup,
- InnermostEHCleanup);
+ EHCleanupScope *Scope =
+ new (Buffer) EHCleanupScope(IsNormalCleanup,
+ IsEHCleanup,
+ Size,
+ BranchFixups.size(),
+ InnermostNormalCleanup,
+ InnermostEHCleanup);
if (IsNormalCleanup)
InnermostNormalCleanup = stable_begin();
if (IsEHCleanup)
void EHScopeStack::popCleanup() {
assert(!empty() && "popping exception stack when not empty");
- assert(isa<EHLazyCleanupScope>(*begin()));
- EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin());
+ assert(isa<EHCleanupScope>(*begin()));
+ EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
StartOfData += Cleanup.getAllocatedSize();
assert(hasNormalCleanups());
EHScopeStack::iterator it = find(InnermostNormalCleanup);
- unsigned MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth();
+ unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
while (BranchFixups.size() > MinSize &&
namespace {
/// A cleanup to free the exception object if its initialization
/// throws.
- struct FreeExceptionCleanup : EHScopeStack::LazyCleanup {
+ struct FreeExceptionCleanup : EHScopeStack::Cleanup {
FreeExceptionCleanup(llvm::Value *ShouldFreeVar,
llvm::Value *ExnLocVar)
: ShouldFreeVar(ShouldFreeVar), ExnLocVar(ExnLocVar) {}
// exception during initialization.
// FIXME: stmt expressions might require this to be a normal
// cleanup, too.
- CGF.EHStack.pushLazyCleanup<FreeExceptionCleanup>(EHCleanup,
- ShouldFreeVar,
- ExnLocVar);
+ CGF.EHStack.pushCleanup<FreeExceptionCleanup>(EHCleanup,
+ ShouldFreeVar,
+ ExnLocVar);
EHScopeStack::stable_iterator Cleanup = CGF.EHStack.stable_begin();
CGF.Builder.CreateStore(ExnLoc, ExnLocVar);
/// normal-only cleanup scopes.
static bool isNonEHScope(const EHScope &S) {
switch (S.getKind()) {
- case EHScope::LazyCleanup:
- return !cast<EHLazyCleanupScope>(S).isEHCleanup();
+ case EHScope::Cleanup:
+ return !cast<EHCleanupScope>(S).isEHCleanup();
case EHScope::Filter:
case EHScope::Catch:
case EHScope::Terminate:
I != E; ++I) {
switch (I->getKind()) {
- case EHScope::LazyCleanup:
+ case EHScope::Cleanup:
if (!HasEHCleanup)
- HasEHCleanup = cast<EHLazyCleanupScope>(*I).isEHCleanup();
+ HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup();
// We otherwise don't care about cleanups.
continue;
/// of the caught type, so we have to assume the actual thrown
/// exception type might have a throwing destructor, even if the
/// caught type's destructor is trivial or nothrow.
- struct CallEndCatch : EHScopeStack::LazyCleanup {
+ struct CallEndCatch : EHScopeStack::Cleanup {
CallEndCatch(bool MightThrow) : MightThrow(MightThrow) {}
bool MightThrow;
llvm::CallInst *Call = CGF.Builder.CreateCall(getBeginCatchFn(CGF), Exn);
Call->setDoesNotThrow();
- CGF.EHStack.pushLazyCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);
+ CGF.EHStack.pushCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);
return Call;
}
}
namespace {
- struct CallRethrow : EHScopeStack::LazyCleanup {
+ struct CallRethrow : EHScopeStack::Cleanup {
void Emit(CodeGenFunction &CGF, bool IsForEH) {
CGF.EmitCallOrInvoke(getReThrowFn(CGF), 0, 0);
}
// __cxa_rethrow. This needs to happen before __cxa_end_catch is
// called, and so it is pushed after BeginCatch.
if (ImplicitRethrow)
- EHStack.pushLazyCleanup<CallRethrow>(NormalCleanup);
+ EHStack.pushCleanup<CallRethrow>(NormalCleanup);
// Perform the body of the catch.
EmitStmt(C->getHandlerBlock());
}
namespace {
- struct CallEndCatchForFinally : EHScopeStack::LazyCleanup {
+ struct CallEndCatchForFinally : EHScopeStack::Cleanup {
llvm::Value *ForEHVar;
llvm::Value *EndCatchFn;
CallEndCatchForFinally(llvm::Value *ForEHVar, llvm::Value *EndCatchFn)
}
};
- struct PerformFinally : EHScopeStack::LazyCleanup {
+ struct PerformFinally : EHScopeStack::Cleanup {
const Stmt *Body;
llvm::Value *ForEHVar;
llvm::Value *EndCatchFn;
void Emit(CodeGenFunction &CGF, bool IsForEH) {
// Enter a cleanup to call the end-catch function if one was provided.
if (EndCatchFn)
- CGF.EHStack.pushLazyCleanup<CallEndCatchForFinally>(NormalAndEHCleanup,
- ForEHVar, EndCatchFn);
+ CGF.EHStack.pushCleanup<CallEndCatchForFinally>(NormalAndEHCleanup,
+ ForEHVar, EndCatchFn);
// Emit the finally block.
CGF.EmitStmt(Body);
InitTempAlloca(ForEHVar, llvm::ConstantInt::getFalse(getLLVMContext()));
// Enter a normal cleanup which will perform the @finally block.
- EHStack.pushLazyCleanup<PerformFinally>(NormalCleanup, Body,
- ForEHVar, EndCatchFn,
- RethrowFn, SavedExnVar);
+ EHStack.pushCleanup<PerformFinally>(NormalCleanup, Body,
+ ForEHVar, EndCatchFn,
+ RethrowFn, SavedExnVar);
// Enter a catch-all scope.
llvm::BasicBlock *CatchAllBB = createBasicBlock("finally.catchall");
return TerminateHandler;
}
-EHScopeStack::LazyCleanup::~LazyCleanup() {
- llvm_unreachable("LazyCleanup is indestructable");
+EHScopeStack::Cleanup::~Cleanup() {
+ llvm_unreachable("Cleanup is indestructable");
}
enum { BitsRemaining = 30 };
public:
- enum Kind { LazyCleanup, Catch, Terminate, Filter };
+ enum Kind { Cleanup, Catch, Terminate, Filter };
EHScope(Kind K) : CachedLandingPad(0), K(K) {}
};
/// A cleanup scope which generates the cleanup blocks lazily.
-class EHLazyCleanupScope : public EHScope {
+class EHCleanupScope : public EHScope {
/// Whether this cleanup needs to be run along normal edges.
bool IsNormalCleanup : 1;
/// Whether this cleanup needs to be run along exception edges.
bool IsEHCleanup : 1;
- /// The amount of extra storage needed by the LazyCleanup.
+ /// The amount of extra storage needed by the Cleanup.
/// Always a multiple of the scope-stack alignment.
unsigned CleanupSize : 12;
/// Gets the size required for a lazy cleanup scope with the given
/// cleanup-data requirements.
static size_t getSizeForCleanupSize(size_t Size) {
- return sizeof(EHLazyCleanupScope) + Size;
+ return sizeof(EHCleanupScope) + Size;
}
size_t getAllocatedSize() const {
- return sizeof(EHLazyCleanupScope) + CleanupSize;
+ return sizeof(EHCleanupScope) + CleanupSize;
}
- EHLazyCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize,
- unsigned FixupDepth,
- EHScopeStack::stable_iterator EnclosingNormal,
- EHScopeStack::stable_iterator EnclosingEH)
- : EHScope(EHScope::LazyCleanup),
+ EHCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize,
+ unsigned FixupDepth,
+ EHScopeStack::stable_iterator EnclosingNormal,
+ EHScopeStack::stable_iterator EnclosingEH)
+ : EHScope(EHScope::Cleanup),
IsNormalCleanup(IsNormal), IsEHCleanup(IsEH),
CleanupSize(CleanupSize), FixupDepth(FixupDepth),
EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
size_t getCleanupSize() const { return CleanupSize; }
void *getCleanupBuffer() { return this + 1; }
- EHScopeStack::LazyCleanup *getCleanup() {
- return reinterpret_cast<EHScopeStack::LazyCleanup*>(getCleanupBuffer());
+ EHScopeStack::Cleanup *getCleanup() {
+ return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
}
static bool classof(const EHScope *Scope) {
- return (Scope->getKind() == LazyCleanup);
+ return (Scope->getKind() == Cleanup);
}
};
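A minimal sketch of the variable-length allocation idiom EHCleanupScope relies on: getSizeForCleanupSize() reserves the cleanup's storage directly behind the header object, and getCleanupBuffer() recovers it as "this + 1". The sketch is standalone illustration only; Header and allocateWithPayload are invented names, not part of the patch.

    // Illustration only (invented names): one buffer holds a fixed-size
    // header followed immediately by a variable-size payload, the same
    // layout EHCleanupScope uses for its trailing cleanup object.
    #include <cstddef>
    #include <new>

    struct Header {
      std::size_t PayloadSize;
      // The payload begins right after the header, hence "this + 1".
      void *payload() { return this + 1; }
    };

    template <class T>
    Header *allocateWithPayload(const T &Init) {
      // Mirrors allocate(EHCleanupScope::getSizeForCleanupSize(Size)):
      // a single allocation sized for the header plus the payload.
      char *Buffer = new char[sizeof(Header) + sizeof(T)];
      Header *H = new (Buffer) Header();
      H->PayloadSize = sizeof(T);
      // Placement-new the payload into the trailing storage, just as
      // pushCleanup placement-news the Cleanup object into its scope.
      new (H->payload()) T(Init);
      return H;
    }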
static_cast<const EHFilterScope*>(get())->getNumFilters());
break;
- case EHScope::LazyCleanup:
- Ptr += static_cast<const EHLazyCleanupScope*>(get())
+ case EHScope::Cleanup:
+ Ptr += static_cast<const EHCleanupScope*>(get())
->getAllocatedSize();
break;
}
namespace {
- struct CallSyncExit : EHScopeStack::LazyCleanup {
+ struct CallSyncExit : EHScopeStack::Cleanup {
llvm::Value *SyncExitFn;
llvm::Value *SyncArg;
CallSyncExit(llvm::Value *SyncExitFn, llvm::Value *SyncArg)
// Register an all-paths cleanup to release the lock.
llvm::Value *SyncExit = CGM.CreateRuntimeFunction(FTy, "objc_sync_exit");
- CGF.EHStack.pushLazyCleanup<CallSyncExit>(NormalAndEHCleanup,
- SyncExit, SyncArg);
+ CGF.EHStack.pushCleanup<CallSyncExit>(NormalAndEHCleanup, SyncExit, SyncArg);
// Emit the body of the statement.
CGF.EmitStmt(S.getSynchBody());
}
namespace {
- struct PerformFragileFinally : EHScopeStack::LazyCleanup {
+ struct PerformFragileFinally : EHScopeStack::Cleanup {
const Stmt &S;
llvm::Value *SyncArg;
llvm::Value *CallTryExitVar;
CallTryExitVar);
// Push a normal cleanup to leave the try scope.
- CGF.EHStack.pushLazyCleanup<PerformFragileFinally>(NormalCleanup, &S,
- SyncArg,
- CallTryExitVar,
- ExceptionData,
- &ObjCTypes);
+ CGF.EHStack.pushCleanup<PerformFragileFinally>(NormalCleanup, &S,
+ SyncArg,
+ CallTryExitVar,
+ ExceptionData,
+ &ObjCTypes);
// Enter a try block:
// - Call objc_exception_try_enter to push ExceptionData on top of
}
namespace {
- struct CallSyncExit : EHScopeStack::LazyCleanup {
+ struct CallSyncExit : EHScopeStack::Cleanup {
llvm::Value *SyncExitFn;
llvm::Value *SyncArg;
CallSyncExit(llvm::Value *SyncExitFn, llvm::Value *SyncArg)
->setDoesNotThrow();
// Register an all-paths cleanup to release the lock.
- CGF.EHStack.pushLazyCleanup<CallSyncExit>(NormalAndEHCleanup,
- ObjCTypes.getSyncExitFn(),
- SyncArg);
+ CGF.EHStack.pushCleanup<CallSyncExit>(NormalAndEHCleanup,
+ ObjCTypes.getSyncExitFn(),
+ SyncArg);
// Emit the body of the statement.
CGF.EmitStmt(S.getSynchBody());
llvm::Value *TypeInfo;
};
- struct CallObjCEndCatch : EHScopeStack::LazyCleanup {
+ struct CallObjCEndCatch : EHScopeStack::Cleanup {
CallObjCEndCatch(bool MightThrow, llvm::Value *Fn) :
MightThrow(MightThrow), Fn(Fn) {}
bool MightThrow;
// Add a cleanup to leave the catch.
bool EndCatchMightThrow = (Handler.Variable == 0);
- CGF.EHStack.pushLazyCleanup<CallObjCEndCatch>(NormalAndEHCleanup,
- EndCatchMightThrow,
- ObjCTypes.getObjCEndCatchFn());
+ CGF.EHStack.pushCleanup<CallObjCEndCatch>(NormalAndEHCleanup,
+ EndCatchMightThrow,
+ ObjCTypes.getObjCEndCatchFn());
// Bind the catch parameter if it exists.
if (const VarDecl *CatchParam = Handler.Variable) {
using namespace CodeGen;
namespace {
- struct DestroyTemporary : EHScopeStack::LazyCleanup {
+ struct DestroyTemporary : EHScopeStack::Cleanup {
const CXXTemporary *Temporary;
llvm::Value *Addr;
llvm::Value *CondPtr;
Builder.CreateStore(Builder.getTrue(), CondPtr);
}
- EHStack.pushLazyCleanup<DestroyTemporary>(NormalAndEHCleanup,
- Temporary, Ptr, CondPtr);
+ EHStack.pushCleanup<DestroyTemporary>(NormalAndEHCleanup,
+ Temporary, Ptr, CondPtr);
}
RValue
SimplifyCleanupEntry(CGF, Entry);
}
-static void EmitLazyCleanup(CodeGenFunction &CGF,
- EHScopeStack::LazyCleanup *Fn,
- bool ForEH) {
+static void EmitCleanup(CodeGenFunction &CGF,
+ EHScopeStack::Cleanup *Fn,
+ bool ForEH) {
if (ForEH) CGF.EHStack.pushTerminate();
Fn->Emit(CGF, ForEH);
if (ForEH) CGF.EHStack.popTerminate();
assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
}
-static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
- EHScopeStack::LazyCleanup *Fn,
- bool ForEH,
- llvm::BasicBlock *Entry) {
+static void SplitAndEmitCleanup(CodeGenFunction &CGF,
+ EHScopeStack::Cleanup *Fn,
+ bool ForEH,
+ llvm::BasicBlock *Entry) {
assert(Entry && "no entry block for cleanup");
// Remove the switch and load from the end of the entry block.
// Emit the actual cleanup at the end of the entry block.
CGF.Builder.SetInsertPoint(Entry);
- EmitLazyCleanup(CGF, Fn, ForEH);
+ EmitCleanup(CGF, Fn, ForEH);
// Put the load and switch at the end of the exit block.
llvm::BasicBlock *Exit = CGF.Builder.GetInsertBlock();
CGF.Builder.ClearInsertionPoint();
}
-static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
- assert(isa<EHLazyCleanupScope>(*CGF.EHStack.begin()) && "top not a cleanup!");
- EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*CGF.EHStack.begin());
- assert(Scope.getFixupDepth() <= CGF.EHStack.getNumBranchFixups());
+/// Pops a cleanup block. If the block includes a normal cleanup, the
+/// current insertion point is threaded through the cleanup, as are
+/// any branch fixups on the cleanup.
+void CodeGenFunction::PopCleanupBlock() {
+ assert(!EHStack.empty() && "cleanup stack is empty!");
+ assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
+ EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
+ assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
// Check whether we need an EH cleanup. This is only true if we've
// generated a lazy EH cleanup block.
// - whether there are branch fix-ups through this cleanup
unsigned FixupDepth = Scope.getFixupDepth();
- bool HasFixups = CGF.EHStack.getNumBranchFixups() != FixupDepth;
+ bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;
// - whether control has already been threaded through this cleanup
llvm::BasicBlock *NormalEntry = Scope.getNormalBlock();
bool HasExistingBranches = (NormalEntry != 0);
// - whether there's a fallthrough
- llvm::BasicBlock *FallthroughSource = CGF.Builder.GetInsertBlock();
+ llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
bool HasFallthrough = (FallthroughSource != 0);
bool RequiresNormalCleanup = false;
// If we don't need the cleanup at all, we're done.
if (!RequiresNormalCleanup && !RequiresEHCleanup) {
- CGF.EHStack.popCleanup();
- assert(CGF.EHStack.getNumBranchFixups() == 0 ||
- CGF.EHStack.hasNormalCleanups());
+ EHStack.popCleanup();
+ assert(EHStack.getNumBranchFixups() == 0 ||
+ EHStack.hasNormalCleanups());
return;
}
memcpy(CleanupBuffer.data(),
Scope.getCleanupBuffer(), Scope.getCleanupSize());
CleanupBuffer.set_size(Scope.getCleanupSize());
- EHScopeStack::LazyCleanup *Fn =
- reinterpret_cast<EHScopeStack::LazyCleanup*>(CleanupBuffer.data());
+ EHScopeStack::Cleanup *Fn =
+ reinterpret_cast<EHScopeStack::Cleanup*>(CleanupBuffer.data());
// We're done with the scope; pop it off so we can emit the cleanups.
- CGF.EHStack.popCleanup();
+ EHStack.popCleanup();
if (RequiresNormalCleanup) {
// If we have a fallthrough and no other need for the cleanup,
// emit it directly.
if (HasFallthrough && !HasFixups && !HasExistingBranches) {
- EmitLazyCleanup(CGF, Fn, /*ForEH*/ false);
+ EmitCleanup(*this, Fn, /*ForEH*/ false);
// Otherwise, the best approach is to thread everything through
// the cleanup block and then try to clean up after ourselves.
} else {
// Force the entry block to exist.
if (!HasExistingBranches) {
- NormalEntry = CGF.createBasicBlock("cleanup");
- CreateCleanupSwitch(CGF, NormalEntry);
+ NormalEntry = createBasicBlock("cleanup");
+ CreateCleanupSwitch(*this, NormalEntry);
}
- CGF.EmitBlock(NormalEntry);
+ EmitBlock(NormalEntry);
// Thread the fallthrough edge through the (momentarily trivial)
// cleanup.
llvm::BasicBlock *FallthroughDestination = 0;
if (HasFallthrough) {
assert(isa<llvm::BranchInst>(FallthroughSource->getTerminator()));
- FallthroughDestination = CGF.createBasicBlock("cleanup.cont");
+ FallthroughDestination = createBasicBlock("cleanup.cont");
BranchFixup Fix;
Fix.Destination = FallthroughDestination;
cast<llvm::BranchInst>(Fix.LatestBranch)
->setSuccessor(0, Fix.Destination);
- ThreadFixupThroughCleanup(CGF, Fix, NormalEntry, NormalEntry);
+ ThreadFixupThroughCleanup(*this, Fix, NormalEntry, NormalEntry);
}
// Thread any "real" fixups we need to thread.
- for (unsigned I = FixupDepth, E = CGF.EHStack.getNumBranchFixups();
+ for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
I != E; ++I)
- if (CGF.EHStack.getBranchFixup(I).Destination)
+ if (EHStack.getBranchFixup(I).Destination)
- ThreadFixupThroughCleanup(CGF, CGF.EHStack.getBranchFixup(I),
+ ThreadFixupThroughCleanup(*this, EHStack.getBranchFixup(I),
NormalEntry, NormalEntry);
- SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ false, NormalEntry);
+ SplitAndEmitCleanup(*this, Fn, /*ForEH*/ false, NormalEntry);
if (HasFallthrough)
- CGF.EmitBlock(FallthroughDestination);
+ EmitBlock(FallthroughDestination);
}
}
// Emit the EH cleanup if required.
if (RequiresEHCleanup) {
- CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();
- CGF.EmitBlock(EHEntry);
- SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ true, EHEntry);
- CGF.Builder.restoreIP(SavedIP);
+ CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
+ EmitBlock(EHEntry);
+ SplitAndEmitCleanup(*this, Fn, /*ForEH*/ true, EHEntry);
+ Builder.restoreIP(SavedIP);
}
}
-/// Pops a cleanup block. If the block includes a normal cleanup, the
-/// current insertion point is threaded through the cleanup, as are
-/// any branch fixups on the cleanup.
-void CodeGenFunction::PopCleanupBlock() {
- assert(!EHStack.empty() && "cleanup stack is empty!");
- assert(isa<EHLazyCleanupScope>(*EHStack.begin()));
- return PopLazyCleanupBlock(*this);
-}
-
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
if (!HaveInsertPoint())
return;
for (EHScopeStack::iterator I = EHStack.begin(),
E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
- if (isa<EHLazyCleanupScope>(*I)) {
- EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+ if (isa<EHCleanupScope>(*I)) {
+ EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
if (Scope.isNormalCleanup()) {
llvm::BasicBlock *Block = Scope.getNormalBlock();
if (!Block) {
for (EHScopeStack::iterator I = EHStack.begin(),
E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
- if (isa<EHLazyCleanupScope>(*I)) {
- EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+ if (isa<EHCleanupScope>(*I)) {
+ EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
if (Scope.isEHCleanup()) {
llvm::BasicBlock *Block = Scope.getEHBlock();
if (!Block) {
}
};
- /// A lazy cleanup. Subclasses must be POD-like: cleanups will
- /// not be destructed, and they will be allocated on the cleanup
- /// stack and freely copied and moved around.
+ /// Information for lazily generating a cleanup. Subclasses must be
+ /// POD-like: cleanups will not be destructed, and they will be
+ /// allocated on the cleanup stack and freely copied and moved
+ /// around.
///
- /// LazyCleanup implementations should generally be declared in an
+ /// Cleanup implementations should generally be declared in an
/// anonymous namespace.
- class LazyCleanup {
+ class Cleanup {
public:
// Anchor the construction vtable. We use the destructor because
// gcc gives an obnoxious warning if there are virtual methods
// doesn't seem to be any other way around this warning.
//
// This destructor will never be called.
- virtual ~LazyCleanup();
+ virtual ~Cleanup();
/// Emit the cleanup. For normal cleanups, this is run in the
/// same EH context as when the cleanup was pushed, i.e. the
void popNullFixups();
- void *pushLazyCleanup(CleanupKind K, size_t DataSize);
+ void *pushCleanup(CleanupKind K, size_t DataSize);
public:
EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
/// Push a lazily-created cleanup on the stack.
template <class T>
- void pushLazyCleanup(CleanupKind Kind) {
- void *Buffer = pushLazyCleanup(Kind, sizeof(T));
- LazyCleanup *Obj = new(Buffer) T();
+ void pushCleanup(CleanupKind Kind) {
+ void *Buffer = pushCleanup(Kind, sizeof(T));
+ Cleanup *Obj = new(Buffer) T();
(void) Obj;
}
/// Push a lazily-created cleanup on the stack.
template <class T, class A0>
- void pushLazyCleanup(CleanupKind Kind, A0 a0) {
- void *Buffer = pushLazyCleanup(Kind, sizeof(T));
- LazyCleanup *Obj = new(Buffer) T(a0);
+ void pushCleanup(CleanupKind Kind, A0 a0) {
+ void *Buffer = pushCleanup(Kind, sizeof(T));
+ Cleanup *Obj = new(Buffer) T(a0);
(void) Obj;
}
/// Push a lazily-created cleanup on the stack.
template <class T, class A0, class A1>
- void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1) {
- void *Buffer = pushLazyCleanup(Kind, sizeof(T));
- LazyCleanup *Obj = new(Buffer) T(a0, a1);
+ void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
+ void *Buffer = pushCleanup(Kind, sizeof(T));
+ Cleanup *Obj = new(Buffer) T(a0, a1);
(void) Obj;
}
/// Push a lazily-created cleanup on the stack.
template <class T, class A0, class A1, class A2>
- void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
- void *Buffer = pushLazyCleanup(Kind, sizeof(T));
- LazyCleanup *Obj = new(Buffer) T(a0, a1, a2);
+ void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
+ void *Buffer = pushCleanup(Kind, sizeof(T));
+ Cleanup *Obj = new(Buffer) T(a0, a1, a2);
(void) Obj;
}
/// Push a lazily-created cleanup on the stack.
template <class T, class A0, class A1, class A2, class A3>
- void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
- void *Buffer = pushLazyCleanup(Kind, sizeof(T));
- LazyCleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
+ void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
+ void *Buffer = pushCleanup(Kind, sizeof(T));
+ Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
(void) Obj;
}
/// Push a lazily-created cleanup on the stack.
template <class T, class A0, class A1, class A2, class A3, class A4>
- void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
- void *Buffer = pushLazyCleanup(Kind, sizeof(T));
- LazyCleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
+ void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
+ void *Buffer = pushCleanup(Kind, sizeof(T));
+ Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
(void) Obj;
}
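Taken together, the overloads above give the renamed usage pattern: define a POD-like struct deriving from EHScopeStack::Cleanup, then pass its constructor arguments through pushCleanup. A hedged sketch in the same style as CallBlockRelease above; CallFree and getFreeFn are hypothetical names, not part of the patch.

    namespace {
      // Hypothetical cleanup (illustration only): releases a pointer on
      // both the normal and the exceptional path. POD-like per the
      // Cleanup contract: never destructed, freely copied on the stack.
      struct CallFree : EHScopeStack::Cleanup {
        llvm::Value *Ptr;
        CallFree(llvm::Value *Ptr) : Ptr(Ptr) {}
        void Emit(CodeGenFunction &CGF, bool IsForEH) {
          // getFreeFn is assumed to return the runtime's free function.
          CGF.Builder.CreateCall(getFreeFn(CGF), Ptr);
        }
      };
    }

    // At the point where the resource is acquired:
    // CGF.EHStack.pushCleanup<CallFree>(NormalAndEHCleanup, Ptr);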