(unsigned)Instruction::Call, (unsigned)Instruction::CleanupRet,
(unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};
+ auto *LivenessAA = A.getAAFor<AAIsDead>(*this, F);
+
for (unsigned Opcode : Opcodes) {
for (Instruction *I : OpcodeInstMap[Opcode]) {
+ // Skip dead instructions.
+ if (LivenessAA && LivenessAA->isAssumedDead(I))
+ continue;
+
if (!I->mayThrow())
continue;
/// Return an assumed unique return value if a single candidate is found. If
/// there cannot be one, return a nullptr. If it is not clear yet, return the
/// Optional::NoneType.
- Optional<Value *> getAssumedUniqueReturnValue() const;
+ Optional<Value *> getAssumedUniqueReturnValue(const AAIsDead *LivenessAA) const;
/// See AbstractState::checkForallReturnedValues(...).
- bool
- checkForallReturnedValues(std::function<bool(Value &)> &Pred) const override;
+ bool checkForallReturnedValues(
+ std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> &Pred)
+ const override;
/// Pretty print the attribute similar to the IR representation.
const std::string getAsStr() const override;
IsFixed = true;
IsValidState &= true;
}
+
void indicatePessimisticFixpoint() override {
IsFixed = true;
IsValidState = false;
assert(isValidState());
NumFnKnownReturns++;
+ auto *LivenessAA = A.getAAFor<AAIsDead>(*this, getAnchorScope());
+
// Check if we have an assumed unique return value that we could manifest.
- Optional<Value *> UniqueRV = getAssumedUniqueReturnValue();
+ Optional<Value *> UniqueRV = getAssumedUniqueReturnValue(LivenessAA);
if (!UniqueRV.hasValue() || !UniqueRV.getValue())
return Changed;
(isValidState() ? std::to_string(getNumReturnValues()) : "?") + ")";
}
-Optional<Value *> AAReturnedValuesImpl::getAssumedUniqueReturnValue() const {
+Optional<Value *> AAReturnedValuesImpl::getAssumedUniqueReturnValue(
+ const AAIsDead *LivenessAA) const {
// If checkForallReturnedValues provides a unique value, ignoring potential
// undef values that can also be present, it is assumed to be the actual
// return value and forwarded to the caller of this method. If there are
// returned value.
Optional<Value *> UniqueRV;
- std::function<bool(Value &)> Pred = [&](Value &RV) -> bool {
+ std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
+ [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
+
+ // If all ReturnInsts are dead, then ReturnValue is dead as well
+ // and can be ignored.
+ if (LivenessAA &&
+ !LivenessAA->isLiveInstSet(RetInsts.begin(), RetInsts.end()))
+ return true;
+
// If we found a second returned value and neither the current nor the saved
// one is an undef, there is no unique returned value. Undefs are special
// since we can pretend they have any value.
}
bool AAReturnedValuesImpl::checkForallReturnedValues(
- std::function<bool(Value &)> &Pred) const {
+ std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> &Pred)
+ const {
if (!isValidState())
return false;
// encountered an overdefined one during an update.
for (auto &It : ReturnedValues) {
Value *RV = It.first;
+ const SmallPtrSetImpl<ReturnInst *> &RetInsts = It.second;
ImmutableCallSite ICS(RV);
if (ICS && !HasOverdefinedReturnedCalls)
continue;
- if (!Pred(*RV))
+ if (!Pred(*RV, RetInsts))
return false;
}
// Keep track of any change to trigger updates on dependent attributes.
ChangeStatus Changed = ChangeStatus::UNCHANGED;
+ auto *LivenessAA = A.getAAFor<AAIsDead>(*this, getAnchorScope());
+
// Look at all returned call sites.
for (auto &It : ReturnedValues) {
SmallPtrSet<ReturnInst *, 2> &ReturnInsts = It.second;
Value *RV = It.first;
+
+ // Ignore dead ReturnValues.
+ if (LivenessAA &&
+     !LivenessAA->isLiveInstSet(ReturnInsts.begin(), ReturnInsts.end()))
+ continue;
+
LLVM_DEBUG(dbgs() << "[AAReturnedValues] Potentially returned value " << *RV
<< "\n");
continue;
}
+ auto *LivenessCSAA = A.getAAFor<AAIsDead>(*this, RetCSAA->getAnchorScope());
+
// Try to find a assumed unique return value for the called function.
- Optional<Value *> AssumedUniqueRV = RetCSAA->getAssumedUniqueReturnValue();
+ Optional<Value *> AssumedUniqueRV =
+ RetCSAA->getAssumedUniqueReturnValue(LivenessCSAA);
// If no assumed unique return value was found due to the lack of
// candidates, we may need to resolve more calls (through more update
ChangeStatus AANoSyncFunction::updateImpl(Attributor &A) {
Function &F = getAnchorScope();
+ auto *LivenessAA = A.getAAFor<AAIsDead>(*this, F);
+
/// We are looking for volatile instructions or Non-Relaxed atomics.
/// FIXME: We should ipmrove the handling of intrinsics.
for (Instruction *I : InfoCache.getReadOrWriteInstsForFunction(F)) {
+ // Skip assumed dead instructions.
+ if (LivenessAA && LivenessAA->isAssumedDead(I))
+ continue;
+
ImmutableCallSite ICS(I);
auto *NoSyncAA = A.getAAFor<AANoSyncFunction>(*this, *I);
for (unsigned Opcode : Opcodes) {
for (Instruction *I : OpcodeInstMap[Opcode]) {
+ // Skip assumed dead instructions.
+ if (LivenessAA && LivenessAA->isAssumedDead(I))
+ continue;
// At this point we handled all read/write effects and they are all
// nosync, so they can be skipped.
if (I->mayReadOrWriteMemory())
ChangeStatus AANoFreeFunction::updateImpl(Attributor &A) {
Function &F = getAnchorScope();
+ auto *LivenessAA = A.getAAFor<AAIsDead>(*this, F);
+
// The map from instruction opcodes to those instructions in the function.
auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(F);
{(unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr,
(unsigned)Instruction::Call}) {
for (Instruction *I : OpcodeInstMap[Opcode]) {
-
+ // Skip assumed dead instructions.
+ if (LivenessAA && LivenessAA->isAssumedDead(I))
+ continue;
auto ICS = ImmutableCallSite(I);
auto *NoFreeAA = A.getAAFor<AANoFreeFunction>(*this, *I);
/// (i) A value is known nonZero(=nonnull).
/// (ii) A value is associated with AANonNull and its isAssumedNonNull() is
/// true.
- std::function<bool(Value &)> generatePredicate(Attributor &);
+ std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)>
+ generatePredicate(Attributor &);
};
-std::function<bool(Value &)> AANonNullImpl::generatePredicate(Attributor &A) {
+std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)>
+AANonNullImpl::generatePredicate(Attributor &A) {
// FIXME: The `AAReturnedValues` should provide the predicate with the
// `ReturnInst` vector as well such that we can use the control flow sensitive
// version of `isKnownNonZero`. This should fix `test11` in
// `test/Transforms/FunctionAttrs/nonnull.ll`
- std::function<bool(Value &)> Pred = [&](Value &RV) -> bool {
- if (isKnownNonZero(&RV, getAnchorScope().getParent()->getDataLayout()))
+ std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
+ [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
+ Function &F = getAnchorScope();
+
+ if (isKnownNonZero(&RV, F.getParent()->getDataLayout()))
return true;
auto *NonNullAA = A.getAAFor<AANonNull>(*this, RV);
return ChangeStatus::CHANGED;
}
- std::function<bool(Value &)> Pred = this->generatePredicate(A);
+ std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
+ this->generatePredicate(A);
+
if (!AARetVal->checkForallReturnedValues(Pred)) {
indicatePessimisticFixpoint();
return ChangeStatus::CHANGED;
return false;
};
- if (!A.checkForAllCallSites(F, CallSiteCheck, true)) {
+ if (!A.checkForAllCallSites(F, CallSiteCheck, true, *this)) {
indicatePessimisticFixpoint();
return ChangeStatus::CHANGED;
}
// The map from instruction opcodes to those instructions in the function.
auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(F);
+ auto *LivenessAA = A.getAAFor<AAIsDead>(*this, F);
+
for (unsigned Opcode :
{(unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr,
(unsigned)Instruction::Call}) {
for (Instruction *I : OpcodeInstMap[Opcode]) {
+ // Skip assumed dead instructions.
+ if (LivenessAA && LivenessAA->isAssumedDead(I))
+ continue;
+
auto ICS = ImmutableCallSite(I);
if (ICS.hasFnAttr(Attribute::WillReturn))
return ChangeStatus::CHANGED;
}
- std::function<bool(Value &)> Pred = [&](Value &RV) -> bool {
+ std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
+ [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
if (Constant *C = dyn_cast<Constant>(&RV))
if (C->isNullValue() || isa<UndefValue>(C))
return true;
/// See AbstractAttribute::updateImpl(...).
ChangeStatus updateImpl(Attributor &A) override;
- /// See AAIsDead::isAssumedDead().
+ /// See AAIsDead::isAssumedDead(BasicBlock *).
bool isAssumedDead(BasicBlock *BB) const override {
+ assert(BB->getParent() == &getAnchorScope() &&
+ "BB must be in the same anchor scope function.");
+
if (!getAssumed())
return false;
return !AssumedLiveBlocks.count(BB);
}
- /// See AAIsDead::isKnownDead().
+ /// See AAIsDead::isKnownDead(BasicBlock *).
bool isKnownDead(BasicBlock *BB) const override {
- if (!getKnown())
+ return getKnown() && isAssumedDead(BB);
+ }
+
+ /// See AAIsDead::isAssumedDead(Instruction *I).
+ bool isAssumedDead(Instruction *I) const override {
+ assert(I->getParent()->getParent() == &getAnchorScope() &&
+ "Instruction must be in the same anchor scope function.");
+
+ if (!getAssumed())
return false;
- return !AssumedLiveBlocks.count(BB);
+
+ // If it is not in AssumedLiveBlocks then it is for sure dead.
+ // Otherwise, it can still be after a noreturn call in a live block.
+ if (!AssumedLiveBlocks.count(I->getParent()))
+ return true;
+
+ // If it is not after a noreturn call, then it is live.
+ if (!isAfterNoReturn(I))
+ return false;
+
+ // Definitely dead.
+ return true;
}
+ /// See AAIsDead::isKnownDead(Instruction *I).
+ bool isKnownDead(Instruction *I) const override {
+ return getKnown() && isAssumedDead(I);
+ }
+
+ /// Check if instruction is after noreturn call, in other words, assumed dead.
+ bool isAfterNoReturn(Instruction *I) const;
+
/// Collection of to be explored paths.
SmallSetVector<Instruction *, 8> ToBeExploredPaths;
SmallSetVector<Instruction *, 4> NoReturnCalls;
};
+bool AAIsDeadFunction::isAfterNoReturn(Instruction *I) const {
+  // Walk backwards over the instructions preceding I in its basic block; if
+  // any of them is a recorded noreturn call site (NoReturnCalls), control can
+  // never reach I, hence I is assumed dead.
+  Instruction *PrevI = I->getPrevNode();
+  while (PrevI) {
+    if (NoReturnCalls.count(PrevI))
+      return true;
+    PrevI = PrevI->getPrevNode();
+  }
+  return false;
+}
+
bool AAIsDeadFunction::explorePath(Attributor &A, Instruction *I) {
BasicBlock *BB = I->getParent();
NoReturnCalls.remove(I);
+ // At least one new path.
+ Status = ChangeStatus::CHANGED;
+
// No new paths.
if (Size == ToBeExploredPaths.size())
continue;
- // At least one new path.
- Status = ChangeStatus::CHANGED;
-
// explore new paths.
while (Size != ToBeExploredPaths.size())
explorePath(A, ToBeExploredPaths[Size++]);
LLVM_DEBUG(
dbgs() << "[AAIsDead] AssumedLiveBlocks: " << AssumedLiveBlocks.size()
- << "Total number of blocks: " << getAnchorScope().size() << "\n");
+ << " Total number of blocks: " << getAnchorScope().size() << "\n");
return Status;
}
bool IsNonNull = isAssumedNonNull();
bool IsGlobal = isAssumedGlobal();
- std::function<bool(Value &)> Pred = [&](Value &RV) -> bool {
+ std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
+ [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
takeAssumedDerefBytesMinimum(
computeAssumedDerefenceableBytes(A, RV, IsNonNull, IsGlobal));
return isValidState();
return isValidState();
};
- if (!A.checkForAllCallSites(F, CallSiteCheck, true)) {
+ if (!A.checkForAllCallSites(F, CallSiteCheck, true, *this)) {
indicatePessimisticFixpoint();
return ChangeStatus::CHANGED;
}
// optimistic fixpoint is reached earlier.
base_t BeforeState = getAssumed();
- std::function<bool(Value &)> Pred = [&](Value &RV) -> bool {
+ std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
+ [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
auto *AlignAA = A.getAAFor<AAAlign>(*this, RV);
if (AlignAA)
return isValidState();
};
- if (!A.checkForAllCallSites(F, CallSiteCheck, true))
+ if (!A.checkForAllCallSites(F, CallSiteCheck, true, *this))
indicatePessimisticFixpoint();
return BeforeState == getAssumed() ? ChangeStatus::UNCHANGED
bool Attributor::checkForAllCallSites(Function &F,
std::function<bool(CallSite)> &Pred,
- bool RequireAllCallSites) {
+ bool RequireAllCallSites,
+ AbstractAttribute &AA) {
// We can try to determine information from
// the call sites. However, this is only possible all call sites are known,
// hence the function has internal linkage.
}
for (const Use &U : F.uses()) {
+ Instruction *I = cast<Instruction>(U.getUser());
+ Function *AnchorValue = I->getParent()->getParent();
+
+ auto *LivenessAA = getAAFor<AAIsDead>(AA, *AnchorValue);
+
+ // Skip dead calls.
+ if (LivenessAA && LivenessAA->isAssumedDead(I))
+ continue;
CallSite CS(U.getUser());
if (!CS || !CS.isCallee(&U) || !CS.getCaller()->hasExactDefinition()) {
; RUN: opt -attributor --attributor-disable=false -S < %s | FileCheck %s
-declare void @no_return_call() noreturn
+declare void @no_return_call() nofree noreturn nounwind readnone
-declare void @normal_call()
+declare void @normal_call() readnone
declare i32 @foo()
declare i32 @foo_noreturn() noreturn
-declare i32 @bar()
+declare i32 @bar() nosync readnone
-; TEST 1: cond.true is dead, but cond.end is not, since cond.false is live
+; Helper: returns the value loaded (volatile) through the pointer argument.
+; The volatile load keeps the access observable, so callers in dead blocks
+; below exercise the liveness-aware attribute deduction.
+; CHECK: Function Attrs: nofree norecurse nounwind uwtable willreturn
+define i32 @volatile_load(i32*) norecurse nounwind uwtable {
+  %2 = load volatile i32, i32* %0, align 4
+  ret i32 %2
+}
+
+; Internal helper doing a plain (dereferencing) load through its argument;
+; per the CHECK-NEXT line the attributor deduces a nonnull argument for it.
+; CHECK: Function Attrs: nofree norecurse nosync nounwind uwtable willreturn
+; CHECK-NEXT: define internal i32 @internal_load(i32* nonnull)
+define internal i32 @internal_load(i32*) norecurse nounwind uwtable {
+  %2 = load i32, i32* %0, align 4
+  ret i32 %2
+}
+; TEST 1: Only first block is live.
+
+; CHECK: Function Attrs: nofree nosync nounwind
+; CHECK-NEXT: define i32 @first_block_no_return(i32 %a, i32* nonnull %ptr1, i32* %ptr2)
+define i32 @first_block_no_return(i32 %a, i32* nonnull %ptr1, i32* %ptr2) #0 {
+entry:
+  call i32 @internal_load(i32* %ptr1)
+  ; CHECK: call i32 @internal_load(i32* nonnull %ptr1)
+  call void @no_return_call()
+  ; CHECK: call void @no_return_call()
+  ; CHECK-NEXT: unreachable
+  ; Everything below the noreturn call is assumed dead, so the calls in
+  ; cond.true/cond.false must not block the nofree/nosync/nounwind deduction
+  ; checked above.
+  %cmp = icmp eq i32 %a, 0
+  br i1 %cmp, label %cond.true, label %cond.false
+
+cond.true: ; preds = %entry
+  call i32 @internal_load(i32* %ptr2)
+  ; CHECK: call i32 @internal_load(i32* %ptr2)
+  %load = call i32 @volatile_load(i32* %ptr1)
+  call void @normal_call()
+  %call = call i32 @foo()
+  br label %cond.end
+
+cond.false: ; preds = %entry
+  call void @normal_call()
+  %call1 = call i32 @bar()
+  br label %cond.end
+
+cond.end: ; preds = %cond.false, %cond.true
+  %cond = phi i32 [ %call, %cond.true ], [ %call1, %cond.false ]
+  ret i32 %cond
+}
+
+; TEST 2: cond.true is dead, but cond.end is not, since cond.false is live
; This is just an example. For example we can put a sync call in a
; dead block and check if it is deduced.
-define i32 @dead_block_present(i32 %a) #0 {
+; CHECK: Function Attrs: nosync
+; CHECK-NEXT: define i32 @dead_block_present(i32 %a, i32* %ptr1)
+define i32 @dead_block_present(i32 %a, i32* %ptr1) #0 {
entry:
%cmp = icmp eq i32 %a, 0
br i1 %cmp, label %cond.true, label %cond.false
call void @no_return_call()
; CHECK: call void @no_return_call()
; CHECK-NEXT: unreachable
- %call = call i32 @foo()
+ %call = call i32 @volatile_load(i32* %ptr1)
br label %cond.end
cond.false: ; preds = %entry
ret i32 %cond
}
-; TEST 2: both cond.true and cond.false are dead, therfore cond.end is dead as well.
+; TEST 3: both cond.true and cond.false are dead, therefore cond.end is dead as well.
define i32 @all_dead(i32 %a) #0 {
entry:
declare i32 @__gxx_personality_v0(...)
-; TEST 3: All blocks are live.
+; TEST 4: All blocks are live.
; CHECK: define i32 @all_live(i32 %a)
define i32 @all_live(i32 %a) #0 {
ret i32 %cond
}
-; TEST 4 noreturn invoke instruction replaced by a call and an unreachable instruction
+; TEST 5 noreturn invoke instruction replaced by a call and an unreachable instruction
; put after it.
; CHECK: define i32 @invoke_noreturn(i32 %a)
ret i32 0
}
-; TEST 5: Undefined behvior, taken from LangRef.
+; TEST 6: Undefined behavior, taken from LangRef.
; FIXME: Should be able to detect undefined behavior.
; CHECK define @ub(i32)
br label %while.body
}
-; TEST 6: Infinite loop.
+; TEST 7: Infinite loop.
; FIXME: Detect infloops, and mark affected blocks dead.
define i32 @test5(i32, i32) #0 {
ret void
}
-; TEST 7: Recursion
+; TEST 8: Recursion
; FIXME: everything after first block should be marked dead
; and unreachable should be put after call to @rec().
%7 = phi i32 [ %1, %cond.elseif ], [ 0, %cond.else ], [ 0, %cond.if ]
ret i32 %7
}
-; TEST 8: Recursion
+; TEST 9: Recursion
; FIXME: contains recursive call to itself in cond.elseif block
define i32 @test7(i32, i32) #0 {
%8 = phi i32 [ %1, %cond.elseif ], [ 0, %cond.else ], [ 0, %cond.if ]
ret i32 %8
}
-
-; TEST 9: Only first block is live.
-
-define i32 @first_block_no_return(i32 %a) #0 {
-entry:
- call void @no_return_call()
- ; CHECK: call void @no_return_call()
- ; CHECK-NEXT: unreachable
- %cmp = icmp eq i32 %a, 0
- br i1 %cmp, label %cond.true, label %cond.false
-
-cond.true: ; preds = %entry
- call void @normal_call()
- %call = call i32 @foo()
- br label %cond.end
-
-cond.false: ; preds = %entry
- call void @normal_call()
- %call1 = call i32 @bar()
- br label %cond.end
-
-cond.end: ; preds = %cond.false, %cond.true
- %cond = phi i32 [ %call, %cond.true ], [ %call1, %cond.false ]
- ret i32 %cond
-}