RecursivelyDeleteTriviallyDeadInstructions(OldArg);
}
+/// If Inst is a RetainRV (objc_retainAutoreleasedReturnValue) and its operand
+/// is a call or invoke, return the operand. Otherwise return null.
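+/// For example, given
+///   %call = call i8* @foo()
+///   %0 = tail call i8* @objc_retainAutoreleasedReturnValue(i8* %call)
+/// passing the second instruction returns the call to @foo (an illustrative
+/// callee, not part of this patch).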
+static inline const Instruction *getRetainRVOperand(const Instruction &Inst,
+                                                    ARCInstKind Class) {
+  if (Class != ARCInstKind::RetainRV)
+    return nullptr;
+
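+  // The retained value may be a bitcast of the call's result, so look through
+  // pointer casts to find the instruction that produced it.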
+  const auto *Opnd = Inst.getOperand(0)->stripPointerCasts();
+  if (const auto *C = dyn_cast<CallInst>(Opnd))
+    return C;
+  return dyn_cast<InvokeInst>(Opnd);
+}
+
} // end namespace objcarc
} // end namespace llvm
                                           const Value *Ptr,
                                           ProvenanceAnalysis &PA,
                                           ARCInstKind Class) {
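+  // Shared helper for the cases below: advance the sequence state and record
+  // the point at which code moved past this instruction would have to be
+  // re-inserted.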
+  auto SetSeqAndInsertReverseInsertPt = [&](Sequence NewSeq) {
+    assert(!HasReverseInsertPts());
+    SetSeq(NewSeq);
+    // If this is an invoke instruction, we're scanning it as part of
+    // one of its successor blocks, since we can't insert code after it
+    // in its own block, and we don't want to split critical edges.
+    if (isa<InvokeInst>(Inst))
+      InsertReverseInsertPt(&*BB->getFirstInsertionPt());
+    else
+      InsertReverseInsertPt(&*++Inst->getIterator());
+  };
+
   // Check for possible direct uses.
   switch (GetSeq()) {
   case S_Release:
     if (CanUse(Inst, Ptr, PA, Class)) {
       DEBUG(dbgs() << " CanUse: Seq: " << GetSeq() << "; " << *Ptr
                    << "\n");
-      assert(!HasReverseInsertPts());
-      // If this is an invoke instruction, we're scanning it as part of
-      // one of its successor blocks, since we can't insert code after it
-      // in its own block, and we don't want to split critical edges.
-      if (isa<InvokeInst>(Inst))
-        InsertReverseInsertPt(&*BB->getFirstInsertionPt());
-      else
-        InsertReverseInsertPt(&*++Inst->getIterator());
-      SetSeq(S_Use);
+      SetSeqAndInsertReverseInsertPt(S_Use);
     } else if (Seq == S_Release && IsUser(Class)) {
       DEBUG(dbgs() << " PreciseReleaseUse: Seq: " << GetSeq() << "; "
                    << *Ptr << "\n");
       // Non-movable releases depend on any possible objc pointer use.
-      SetSeq(S_Stop);
-      assert(!HasReverseInsertPts());
-      // As above; handle invoke specially.
-      if (isa<InvokeInst>(Inst))
-        InsertReverseInsertPt(&*BB->getFirstInsertionPt());
-      else
-        InsertReverseInsertPt(&*++Inst->getIterator());
+      SetSeqAndInsertReverseInsertPt(S_Stop);
+    } else if (const auto *Call = getRetainRVOperand(*Inst, Class)) {
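+      // Inst is a RetainRV whose operand comes from a call. Sinking a release
+      // between that call and the RetainRV could release the object
+      // prematurely, so treat the call as a potential use and stop here.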
+      if (CanUse(Call, Ptr, PA, GetBasicARCInstKind(Call))) {
+        DEBUG(dbgs() << " ReleaseUse: Seq: " << GetSeq() << "; "
+                     << *Ptr << "\n");
+        SetSeqAndInsertReverseInsertPt(S_Stop);
+      }
     }
     break;
   case S_Stop:
ret {}* %s
}
+declare i8* @first_test25()
+declare i8* @second_test25(i8*)
+declare void @somecall_test25()
+
+; The ARC optimizer used to move the last release between the call to
+; second_test25 and the call to objc_retainAutoreleasedReturnValue, causing
+; %second to be released prematurely when %first and %second were pointing to
+; the same object.
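+; The releases carry !clang.imprecise_release metadata, which is what permits
+; this kind of code motion in the first place.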
+
+; CHECK-LABEL: define void @test25(
+; CHECK: %[[CALL1:.*]] = call i8* @second_test25(
+; CHECK-NEXT: tail call i8* @objc_retainAutoreleasedReturnValue(i8* %[[CALL1]])
+
+define void @test25() {
+  %first = call i8* @first_test25()
+  %v0 = call i8* @objc_retain(i8* %first)
+  call void @somecall_test25()
+  %second = call i8* @second_test25(i8* %first)
+  %call2 = call i8* @objc_retainAutoreleasedReturnValue(i8* %second)
+  call void @objc_release(i8* %second), !clang.imprecise_release !0
+  call void @objc_release(i8* %first), !clang.imprecise_release !0
+  ret void
+}
+
+!0 = !{}
+
; CHECK: attributes [[NUW]] = { nounwind }