/// returns true if module is modified.
bool UpgradeRetainReleaseMarker(Module &M);
+ /// Convert calls to ARC runtime functions to intrinsic calls if the bitcode
+ /// has the arm64 retainAutoreleasedReturnValue marker; calls to clang.arc.use
+ /// are converted unconditionally. Returns true if the module is modified.
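+ /// For example, a call to @objc_retain is rewritten as a call to
+ /// @llvm.objc.retain.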
+ bool UpgradeARCRuntimeCalls(Module &M);
+
void UpgradeSectionAttributes(Module &M);
/// If the given TBAA tag uses the scalar TBAA format, create a new node
UpgradeModuleFlags(*TheModule);
UpgradeRetainReleaseMarker(*TheModule);
+ UpgradeARCRuntimeCalls(*TheModule);
return Error::success();
}
static bool UpgradeIntrinsicFunction1(Function *F, Function *&NewFn) {
assert(F && "Illegal to upgrade a non-existent Function.");
- // Upgrade intrinsics "clang.arc.use" which doesn't start with "llvm.".
- if (F->getName() == "clang.arc.use") {
- NewFn = nullptr;
- return true;
- }
-
// Quickly eliminate it, if it's not a candidate.
StringRef Name = F->getName();
if (Name.size() <= 8 || !Name.startswith("llvm."))
// Get the Function's name.
StringRef Name = F->getName();
- // clang.arc.use is an old name for llvm.arc.clang.arc.use. It is dropped
- // from upgrader because the optimizer now only recognizes intrinsics for
- // ARC runtime calls.
- if (Name == "clang.arc.use") {
- CI->eraseFromParent();
- return;
- }
-
assert(Name.startswith("llvm.") && "Intrinsic doesn't start with 'llvm.'");
Name = Name.substr(5);
return Changed;
}
+bool llvm::UpgradeARCRuntimeCalls(Module &M) {
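+ // Helper: if the module declares the given runtime function, replace it with
+ // a declaration of the corresponding intrinsic, redirect all uses to it, and
+ // return true.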
+ auto UpgradeToIntrinsic = [&](const char *OldFunc,
+ llvm::Intrinsic::ID IntrinsicFunc) {
+ Function *Fn = M.getFunction(OldFunc);
+
+ if (!Fn)
+ return false;
+
+ Function *NewFn = llvm::Intrinsic::getDeclaration(&M, IntrinsicFunc);
+ Fn->replaceAllUsesWith(NewFn);
+ Fn->eraseFromParent();
+ return true;
+ };
+
+ // Unconditionally convert "clang.arc.use" to "llvm.objc.clang.arc.use".
+ bool Changed =
+ UpgradeToIntrinsic("clang.arc.use", llvm::Intrinsic::objc_clang_arc_use);
+
+ // Return if the bitcode doesn't have the arm64 retainAutoreleasedReturnValue
+ // marker. We don't know for sure that it was compiled with ARC in that case.
+ if (!M.getModuleFlag("clang.arc.retainAutoreleasedReturnValueMarker"))
+ return Changed;
+
+ std::pair<const char *, llvm::Intrinsic::ID> RuntimeFuncs[] = {
+ {"objc_autorelease", llvm::Intrinsic::objc_autorelease},
+ {"objc_autoreleasePoolPop", llvm::Intrinsic::objc_autoreleasePoolPop},
+ {"objc_autoreleasePoolPush", llvm::Intrinsic::objc_autoreleasePoolPush},
+ {"objc_autoreleaseReturnValue",
+ llvm::Intrinsic::objc_autoreleaseReturnValue},
+ {"objc_copyWeak", llvm::Intrinsic::objc_copyWeak},
+ {"objc_destroyWeak", llvm::Intrinsic::objc_destroyWeak},
+ {"objc_initWeak", llvm::Intrinsic::objc_initWeak},
+ {"objc_loadWeak", llvm::Intrinsic::objc_loadWeak},
+ {"objc_loadWeakRetained", llvm::Intrinsic::objc_loadWeakRetained},
+ {"objc_moveWeak", llvm::Intrinsic::objc_moveWeak},
+ {"objc_release", llvm::Intrinsic::objc_release},
+ {"objc_retain", llvm::Intrinsic::objc_retain},
+ {"objc_retainAutorelease", llvm::Intrinsic::objc_retainAutorelease},
+ {"objc_retainAutoreleaseReturnValue",
+ llvm::Intrinsic::objc_retainAutoreleaseReturnValue},
+ {"objc_retainAutoreleasedReturnValue",
+ llvm::Intrinsic::objc_retainAutoreleasedReturnValue},
+ {"objc_retainBlock", llvm::Intrinsic::objc_retainBlock},
+ {"objc_storeStrong", llvm::Intrinsic::objc_storeStrong},
+ {"objc_storeWeak", llvm::Intrinsic::objc_storeWeak},
+ {"objc_unsafeClaimAutoreleasedReturnValue",
+ llvm::Intrinsic::objc_unsafeClaimAutoreleasedReturnValue},
+ {"objc_retainedObject", llvm::Intrinsic::objc_retainedObject},
+ {"objc_unretainedObject", llvm::Intrinsic::objc_unretainedObject},
+ {"objc_unretainedPointer", llvm::Intrinsic::objc_unretainedPointer},
+ {"objc_retain_autorelease", llvm::Intrinsic::objc_retain_autorelease},
+ {"objc_sync_enter", llvm::Intrinsic::objc_sync_enter},
+ {"objc_sync_exit", llvm::Intrinsic::objc_sync_exit},
+ {"objc_arc_annotation_topdown_bbstart",
+ llvm::Intrinsic::objc_arc_annotation_topdown_bbstart},
+ {"objc_arc_annotation_topdown_bbend",
+ llvm::Intrinsic::objc_arc_annotation_topdown_bbend},
+ {"objc_arc_annotation_bottomup_bbstart",
+ llvm::Intrinsic::objc_arc_annotation_bottomup_bbstart},
+ {"objc_arc_annotation_bottomup_bbend",
+ llvm::Intrinsic::objc_arc_annotation_bottomup_bbend}};
+
+ for (auto &I : RuntimeFuncs)
+ Changed |= UpgradeToIntrinsic(I.first, I.second);
+
+ return Changed;
+}
+
bool llvm::UpgradeModuleFlags(Module &M) {
NamedMDNode *ModFlags = M.getModuleFlagsMetadata();
if (!ModFlags)
--- /dev/null
+; Test that calls to ARC runtime functions are converted to intrinsic calls if
+; the bitcode has the arm64 retainAutoreleasedReturnValueMarker metadata.
+
+; upgrade-arc-runtime-calls.bc and upgrade-mrr-runtime-calls.bc are identical
+; except that the former has the arm64 retainAutoreleasedReturnValueMarker
+; metadata.
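+;
+; After upgrade, the ARC module carries that marker as a module flag along
+; these lines (the marker string shown here is illustrative):
+;
+;   !llvm.module.flags = !{!0}
+;   !0 = !{i32 1, !"clang.arc.retainAutoreleasedReturnValueMarker", !"mov\09fp, fp"}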
+
+; RUN: llvm-dis < %S/upgrade-arc-runtime-calls.bc | FileCheck -check-prefixes=ARC %s
+; RUN: llvm-dis < %S/upgrade-mrr-runtime-calls.bc | FileCheck -check-prefixes=MRR %s
+
+// ARC: define void @testRuntimeCalls(i8* %[[A:.*]], i8** %[[B:.*]], i8** %[[C:.*]]) {
+// ARC: %[[V0:.*]] = tail call i8* @llvm.objc.autorelease(i8* %[[A]])
+// ARC-NEXT: tail call void @llvm.objc.autoreleasePoolPop(i8* %[[A]])
+// ARC-NEXT: %[[V1:.*]] = tail call i8* @llvm.objc.autoreleasePoolPush()
+// ARC-NEXT: %[[V2:.*]] = tail call i8* @llvm.objc.autoreleaseReturnValue(i8* %[[A]])
+// ARC-NEXT: tail call void @llvm.objc.copyWeak(i8** %[[B]], i8** %[[C]])
+// ARC-NEXT: tail call void @llvm.objc.destroyWeak(i8** %[[B]])
+// ARC-NEXT: %[[V3:.*]] = tail call i8* @llvm.objc.initWeak(i8** %[[B]], i8* %[[A]])
+// ARC-NEXT: %[[V4:.*]] = tail call i8* @llvm.objc.loadWeak(i8** %[[B]])
+// ARC-NEXT: %[[V5:.*]] = tail call i8* @llvm.objc.loadWeakRetained(i8** %[[B]])
+// ARC-NEXT: tail call void @llvm.objc.moveWeak(i8** %[[B]], i8** %[[C]])
+// ARC-NEXT: tail call void @llvm.objc.release(i8* %[[A]])
+// ARC-NEXT: %[[V6:.*]] = tail call i8* @llvm.objc.retain(i8* %[[A]])
+// ARC-NEXT: %[[V7:.*]] = tail call i8* @llvm.objc.retainAutorelease(i8* %[[A]])
+// ARC-NEXT: %[[V8:.*]] = tail call i8* @llvm.objc.retainAutoreleaseReturnValue(i8* %[[A]])
+// ARC-NEXT: %[[V9:.*]] = tail call i8* @llvm.objc.retainAutoreleasedReturnValue(i8* %[[A]])
+// ARC-NEXT: %[[V10:.*]] = tail call i8* @llvm.objc.retainBlock(i8* %[[A]])
+// ARC-NEXT: tail call void @llvm.objc.storeStrong(i8** %[[B]], i8* %[[A]])
+// ARC-NEXT: %[[V11:.*]] = tail call i8* @llvm.objc.storeWeak(i8** %[[B]], i8* %[[A]])
+// ARC-NEXT: tail call void (...) @llvm.objc.clang.arc.use(i8* %[[A]])
+// ARC-NEXT: %[[V12:.*]] = tail call i8* @llvm.objc.unsafeClaimAutoreleasedReturnValue(i8* %[[A]])
+// ARC-NEXT: %[[V13:.*]] = tail call i8* @llvm.objc.retainedObject(i8* %[[A]])
+// ARC-NEXT: %[[V14:.*]] = tail call i8* @llvm.objc.unretainedObject(i8* %[[A]])
+// ARC-NEXT: %[[V15:.*]] = tail call i8* @llvm.objc.unretainedPointer(i8* %[[A]])
+// ARC-NEXT: %[[V16:.*]] = tail call i8* @llvm.objc.retain.autorelease(i8* %[[A]])
+// ARC-NEXT: %[[V17:.*]] = tail call i32 @llvm.objc.sync.enter(i8* %[[A]])
+// ARC-NEXT: %[[V18:.*]] = tail call i32 @llvm.objc.sync.exit(i8* %[[A]])
+// ARC-NEXT: tail call void @llvm.objc.arc.annotation.topdown.bbstart(i8** %[[B]], i8** %[[C]])
+// ARC-NEXT: tail call void @llvm.objc.arc.annotation.topdown.bbend(i8** %[[B]], i8** %[[C]])
+// ARC-NEXT: tail call void @llvm.objc.arc.annotation.bottomup.bbstart(i8** %[[B]], i8** %[[C]])
+// ARC-NEXT: tail call void @llvm.objc.arc.annotation.bottomup.bbend(i8** %[[B]], i8** %[[C]])
+// ARC-NEXT: ret void
+
+// MRR: define void @testRuntimeCalls(i8* %[[A:.*]], i8** %[[B:.*]], i8** %[[C:.*]]) {
+// MRR: %[[V0:.*]] = tail call i8* @objc_autorelease(i8* %[[A]])
+// MRR-NEXT: tail call void @objc_autoreleasePoolPop(i8* %[[A]])
+// MRR-NEXT: %[[V1:.*]] = tail call i8* @objc_autoreleasePoolPush()
+// MRR-NEXT: %[[V2:.*]] = tail call i8* @objc_autoreleaseReturnValue(i8* %[[A]])
+// MRR-NEXT: tail call void @objc_copyWeak(i8** %[[B]], i8** %[[C]])
+// MRR-NEXT: tail call void @objc_destroyWeak(i8** %[[B]])
+// MRR-NEXT: %[[V3:.*]] = tail call i8* @objc_initWeak(i8** %[[B]], i8* %[[A]])
+// MRR-NEXT: %[[V4:.*]] = tail call i8* @objc_loadWeak(i8** %[[B]])
+// MRR-NEXT: %[[V5:.*]] = tail call i8* @objc_loadWeakRetained(i8** %[[B]])
+// MRR-NEXT: tail call void @objc_moveWeak(i8** %[[B]], i8** %[[C]])
+// MRR-NEXT: tail call void @objc_release(i8* %[[A]])
+// MRR-NEXT: %[[V6:.*]] = tail call i8* @objc_retain(i8* %[[A]])
+// MRR-NEXT: %[[V7:.*]] = tail call i8* @objc_retainAutorelease(i8* %[[A]])
+// MRR-NEXT: %[[V8:.*]] = tail call i8* @objc_retainAutoreleaseReturnValue(i8* %[[A]])
+// MRR-NEXT: %[[V9:.*]] = tail call i8* @objc_retainAutoreleasedReturnValue(i8* %[[A]])
+// MRR-NEXT: %[[V10:.*]] = tail call i8* @objc_retainBlock(i8* %[[A]])
+// MRR-NEXT: tail call void @objc_storeStrong(i8** %[[B]], i8* %[[A]])
+// MRR-NEXT: %[[V11:.*]] = tail call i8* @objc_storeWeak(i8** %[[B]], i8* %[[A]])
+// MRR-NEXT: tail call void (...) @llvm.objc.clang.arc.use(i8* %[[A]])
+// MRR-NEXT: %[[V12:.*]] = tail call i8* @objc_unsafeClaimAutoreleasedReturnValue(i8* %[[A]])
+// MRR-NEXT: %[[V13:.*]] = tail call i8* @objc_retainedObject(i8* %[[A]])
+// MRR-NEXT: %[[V14:.*]] = tail call i8* @objc_unretainedObject(i8* %[[A]])
+// MRR-NEXT: %[[V15:.*]] = tail call i8* @objc_unretainedPointer(i8* %[[A]])
+// MRR-NEXT: %[[V16:.*]] = tail call i8* @objc_retain_autorelease(i8* %[[A]])
+// MRR-NEXT: %[[V17:.*]] = tail call i32 @objc_sync_enter(i8* %[[A]])
+// MRR-NEXT: %[[V18:.*]] = tail call i32 @objc_sync_exit(i8* %[[A]])
+// MRR-NEXT: tail call void @objc_arc_annotation_topdown_bbstart(i8** %[[B]], i8** %[[C]])
+// MRR-NEXT: tail call void @objc_arc_annotation_topdown_bbend(i8** %[[B]], i8** %[[C]])
+// MRR-NEXT: tail call void @objc_arc_annotation_bottomup_bbstart(i8** %[[B]], i8** %[[C]])
+// MRR-NEXT: tail call void @objc_arc_annotation_bottomup_bbend(i8** %[[B]], i8** %[[C]])
+// MRR-NEXT: ret void
-; Test upgrade of clang.arc.use by removing it.
+; Test upgrade of clang.arc.use by upgrading to llvm.objc.clang.arc.use.
; Bitcode input generated from llvm 6.0
; RUN: llvm-dis %s.bc -o - | FileCheck %s
%0 = type opaque
define void @foo() {
%1 = tail call %0* @foo0()
-; CHECK-NOT: clang.arc.use
+; CHECK: call void (...) @llvm.objc.clang.arc.use(
call void (...) @clang.arc.use(%0* %1)
ret void
}