From: Robin Morisset <morisset@google.com>
Date: Mon, 18 Aug 2014 22:18:11 +0000 (+0000)
Subject: Slightly relax the constraints on atomics in MemoryDependenceAnalysis
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=6c0e1e0fa658f4e7466c6787aedce992ece2db55;p=llvm

Slightly relax the constraints on atomics in MemoryDependenceAnalysis

Monotonic accesses no longer kill the analysis, as long as the query instruction
(QueryInst) is not itself atomic.
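
For illustration, a minimal IR sketch of what this enables, mirroring the new
test9 below (it assumes @x and @y are ordinary i32 globals, as declared in the
test file):

  define i32 @example() {
  entry:
    store i32 0, i32* @x                          ; now eliminable by DSE
    %v = load atomic i32* @y monotonic, align 4   ; no longer reported as a clobber
    store i32 1, i32* @x
    ret i32 %v
  }

The first store is dead; before this change the intervening monotonic load was
conservatively treated as a clobber, which blocked the elimination.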

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@215942 91177308-0d34-0410-b5e6-96231b3b80d8
---

diff --git a/lib/Analysis/MemoryDependenceAnalysis.cpp b/lib/Analysis/MemoryDependenceAnalysis.cpp
index 59669372719..33fe425f135 100644
--- a/lib/Analysis/MemoryDependenceAnalysis.cpp
+++ b/lib/Analysis/MemoryDependenceAnalysis.cpp
@@ -409,9 +409,18 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
     // a load depends on another must aliased load from the same value.
     if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
       // Atomic loads have complications involved.
+      // A monotonic load is OK if the query inst is not itself atomic.
       // FIXME: This is overly conservative.
-      if (!LI->isUnordered())
-        return MemDepResult::getClobber(LI);
+      if (!LI->isUnordered()) {
+        if (!QueryInst || LI->getOrdering() != Monotonic)
+          return MemDepResult::getClobber(LI);
+        if (auto *QueryLI = dyn_cast<LoadInst>(QueryInst))
+          if (!QueryLI->isUnordered())
+            return MemDepResult::getClobber(LI);
+        if (auto *QuerySI = dyn_cast<StoreInst>(QueryInst))
+          if (!QuerySI->isUnordered())
+            return MemDepResult::getClobber(LI);
+      }
 
       AliasAnalysis::Location LoadLoc = AA->getLocation(LI);
 
@@ -469,9 +478,18 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
 
     if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
       // Atomic stores have complications involved.
+      // A monotonic store is OK if the query inst is not itself atomic.
       // FIXME: This is overly conservative.
-      if (!SI->isUnordered())
-        return MemDepResult::getClobber(SI);
+      if (!SI->isUnordered()) {
+        if (!QueryInst || SI->getOrdering() != Monotonic)
+          return MemDepResult::getClobber(SI);
+        if (auto *QueryLI = dyn_cast<LoadInst>(QueryInst))
+          if (!QueryLI->isUnordered())
+            return MemDepResult::getClobber(SI);
+        if (auto *QuerySI = dyn_cast<StoreInst>(QueryInst))
+          if (!QuerySI->isUnordered())
+            return MemDepResult::getClobber(SI);
+      }
 
       // If alias analysis can tell that this store is guaranteed to not modify
       // the query pointer, ignore it.  Use getModRefInfo to handle cases where
diff --git a/test/Transforms/DeadStoreElimination/atomic.ll b/test/Transforms/DeadStoreElimination/atomic.ll
index 2e84298ad40..621958dbb79 100644
--- a/test/Transforms/DeadStoreElimination/atomic.ll
+++ b/test/Transforms/DeadStoreElimination/atomic.ll
@@ -105,3 +105,50 @@ entry:
   ret i32 %x
 }
 
+; DSE across monotonic load (allowed as long as the eliminated store isUnordered)
+define i32 @test9()  nounwind uwtable ssp {
+; CHECK: test9
+; CHECK-NOT: store i32 0
+; CHECK: store i32 1
+entry:
+  store i32 0, i32* @x
+  %x = load atomic i32* @y monotonic, align 4
+  store i32 1, i32* @x
+  ret i32 %x
+}
+
+; DSE across monotonic store (allowed as long as the eliminated store isUnordered)
+define void @test10()  nounwind uwtable ssp {
+; CHECK: test10
+; CHECK-NOT: store i32 0
+; CHECK: store i32 1
+entry:
+  store i32 0, i32* @x
+  store atomic i32 42, i32* @y monotonic, align 4
+  store i32 1, i32* @x
+  ret void
+}
+
+; DSE across monotonic load (forbidden since the eliminated store is atomic)
+define i32 @test11()  nounwind uwtable ssp {
+; CHECK: test11
+; CHECK: store atomic i32 0
+; CHECK: store atomic i32 1
+entry:
+  store atomic i32 0, i32* @x monotonic, align 4
+  %x = load atomic i32* @y monotonic, align 4
+  store atomic i32 1, i32* @x monotonic, align 4
+  ret i32 %x
+}
+
+; DSE across monotonic store (forbidden since the eliminated store is atomic)
+define void @test12()  nounwind uwtable ssp {
+; CHECK: test12
+; CHECK: store atomic i32 0
+; CHECK: store atomic i32 1
+entry:
+  store atomic i32 0, i32* @x monotonic, align 4
+  store atomic i32 42, i32* @y monotonic, align 4
+  store atomic i32 1, i32* @x monotonic, align 4
+  ret void
+}