granicus.if.org Git - postgresql/commitdiff
Fix issues in e8fdbd58fe.
authorAndres Freund <andres@anarazel.de>
Sat, 8 Apr 2017 00:03:21 +0000 (17:03 -0700)
committerAndres Freund <andres@anarazel.de>
Sat, 8 Apr 2017 00:09:03 +0000 (17:09 -0700)
When the 64bit atomics simulation is in use, we can't necessarily
guarantee the correct alignment of the atomics due to lack of compiler
support for doing so - that's fine from a safety perspective, because
everything is protected by a lock, but we asserted the alignment in
all cases.  Weaken them.  Per complaint from Alvaro Herrera.

My #ifdefery for PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY wasn't
sufficient. Fix that.  Per complaint from Alexander Korotkov.

src/include/port/atomics.h
src/include/port/atomics/generic.h

index 5f3d2662987eeaf5bc2ac230ec1fe7f56c0f2fd8..a6ef0f0f6ae6a75b707dbe622e99f1048eb6ce4e 100644 (file)
@@ -425,30 +425,41 @@ pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
 static inline void
 pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
+       /*
+        * Can't necessarily enforce alignment - and don't need it - when using
+        * the spinlock based fallback implementation. Therefore only assert when
+        * not using it.
+        */
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
-
+#endif
        pg_atomic_init_u64_impl(ptr, val);
 }
 
 static inline uint64
 pg_atomic_read_u64(volatile pg_atomic_uint64 *ptr)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
+#endif
        return pg_atomic_read_u64_impl(ptr);
 }
 
 static inline void
 pg_atomic_write_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
+#endif
        pg_atomic_write_u64_impl(ptr, val);
 }
 
 static inline uint64
 pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
-
+#endif
        return pg_atomic_exchange_u64_impl(ptr, newval);
 }
 
@@ -456,22 +467,28 @@ static inline bool
 pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr,
                                                           uint64 *expected, uint64 newval)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
        AssertPointerAlignment(expected, 8);
+#endif
        return pg_atomic_compare_exchange_u64_impl(ptr, expected, newval);
 }
 
 static inline uint64
 pg_atomic_fetch_add_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
+#endif
        return pg_atomic_fetch_add_u64_impl(ptr, add_);
 }
 
 static inline uint64
 pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
+#endif
        Assert(sub_ != PG_INT64_MIN);
        return pg_atomic_fetch_sub_u64_impl(ptr, sub_);
 }
@@ -479,28 +496,36 @@ pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 static inline uint64
 pg_atomic_fetch_and_u64(volatile pg_atomic_uint64 *ptr, uint64 and_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
+#endif
        return pg_atomic_fetch_and_u64_impl(ptr, and_);
 }
 
 static inline uint64
 pg_atomic_fetch_or_u64(volatile pg_atomic_uint64 *ptr, uint64 or_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
+#endif
        return pg_atomic_fetch_or_u64_impl(ptr, or_);
 }
 
 static inline uint64
 pg_atomic_add_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
+#endif
        return pg_atomic_add_fetch_u64_impl(ptr, add_);
 }
 
 static inline uint64
 pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 {
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
        AssertPointerAlignment(ptr, 8);
+#endif
        Assert(sub_ != PG_INT64_MIN);
        return pg_atomic_sub_fetch_u64_impl(ptr, sub_);
 }
index c0942482fc52484d42d53bd0a8474a0d5f17c4d8..424543604ab1dd94f0169fd8a8dec3fe537d9371 100644 (file)
@@ -271,26 +271,26 @@ pg_atomic_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 xchg_)
 }
 #endif
 
-#ifndef PG_HAVE_ATOMIC_READ_U64
-#define PG_HAVE_ATOMIC_READ_U64
-static inline uint64
-pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
-{
-       return *(&ptr->value);
-}
-#endif
-
 #ifndef PG_HAVE_ATOMIC_WRITE_U64
 #define PG_HAVE_ATOMIC_WRITE_U64
+
+#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
+       !defined(PG_HAVE_ATOMIC_U64_SIMULATION)
+
 static inline void
 pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
+       /*
+        * On this platform aligned 64bit writes are guaranteed to be atomic,
+        * except if using the fallback implementation, where can't guarantee the
+        * required alignment.
+        */
+       AssertPointerAlignment(ptr, 8);
        ptr->value = val;
 }
-#endif
 
-#ifndef PG_HAVE_ATOMIC_WRITE_U64
-#define PG_HAVE_ATOMIC_WRITE_U64
+#else
+
 static inline void
 pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
@@ -300,10 +300,30 @@ pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
         */
        pg_atomic_exchange_u64_impl(ptr, val);
 }
-#endif
+
+#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
+#endif /* PG_HAVE_ATOMIC_WRITE_U64 */
 
 #ifndef PG_HAVE_ATOMIC_READ_U64
 #define PG_HAVE_ATOMIC_READ_U64
+
+#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
+       !defined(PG_HAVE_ATOMIC_U64_SIMULATION)
+
+static inline uint64
+pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
+{
+       /*
+        * On this platform aligned 64bit reads are guaranteed to be atomic,
+        * except if using the fallback implementation, where can't guarantee the
+        * required alignment.
+        */
+       AssertPointerAlignment(ptr, 8);
+       return *(&ptr->value);
+}
+
+#else
+
 static inline uint64
 pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
 {
@@ -319,7 +339,8 @@ pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
 
        return old;
 }
-#endif
+#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
+#endif /* PG_HAVE_ATOMIC_READ_U64 */
 
 #ifndef PG_HAVE_ATOMIC_INIT_U64
 #define PG_HAVE_ATOMIC_INIT_U64