static inline void
pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
{
+ /*
+ * Can't necessarily enforce alignment - and don't need it - when using
+ * the spinlock based fallback implementation. Therefore only assert when
+ * not using it.
+ */
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
-
+#endif
pg_atomic_init_u64_impl(ptr, val);
}
static inline uint64
pg_atomic_read_u64(volatile pg_atomic_uint64 *ptr)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
+#endif
return pg_atomic_read_u64_impl(ptr);
}
static inline void
pg_atomic_write_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
+#endif
pg_atomic_write_u64_impl(ptr, val);
}
static inline uint64
pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
-
+#endif
return pg_atomic_exchange_u64_impl(ptr, newval);
}
static inline bool
pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr,
uint64 *expected, uint64 newval)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
AssertPointerAlignment(expected, 8);
+#endif
return pg_atomic_compare_exchange_u64_impl(ptr, expected, newval);
}
static inline uint64
pg_atomic_fetch_add_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
+#endif
return pg_atomic_fetch_add_u64_impl(ptr, add_);
}
static inline uint64
pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
+#endif
Assert(sub_ != PG_INT64_MIN);
return pg_atomic_fetch_sub_u64_impl(ptr, sub_);
}
static inline uint64
pg_atomic_fetch_and_u64(volatile pg_atomic_uint64 *ptr, uint64 and_)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
+#endif
return pg_atomic_fetch_and_u64_impl(ptr, and_);
}
static inline uint64
pg_atomic_fetch_or_u64(volatile pg_atomic_uint64 *ptr, uint64 or_)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
+#endif
return pg_atomic_fetch_or_u64_impl(ptr, or_);
}
static inline uint64
pg_atomic_add_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
+#endif
return pg_atomic_add_fetch_u64_impl(ptr, add_);
}
static inline uint64
pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
{
+#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
AssertPointerAlignment(ptr, 8);
+#endif
Assert(sub_ != PG_INT64_MIN);
return pg_atomic_sub_fetch_u64_impl(ptr, sub_);
}
#endif
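For reference, a minimal usage sketch of the 64-bit wrappers above (illustrative only, not part of the patch; the variable and function names are hypothetical):

#include "port/atomics.h"

/* Illustrative sketch, not part of the patch; "pending_count" is hypothetical. */
static pg_atomic_uint64 pending_count;

static void
pending_count_demo(void)
{
	/* initialize once, before any concurrent access */
	pg_atomic_init_u64(&pending_count, 0);

	/* fetch_add returns the value held *before* the addition */
	(void) pg_atomic_fetch_add_u64(&pending_count, 1);

	/* plain read and write; neither implies barrier semantics */
	(void) pg_atomic_read_u64(&pending_count);
	pg_atomic_write_u64(&pending_count, 0);
}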
-#ifndef PG_HAVE_ATOMIC_READ_U64
-#define PG_HAVE_ATOMIC_READ_U64
-static inline uint64
-pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
-{
- return *(&ptr->value);
-}
-#endif
-
#ifndef PG_HAVE_ATOMIC_WRITE_U64
#define PG_HAVE_ATOMIC_WRITE_U64
+
+#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
+ !defined(PG_HAVE_ATOMIC_U64_SIMULATION)
+
static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
+ /*
+ * On this platform aligned 64bit writes are guaranteed to be atomic,
+ * except if using the fallback implementation, where we can't guarantee
+ * the required alignment.
+ */
+ AssertPointerAlignment(ptr, 8);
ptr->value = val;
}
-#endif
-#ifndef PG_HAVE_ATOMIC_WRITE_U64
-#define PG_HAVE_ATOMIC_WRITE_U64
+#else
+
static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
/*
 * 64 bit writes aren't safe on all platforms, so the generic
 * implementation performs them as an atomic exchange.
 */
pg_atomic_exchange_u64_impl(ptr, val);
}
-#endif
+
+#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
+#endif /* PG_HAVE_ATOMIC_WRITE_U64 */
#ifndef PG_HAVE_ATOMIC_READ_U64
#define PG_HAVE_ATOMIC_READ_U64
+
+#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
+ !defined(PG_HAVE_ATOMIC_U64_SIMULATION)
+
+static inline uint64
+pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
+{
+ /*
+ * On this platform aligned 64bit reads are guaranteed to be atomic,
+ * except if using the fallback implementation, where we can't guarantee
+ * the required alignment.
+ */
+ AssertPointerAlignment(ptr, 8);
+ return *(&ptr->value);
+}
+
+#else
+
static inline uint64
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
{
uint64 old = 0;

/*
 * 64 bit reads aren't safe on all platforms, so the generic implementation
 * emulates them as a compare/exchange with 0, which always returns the old
 * value regardless of success.
 */
pg_atomic_compare_exchange_u64_impl(ptr, &old, 0);

return old;
}
-#endif
+#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
+#endif /* PG_HAVE_ATOMIC_READ_U64 */
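The fast-path branches above rely on the platform advertising 8-byte single-copy atomicity. A rough sketch of how an architecture header signals that capability (the guard shown is illustrative; the real definitions live in the headers under src/include/port/atomics/):

/* Illustrative sketch of an architecture header advertising the capability. */
#if defined(__x86_64__)
#define PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY
#endif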
#ifndef PG_HAVE_ATOMIC_INIT_U64
#define PG_HAVE_ATOMIC_INIT_U64