#
-# Copyright (c) 2003 by Hewlett-Packard Company. All rights reserved.
+# Copyright (c) 2003 Hewlett-Packard Development Company, L.P.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
$(AR) ruc atomic_ops.a atomic_ops.o
$(RANLIB) atomic_ops.a
-test_atomic: test_atomic.c test_atomic_include.h $(ATOMIC_OPS_HEADERS)
- $(CC) $(CFLAGS) test_atomic.c -o test_atomic -lpthread
+test_atomic: test_atomic.c atomic_ops.c test_atomic_include.h $(ATOMIC_OPS_HEADERS)
+ $(CC) $(CFLAGS) test_atomic.c atomic_ops.c -o test_atomic -lpthread
-test_atomic_pthreads: test_atomic.c test_atomic_include.h $(ATOMIC_OPS_HEADERS)
- $(CC) $(CFLAGS) -DAO_USE_PTHREAD_DEFS test_atomic.c -o test_atomic_pthreads -lpthread
+test_atomic_pthreads: test_atomic.c atomic_ops.c test_atomic_include.h $(ATOMIC_OPS_HEADERS)
+ $(CC) $(CFLAGS) -DAO_USE_PTHREAD_DEFS test_atomic.c atomic_ops.c -o test_atomic_pthreads -lpthread
test_atomic_include.h: test_atomic.template
sed -e s/XX// test_atomic.template > test_atomic_include.h
AO_INLINE void
AO_store_release(volatile AO_T *p, AO_T val)
{
+ AO_compiler_barrier(); /* Empirically necessary. Gcc bug? */
/* A normal volatile store generates an st.rel */
*p = val;
}
#define AO_HAVE_fetch_and_add_full
+/* Really only works for 486 and later */
+AO_INLINE void
+AO_or_full (volatile AO_T *p, AO_T incr)
+{
+ __asm__ __volatile__ ("lock; orl %1, %0" :
+ "+m" (*p) : "r" (incr) : "memory");
+}
+
+#define AO_HAVE_or_full
+
AO_INLINE AO_TS_T
AO_test_and_set_full(volatile AO_T *addr)
{
/* We define only the full barrier variants, and count on the */
/* generalization section below to fill in the rest. */
-static pthread_mutex_t AO_lock = PTHREAD_MUTEX_INITIALIZER;
+extern pthread_mutex_t AO_pt_lock;
AO_INLINE void
AO_nop_full()
{
- pthread_mutex_lock(&AO_lock);
- pthread_mutex_unlock(&AO_lock);
+ pthread_mutex_lock(&AO_pt_lock);
+ pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_nop_full
AO_load_full(volatile AO_T *addr)
{
AO_T result;
- pthread_mutex_lock(&AO_lock);
+ pthread_mutex_lock(&AO_pt_lock);
result = *addr;
- pthread_mutex_unlock(&AO_lock);
+ pthread_mutex_unlock(&AO_pt_lock);
return result;
}
AO_INLINE void
AO_store_full(volatile AO_T *addr, AO_T val)
{
- pthread_mutex_lock(&AO_lock);
+ pthread_mutex_lock(&AO_pt_lock);
*addr = val;
- pthread_mutex_unlock(&AO_lock);
+ pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_store_full
AO_test_and_set_full(volatile AO_TS_T *addr)
{
int result;
- pthread_mutex_lock(&AO_lock);
+ pthread_mutex_lock(&AO_pt_lock);
result = (int)(*addr);
*addr = AO_TS_SET;
- pthread_mutex_unlock(&AO_lock);
+ pthread_mutex_unlock(&AO_pt_lock);
assert(result == AO_TS_SET || result == AO_TS_CLEAR);
return result;
}
#define AO_HAVE_test_and_set_full
-static AO_T
-AO_fetch_and_add_full(volatile AO_T *p, long incr)
+AO_INLINE AO_T
+AO_fetch_and_add_full(volatile AO_T *p, AO_T incr)
{
AO_T tmp;
- pthread_mutex_lock(&AO_lock);
+ pthread_mutex_lock(&AO_pt_lock);
tmp = *p;
*p = tmp + incr;
- pthread_mutex_unlock(&AO_lock);
+ pthread_mutex_unlock(&AO_pt_lock);
return tmp;
}
#define AO_HAVE_fetch_and_add_full
-#define AO_fetch_and_add1_full(addr) AO_fetch_and_add_full(addr,1)
-#define AO_fetch_and_sub1_full(addr) AO_fetch_and_add_full(addr,-1)
+AO_INLINE void
+AO_or_full(volatile AO_T *p, AO_T incr)
+{
+ AO_T tmp;
+
+ pthread_mutex_lock(&AO_pt_lock);
+ tmp = *p;
+ *p = (tmp | incr);
+ pthread_mutex_unlock(&AO_pt_lock);
+}
+
+#define AO_HAVE_or_full
AO_INLINE int
AO_compare_and_swap_full(volatile AO_T *addr,
AO_T old, AO_T new_val)
{
- pthread_mutex_lock(&AO_lock);
+ pthread_mutex_lock(&AO_pt_lock);
if (*addr == old)
{
*addr = new_val;
- pthread_mutex_unlock(&AO_lock);
+ pthread_mutex_unlock(&AO_pt_lock);
return 1;
}
else
- pthread_mutex_unlock(&AO_lock);
+ pthread_mutex_unlock(&AO_pt_lock);
return 0;
}
* Lock for pthreads-based implementation.
*/
-static pthread_mutex_t AO_pt_lock = PTHREAD_MUTEX_INITIALIZER;
+pthread_mutex_t AO_pt_lock = PTHREAD_MUTEX_INITIALIZER;
/*
* Out of line compare-and-swap emulation based on test and set.
/* AO_fetch_and_add */
/* AO_fetch_and_add1 */
/* AO_fetch_and_sub1 */
+/* AO_or */
/* AO_compare_and_swap */
/* */
/* Note that atomicity guarantees are valid only if both */
/* AO_TS_SET, and returns the prior value. */
/* An AO_TS_T clear location can be reset with the */
/* AO_CLEAR macro, which normally uses AO_store_release. */
-/* AO_fetch_and_add takes an address and a long increment */
+/* AO_fetch_and_add takes an address and an AO_T increment */
/* value. The AO_fetch_and_add1 and AO_fetch_and_sub1 variants */
/* are provided, since they allow faster implementations on */
-/* some hardware. */
+/* some hardware. AO_or atomically ors an AO_T value into a */
+/* memory location, but does not provide access to the original.*/
/* */
/* We expect this list to grow slowly over time. */
/* */
#if defined(AO_HAVE_compare_and_swap_full) && \
!defined(AO_HAVE_fetch_and_add_full)
AO_INLINE AO_T
- AO_fetch_and_add_full(volatile AO_T *addr, long incr)
+ AO_fetch_and_add_full(volatile AO_T *addr, AO_T incr)
{
AO_T old;
do
#if defined(AO_HAVE_fetch_and_add_full) &&\
!defined(AO_HAVE_fetch_and_sub1_full)
-# define AO_fetch_and_sub1_full(addr) AO_fetch_and_add_full(addr,-1)
+# define AO_fetch_and_sub1_full(addr) AO_fetch_and_add_full(addr,(AO_T)(-1))
# define AO_HAVE_fetch_and_sub1_full
#endif
#if defined(AO_HAVE_fetch_and_add_release) &&\
!defined(AO_HAVE_fetch_and_sub1_release)
-# define AO_fetch_and_sub1_release(addr) AO_fetch_and_add_release(addr,-1)
+# define AO_fetch_and_sub1_release(addr) \
+ AO_fetch_and_add_release(addr,(AO_T)(-1))
# define AO_HAVE_fetch_and_sub1_release
#endif
#if defined(AO_HAVE_fetch_and_add_acquire) &&\
!defined(AO_HAVE_fetch_and_sub1_acquire)
-# define AO_fetch_and_sub1_acquire(addr) AO_fetch_and_add_acquire(addr,-1)
+# define AO_fetch_and_sub1_acquire(addr) \
+ AO_fetch_and_add_acquire(addr,(AO_T)(-1))
# define AO_HAVE_fetch_and_sub1_acquire
#endif
#if defined(AO_HAVE_fetch_and_add_write) &&\
!defined(AO_HAVE_fetch_and_sub1_write)
-# define AO_fetch_and_sub1_write(addr) AO_fetch_and_add_write(addr,-1)
+# define AO_fetch_and_sub1_write(addr) \
+ AO_fetch_and_add_write(addr,(AO_T)(-1))
# define AO_HAVE_fetch_and_sub1_write
#endif
#if defined(AO_HAVE_fetch_and_add_read) &&\
!defined(AO_HAVE_fetch_and_sub1_read)
-# define AO_fetch_and_sub1_read(addr) AO_fetch_and_add_read(addr,-1)
+# define AO_fetch_and_sub1_read(addr) \
+ AO_fetch_and_add_read(addr,(AO_T)(-1))
# define AO_HAVE_fetch_and_sub1_read
#endif
#if defined(AO_HAVE_fetch_and_add_release_write) &&\
!defined(AO_HAVE_fetch_and_sub1_release_write)
# define AO_fetch_and_sub1_release_write(addr) \
- AO_fetch_and_add_release_write(addr,-1)
+ AO_fetch_and_add_release_write(addr,(AO_T)(-1))
# define AO_HAVE_fetch_and_sub1_release_write
#endif
#if defined(AO_HAVE_fetch_and_add_acquire_read) &&\
!defined(AO_HAVE_fetch_and_sub1_acquire_read)
# define AO_fetch_and_sub1_acquire_read(addr) \
- AO_fetch_and_add_acquire_read(addr,-1)
+ AO_fetch_and_add_acquire_read(addr,(AO_T)(-1))
# define AO_HAVE_fetch_and_sub1_acquire_read
#endif
# define AO_HAVE_fetch_and_sub1_acquire_read
#endif
+/* Atomic or */
+#if defined(AO_HAVE_compare_and_swap_full) && \
+ !defined(AO_HAVE_or_full)
+ AO_INLINE void
+ AO_or_full(volatile AO_T *addr, AO_T incr)
+ {
+ AO_T old;
+ do
+ {
+ old = *addr;
+ }
+ while (!AO_compare_and_swap_full(addr, old, (old | incr)));
+ }
+# define AO_HAVE_or_full
+#endif
+
+#if defined(AO_HAVE_or_full)
+# if !defined(AO_HAVE_or_release)
+# define AO_or_release(addr, val) \
+ AO_or_full(addr, val)
+# define AO_HAVE_or_release
+# endif
+# if !defined(AO_HAVE_or_acquire)
+# define AO_or_acquire(addr, val) \
+ AO_or_full(addr, val)
+# define AO_HAVE_or_acquire
+# endif
+# if !defined(AO_HAVE_or_write)
+# define AO_or_write(addr, val) \
+ AO_or_full(addr, val)
+# define AO_HAVE_or_write
+# endif
+# if !defined(AO_HAVE_or_read)
+# define AO_or_read(addr, val) \
+ AO_or_full(addr, val)
+# define AO_HAVE_or_read
+# endif
+#endif /* AO_HAVE_or_full */
+
+#if !defined(AO_HAVE_or) && \
+ defined(AO_HAVE_or_release)
+# define AO_or(addr, val) \
+ AO_or_release(addr, val)
+# define AO_HAVE_or
+#endif
+#if !defined(AO_HAVE_or) && \
+ defined(AO_HAVE_or_acquire)
+# define AO_or(addr, val) \
+ AO_or_acquire(addr, val)
+# define AO_HAVE_or
+#endif
+#if !defined(AO_HAVE_or) && \
+ defined(AO_HAVE_or_write)
+# define AO_or(addr, val) \
+ AO_or_write(addr, val)
+# define AO_HAVE_or
+#endif
+#if !defined(AO_HAVE_or) && \
+ defined(AO_HAVE_or_read)
+# define AO_or(addr, val) \
+ AO_or_read(addr, val)
+# define AO_HAVE_or
+#endif
+
+#if defined(AO_HAVE_or_acquire) &&\
+ defined(AO_HAVE_nop_full) && \
+ !defined(AO_HAVE_or_full)
+# define AO_or_full(addr, val) \
+ (AO_nop_full(), AO_or_acquire(addr, val))
+#endif
+
+#if !defined(AO_HAVE_or_release_write) && \
+ defined(AO_HAVE_or_write)
+# define AO_or_release_write(addr, val) \
+ AO_or_write(addr, val)
+# define AO_HAVE_or_release_write
+#endif
+#if !defined(AO_HAVE_or_release_write) && \
+ defined(AO_HAVE_or_release)
+# define AO_or_release_write(addr, val) \
+ AO_or_release(addr, val)
+# define AO_HAVE_or_release_write
+#endif
+#if !defined(AO_HAVE_or_acquire_read) && \
+ defined(AO_HAVE_or_read)
+# define AO_or_acquire_read(addr, val) \
+ AO_or_read(addr, val)
+# define AO_HAVE_or_acquire_read
+#endif
+#if !defined(AO_HAVE_or_acquire_read) && \
+ defined(AO_HAVE_or_acquire)
+# define AO_or_acquire_read(addr, val) \
+ AO_or_acquire(addr, val)
+# define AO_HAVE_or_acquire_read
+#endif
+
/* Test_and_set */
Atomic load of *addr.
void store(volatile AO_T * addr, AO_T new_val)
Atomically store new_val to *addr.
-void fetch_and_add(volatile AO_T *addr, incr)
+AO_T fetch_and_add(volatile AO_T *addr, AO_T incr)
Atomically add incr to *addr, and return the original value of *addr.
-void fetch_and_add1(volatile AO_T *addr)
+AO_T fetch_and_add1(volatile AO_T *addr)
Equivalent to AO_fetch_and_add(addr, 1).
-void fetch_and_sub1(volatile AO_T *addr)
+AO_T fetch_and_sub1(volatile AO_T *addr)
Equivalent to AO_fetch_and_add(addr, (AO_T)(-1)).
+void or(volatile AO_T *addr, AO_T incr)
+ Atomically or incr into *addr.
int compare_and_swap(volatile AO_T * addr, AO_T old_val, AO_T new_val)
Atomically compare *addr to old_val, and replace *addr by new_val
if the first comparison succeeds. Returns nonzero if the comparison
my_counter1 = AO_load(&counter1);
if (my_counter1 < my_counter2)
{
- fprintf(stderr, "Saw release store out of order: %d < %d\n",
- my_counter1, my_counter2);
+ fprintf(stderr, "Saw release store out of order: %lu < %lu\n",
+ (unsigned long)my_counter1, (unsigned long)my_counter2);
abort();
}
}
junk *= 17;
junk *= 17;
}
+ return 0;
}
int test_and_set_test(void)
void test_atomicXX(void)
{
- AO_T x, y;
+ AO_T x;
# if defined(AO_HAVE_test_and_setXX)
AO_TS_T z = AO_TS_INITIALIZER;
# endif
MISSING(AO_fetch_and_add);
# endif
# if defined(AO_HAVE_fetch_and_add1XX)
- TA_assert(AO_fetch_and_add1(&x) == 13);
+ TA_assert(AO_fetch_and_add1XX(&x) == 13);
# else
MISSING(AO_fetch_and_add1);
++x;
# endif
# if defined(AO_HAVE_fetch_and_sub1XX)
- TA_assert(AO_fetch_and_sub1(&x) == 14);
+ TA_assert(AO_fetch_and_sub1XX(&x) == 14);
# else
MISSING(AO_fetch_and_sub1);
--x;
# endif
# if defined(AO_HAVE_compare_and_swapXX)
- TA_assert(!AO_compare_and_swap(&x, 14, 42));
+ TA_assert(!AO_compare_and_swapXX(&x, 14, 42));
TA_assert(x == 13);
- TA_assert(AO_compare_and_swap(&x, 13, 42));
+ TA_assert(AO_compare_and_swapXX(&x, 13, 42));
TA_assert(x == 42);
# else
MISSING(AO_compare_and_swap);
# endif
+# if defined(AO_HAVE_orXX)
+ AO_orXX(&x, 66);
+ TA_assert(x == 106);
+# else
+ MISSING(AO_or);
+ x |= 34;
+# endif
}