return atomic_exchange_and_add( &value_, +1 );
}
- native_t operator+=(int val)
+ native_t operator+=(native_t val)
{
return atomic_exchange_and_add( &value_, val );
}
// the below is necessary because __sync_fetch_and_add is not universally available on i386.. I 3> RHEL5.
#if defined( __GNUC__ ) && ( defined( __i386__ ) || defined( __x86_64__ ) )
- static int atomic_exchange_and_add( native_t * pw, int dv )
+ static native_t atomic_exchange_and_add( native_t * pw, native_t dv )
{
// int r = *pw;
// *pw += dv;
// return r;
- int r;
+ native_t r;
__asm__ __volatile__
(
return r;
}
#else
- static int atomic_exchange_and_add( native_t * pw, int dv )
+ static native_t atomic_exchange_and_add( native_t * pw, native_t dv )
{
return __sync_fetch_and_add(pw, dv);
}
return 0;
}
+// Worker thread body for the StatBag contention test below.
+// Takes the shared StatBag via the pthread void* argument and bumps the
+// "c" counter 10 million times; with 4 threads the test expects exactly
+// 40,000,000, proving StatBag::inc is a lost-update-free atomic increment.
+static void *threadMangler2(void* a)
+{
+ StatBag* S = (StatBag*)a;
+ for(unsigned int n=0; n < 10000000; ++n)
+ S->inc("c");
+ return 0;
+}
+
+
+
BOOST_AUTO_TEST_SUITE(misc_hh)
BOOST_AUTO_TEST_CASE(test_StatBagBasic) {
pthread_join(tid[i], &res);
BOOST_CHECK_EQUAL(s.read("c"), 40000000U);
+
+ // Second contention round: reset the counter and hammer it again from
+ // 4 threads x 10M increments via threadMangler2; an atomic counter must
+ // land on exactly 40M with no lost updates.
+ s.set("c", 0);
+
+ for(int i=0; i < 4; ++i)
+ pthread_create(&tid[i], 0, threadMangler2, (void*)&s);
+
+ for(int i=0; i < 4 ; ++i)
+ pthread_join(tid[i], &res);
+
+ BOOST_CHECK_EQUAL(s.read("c"), 40000000U);
+
+
+ // Width checks: set/read/inc across the 2^31 boundary to catch a counter
+ // that is secretly a signed (or truncated) 32-bit type.
+ s.set("c", 1ULL<<31);
+ BOOST_CHECK_EQUAL(s.read("c"), (1ULL<<31) );
+ s.inc("c");
+ BOOST_CHECK_EQUAL(s.read("c"), (1ULL<<31) +1 );
+
+ // NOTE(review): UINTPTR_MAX is used as a proxy for the counter's native
+ // width — assumes native_t tracks pointer size; confirm against misc.hh.
+#if UINTPTR_MAX > 0xffffffffULL
+ // 64-bit platform: exercise a value above 2^32, then wraparound at 2^64.
+ s.set("c", 1ULL<<33);
+ BOOST_CHECK_EQUAL(s.read("c"), (1ULL<<33) );
+ s.inc("c");
+ BOOST_CHECK_EQUAL(s.read("c"), (1ULL<<33) +1 );
+
+ s.set("c", ~0ULL);
+ BOOST_CHECK_EQUAL(s.read("c"), 0xffffffffffffffffULL );
+ s.inc("c");
+ BOOST_CHECK_EQUAL(s.read("c"), 0 );
+#else
+ // 32-bit platform: incrementing the all-ones value must wrap to zero.
+ s.set("c", ~0UL);
+ BOOST_CHECK_EQUAL(s.read("c"), 0xffffffffUL );
+ s.inc("c");
+ BOOST_CHECK_EQUAL(s.read("c"), 0 );
+#endif
+
}