granicus.if.org Git - icinga2/commitdiff
Allow thin mutex calls to be inlined
author    Gunnar Beutner <gunnar@beutner.name>
          Wed, 12 Nov 2014 05:48:23 +0000 (06:48 +0100)
committer Gunnar Beutner <gunnar@beutner.name>
          Wed, 12 Nov 2014 05:50:52 +0000 (06:50 +0100)
refs #7622

lib/base/thinmutex.cpp
lib/base/thinmutex.hpp
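
The change below splits ThinMutex::Lock() into an inlineable fast path and an out-of-line slow path: the header keeps a single compare-and-swap, and the spin/inflate loop moves into lib/base/thinmutex.cpp. A minimal sketch of the same split, using std::atomic instead of the platform intrinsics used by the real code (SketchMutex and everything in it are illustrative, not part of the commit):

    #include <atomic>

    class SketchMutex
    {
    public:
            // Fast path, kept in the header: a single compare-and-swap that
            // the compiler can inline at every call site.
            inline void Lock(void)
            {
                    long expected = 0; /* unlocked */
                    if (!m_Data.compare_exchange_strong(expected, 1 /* locked */))
                            LockSlowPath();
            }

            inline void Unlock(void)
            {
                    m_Data.store(0);
            }

    private:
            void LockSlowPath(void); /* contended path, defined in a .cpp file */

            std::atomic<long> m_Data { 0 };
    };

    // In the .cpp file: the loop that used to live inside Lock(). The real
    // ThinMutex additionally inflates to a boost::mutex under contention;
    // this sketch only spins.
    void SketchMutex::LockSlowPath(void)
    {
            long expected = 0;
            while (!m_Data.compare_exchange_weak(expected, 1))
                    expected = 0; /* compare_exchange rewrites 'expected' on failure */
    }

Keeping only the uncontended branch in the header is what makes the function small enough for the compiler to inline, which is the point of the commit.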

diff --git a/lib/base/thinmutex.cpp b/lib/base/thinmutex.cpp
index b2eedd430cddc2f1f927420be4d5fcdc94454fff..4276b1a69a4aa3fde34aa5d4862b7282f65160f9 100644
--- a/lib/base/thinmutex.cpp
+++ b/lib/base/thinmutex.cpp
@@ -52,6 +52,39 @@ static void InitThinMutex(void)
 INITIALIZE_ONCE(&InitThinMutex);
 #endif /* _DEBUG */
 
+void ThinMutex::LockSlowPath(void)
+{
+       LockSlowPath(false);
+}
+
+void ThinMutex::LockSlowPath(bool make_native)
+{
+       unsigned int it = 0;
+
+#ifdef _WIN32
+#      ifdef _WIN64
+       while (InterlockedCompareExchange64(&m_Data, THINLOCK_LOCKED, THINLOCK_UNLOCKED) != THINLOCK_UNLOCKED) {
+#      else /* _WIN64 */
+       while (InterlockedCompareExchange(&m_Data, THINLOCK_LOCKED, THINLOCK_UNLOCKED) != THINLOCK_UNLOCKED) {
+#      endif /* _WIN64 */
+#else /* _WIN32 */
+       while (!__sync_bool_compare_and_swap(&m_Data, THINLOCK_UNLOCKED, THINLOCK_LOCKED)) {
+#endif /* _WIN32 */
+               if (m_Data > THINLOCK_LOCKED) {
+                       LockNative();
+                       return;
+               }
+
+               make_native = true;
+
+               Spin(it);
+               it++;
+       }
+
+       if (make_native)
+               MakeNative();
+}
+
 void ThinMutex::MakeNative(void)
 {
        boost::mutex *mtx = new boost::mutex();
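
In the new LockSlowPath() above, a lock word larger than THINLOCK_LOCKED means the mutex has already been inflated, and the thread falls through to LockNative(); contention observed while spinning causes MakeNative() to be called once the lock is acquired. Below is a sketch of one plausible reading of that scheme, assuming the lock word holds either the lock state or, after inflation, the address of a heap-allocated native mutex; MakeNative()'s full body is not shown in this diff, so that layout is an assumption, and the names here are illustrative:

    #include <atomic>
    #include <cstdint>
    #include <mutex>

    class InflatingLockSketch
    {
            static constexpr std::uintptr_t UNLOCKED = 0; /* THINLOCK_UNLOCKED */
            static constexpr std::uintptr_t LOCKED   = 1; /* THINLOCK_LOCKED */

            std::atomic<std::uintptr_t> m_Data { UNLOCKED };

    public:
            void LockSlowPath(bool make_native)
            {
                    std::uintptr_t expected = UNLOCKED;

                    while (!m_Data.compare_exchange_weak(expected, LOCKED)) {
                            if (expected > LOCKED) {
                                    // Assumed layout: the word already stores a pointer
                                    // to the native mutex, so block on it (cf. LockNative()).
                                    reinterpret_cast<std::mutex *>(expected)->lock();
                                    return;
                            }

                            make_native = true;  /* contention observed */
                            expected = UNLOCKED; /* retry; the real code backs off via Spin() */
                    }

                    if (make_native) {
                            // Assumed behaviour of MakeNative(): allocate a native mutex,
                            // acquire it, and publish its address through the lock word.
                            std::mutex *mtx = new std::mutex();
                            mtx->lock();
                            m_Data.store(reinterpret_cast<std::uintptr_t>(mtx));
                    }
            }
    };

Treating values above THINLOCK_LOCKED as pointers would make the m_Data > THINLOCK_LOCKED test cheap: no separate flag is needed to tell a thin lock from an inflated one.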
diff --git a/lib/base/thinmutex.hpp b/lib/base/thinmutex.hpp
index 7605937fa51748030ba24009b8decec873d5cfd2..b560f8f5ab8f6e94cff27b50ce2cfeaee883f201 100644
--- a/lib/base/thinmutex.hpp
+++ b/lib/base/thinmutex.hpp
@@ -69,33 +69,22 @@ public:
 
        inline void Lock(bool make_native = false)
        {
-               bool contended = false;
-               unsigned int it = 0;
-
 #ifdef _WIN32
 #      ifdef _WIN64
-               while (InterlockedCompareExchange64(&m_Data, THINLOCK_LOCKED, THINLOCK_UNLOCKED) != THINLOCK_UNLOCKED) {
+               if (InterlockedCompareExchange64(&m_Data, THINLOCK_LOCKED, THINLOCK_UNLOCKED) != THINLOCK_UNLOCKED) {
 #      else /* _WIN64 */
-               while (InterlockedCompareExchange(&m_Data, THINLOCK_LOCKED, THINLOCK_UNLOCKED) != THINLOCK_UNLOCKED) {
+               if (InterlockedCompareExchange(&m_Data, THINLOCK_LOCKED, THINLOCK_UNLOCKED) != THINLOCK_UNLOCKED) {
 #      endif /* _WIN64 */
 #else /* _WIN32 */
-               while (!__sync_bool_compare_and_swap(&m_Data, THINLOCK_UNLOCKED, THINLOCK_LOCKED)) {
+               if (!__sync_bool_compare_and_swap(&m_Data, THINLOCK_UNLOCKED, THINLOCK_LOCKED)) {
 #endif /* _WIN32 */
-                       if (m_Data > THINLOCK_LOCKED) {
-                               LockNative();
-                               return;
-                       }
-
-                       contended = true;
-
-                       Spin(it);
-                       it++;
+                       LockSlowPath();
                }
-
-               if (contended || make_native)
-                       MakeNative();
        }
 
+       void LockSlowPath(void);
+       void LockSlowPath(bool make_native);
+
        inline void Unlock(void)
        {
 #ifdef _WIN32
@@ -112,7 +101,7 @@ public:
 
        inline void Inflate(void)
        {
-               Lock(true);
+               LockSlowPath(true);
                Unlock();
        }