def warn_double_lock : Warning<
"locking '%0' that is already acquired">,
InGroup<ThreadSafety>, DefaultIgnore;
-def warn_function_requires_lock : Warning<
- "calling function '%0' requires lock '%0'">,
- InGroup<ThreadSafety>, DefaultIgnore;
def warn_locks_not_released : Warning<
"lock '%0' is not released at the end of function '%1'">,
InGroup<ThreadSafety>, DefaultIgnore;
def warn_var_deref_requires_any_lock : Warning<
"accessing the value pointed to by '%0' requires some lock">,
InGroup<ThreadSafety>, DefaultIgnore;
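+// Note: %select{shared|exclusive}2 is chosen by the third diagnostic argument;
+// the checker below passes AK_Read or AK_Written to warnIfLockNotHeld, which
+// presumably streams it in that position (read -> shared, write -> exclusive).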
+def warn_fun_requires_lock : Warning<
+ "calling function '%0' requires %select{shared|exclusive}2 lock '%1'">,
+ InGroup<ThreadSafety>, DefaultIgnore;
+def warn_fun_excludes_lock : Warning<
+ "cannot call function '%0' while holding lock '%1'">,
+ InGroup<ThreadSafety>, DefaultIgnore;
def warn_impcast_vector_scalar : Warning<
/// \brief When visiting CXXMemberCallExprs we need to examine the attributes on
/// the method that is being called and add, remove or check locks in the
/// lockset accordingly.
-///
+///
/// FIXME: For classes annotated with one of the guarded annotations, we need
/// to treat const method calls as reads and non-const method calls as writes,
/// and check that the appropriate locks are held. Non-const method calls with
/// the same signature as const method calls can also be treated as reads.
+///
+/// FIXME: We need to also visit CallExprs to catch/check global functions.
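+///
+/// A minimal sketch of the pattern this visitor checks (the names here are
+/// only illustrative; the tests mirror this with LRBar/LRFoo):
+/// \code
+///   Mutex mu;
+///   class Widget {
+///   public:
+///     void update() __attribute__((exclusive_locks_required(mu)));
+///   };
+/// \endcode
+/// A call to Widget::update() is diagnosed with warn_fun_requires_lock unless
+/// 'mu' is in the current lockset.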
void BuildLockset::VisitCXXMemberCallExpr(CXXMemberCallExpr *Exp) {
NamedDecl *D = dyn_cast_or_null<NamedDecl>(Exp->getCalleeDecl());
}
for (UnlockFunctionAttr::args_iterator I = UFAttr->args_begin(),
- E = UFAttr->args_end(); I != E; ++I)
+ E = UFAttr->args_end(); I != E; ++I)
removeLock(ExpLocation, *I);
break;
}
+ case attr::ExclusiveLocksRequired: {
+ // FIXME: Also use this attribute to add required locks to the initial
+ // lockset when processing a CFG for a function annotated with this
+ // attribute.
+ ExclusiveLocksRequiredAttr *ELRAttr =
+ cast<ExclusiveLocksRequiredAttr>(Attr);
+
+ for (ExclusiveLocksRequiredAttr::args_iterator
+ I = ELRAttr->args_begin(), E = ELRAttr->args_end(); I != E; ++I) {
+ LockID Lock(*I);
+ warnIfLockNotHeld(D, Exp, AK_Written, Lock,
+ diag::warn_fun_requires_lock);
+ }
+ break;
+ }
+
+ case attr::SharedLocksRequired: {
+ // FIXME: Also use this attribute to add required locks to the initial
+ // lockset when processing a CFG for a function annotated with this
+ // attribute.
+ SharedLocksRequiredAttr *SLRAttr = cast<SharedLocksRequiredAttr>(Attr);
+
+ for (SharedLocksRequiredAttr::args_iterator I = SLRAttr->args_begin(),
+ E = SLRAttr->args_end(); I != E; ++I) {
+ LockID Lock(*I);
+ warnIfLockNotHeld(D, Exp, AK_Read, Lock,
+ diag::warn_fun_requires_lock);
+ }
+ break;
+ }
+
+ case attr::LocksExcluded: {
+ LocksExcludedAttr *LEAttr = cast<LocksExcludedAttr>(Attr);
+ for (LocksExcludedAttr::args_iterator I = LEAttr->args_begin(),
+ E = LEAttr->args_end(); I != E; ++I) {
+ LockID Lock(*I);
+ if (locksetContains(Lock))
+ S.Diag(ExpLocation, diag::warn_fun_excludes_lock)
+ << D->getName() << Lock.getName();
+ }
+ break;
+ }
+
+ case attr::LockReturned:
+ // FIXME: Deal with this attribute.
+ break;
+
  // Ignore other (non-thread-safety) attributes
default:
break;
GlobalLocker glock;
-void aa_elr_fun() __attribute__((exclusive_locks_required(aa_mu)));
-void aa_elr_fun() { }
-
void aa_fun_1() {
glock.globalLock();
glock.globalUnlock();
}
-void aa_fun_2() {
- aa_mu.Lock();
- aa_elr_fun();
- aa_mu.Unlock();
-}
-
void aa_fun_bad_1() {
glock.globalUnlock(); // \
// expected-warning{{unlocking 'aa_mu' that was not acquired}}
*pgb_var = 1;
sls_mu.Unlock();
}
+
+// FIXME: Add support for functions (not only methods)
+class LRBar {
+ public:
+ void aa_elr_fun() __attribute__((exclusive_locks_required(aa_mu)));
+ void aa_elr_fun_s() __attribute__((shared_locks_required(aa_mu)));
+ void le_fun() __attribute__((locks_excluded(sls_mu)));
+};
+
+class LRFoo {
+ public:
+ void test() __attribute__((exclusive_locks_required(sls_mu)));
+ void testShared() __attribute__((shared_locks_required(sls_mu2)));
+};
+
+void elr_fun() __attribute__((exclusive_locks_required(sls_mu)));
+void elr_fun() {}
+
+LRFoo MyLRFoo;
+LRBar Bar;
+
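+// The es_fun_* cases below satisfy the annotations (required locks are held
+// with the right access, excluded locks are not held), so no diagnostics are
+// expected.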
+void es_fun_0() {
+ aa_mu.Lock();
+ Bar.aa_elr_fun();
+ aa_mu.Unlock();
+}
+
+void es_fun_1() {
+ aa_mu.Lock();
+ Bar.aa_elr_fun_s();
+ aa_mu.Unlock();
+}
+
+void es_fun_2() {
+ aa_mu.ReaderLock();
+ Bar.aa_elr_fun_s();
+ aa_mu.Unlock();
+}
+
+void es_fun_3() {
+ sls_mu.Lock();
+ MyLRFoo.test();
+ sls_mu.Unlock();
+}
+
+void es_fun_4() {
+ sls_mu2.Lock();
+ MyLRFoo.testShared();
+ sls_mu2.Unlock();
+}
+
+void es_fun_5() {
+ sls_mu2.ReaderLock();
+ MyLRFoo.testShared();
+ sls_mu2.Unlock();
+}
+
+void es_fun_6() {
+ Bar.le_fun();
+}
+
+void es_fun_7() {
+ sls_mu.Lock();
+ elr_fun();
+ sls_mu.Unlock();
+}
+
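+// The es_bad_* cases below violate the annotations; each carries the
+// expected-warning it should trigger.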
+void es_bad_0() {
+ Bar.aa_elr_fun(); // \
+ // expected-warning {{calling function 'aa_elr_fun' requires exclusive lock 'aa_mu'}}
+}
+
+void es_bad_1() {
+ aa_mu.ReaderLock();
+ Bar.aa_elr_fun(); // \
+ // expected-warning {{calling function 'aa_elr_fun' requires exclusive lock 'aa_mu'}}
+ aa_mu.Unlock();
+}
+
+void es_bad_2() {
+ Bar.aa_elr_fun_s(); // \
+ // expected-warning {{calling function 'aa_elr_fun_s' requires shared lock 'aa_mu'}}
+}
+
+void es_bad_3() {
+ MyLRFoo.test(); // \
+ // expected-warning {{calling function 'test' requires exclusive lock 'sls_mu'}}
+}
+
+void es_bad_4() {
+ MyLRFoo.testShared(); // \
+ // expected-warning {{calling function 'testShared' requires shared lock 'sls_mu2'}}
+}
+
+void es_bad_5() {
+ sls_mu.ReaderLock();
+ MyLRFoo.test(); // \
+ // expected-warning {{calling function 'test' requires exclusive lock 'sls_mu'}}
+ sls_mu.Unlock();
+}
+
+void es_bad_6() {
+ sls_mu.Lock();
+ Bar.le_fun(); // \
+ // expected-warning {{cannot call function 'le_fun' while holding lock 'sls_mu'}}
+ sls_mu.Unlock();
+}
+
+void es_bad_7() {
+ sls_mu.ReaderLock();
+ Bar.le_fun(); // \
+ // expected-warning {{cannot call function 'le_fun' while holding lock 'sls_mu'}}
+ sls_mu.Unlock();
+}