bool isEmpty() const { return FactIDs.size() == 0; }
+ // Return true if the set contains only negative facts, i.e. no capability
+ // is actually held.  Unlike isEmpty() above, this overload needs the
+ // FactManager in order to inspect each fact.
+ bool isEmpty(FactManager &FactMan) const {
+ for (FactID FID : *this) {
+ if (!FactMan[FID].negative())
+ return false;
+ }
+ return true;
+ }
+
+ void addLockByID(FactID ID) { FactIDs.push_back(ID); }
+
FactID addLock(FactManager& FM, const FactEntry &Entry) {
FactID F = FM.newFact(Entry);
FactIDs.push_back(F);
ThreadSafetyAnalyzer(ThreadSafetyHandler &H)
: Arena(&Bpa), SxBuilder(Arena), Handler(H) {}
- void addLock(FactSet &FSet, const FactEntry &Entry, StringRef DiagKind);
+ // Return true if CapE refers to a declaration in the same DeclContext as
+ // the method currently being analyzed.
+ bool inCurrentScope(const CapabilityExpr &CapE);
+
+ // Add a new fact to FSet.  ReqAttr is true when the fact comes from an
+ // attribute on the function being analyzed, in which case addLock does not
+ // look for a matching negative capability.
+ void addLock(FactSet &FSet, const FactEntry &Entry, StringRef DiagKind,
+ bool ReqAttr = false);
void removeLock(FactSet &FSet, const CapabilityExpr &CapE,
SourceLocation UnlockLoc, bool FullyRemove, LockKind Kind,
StringRef DiagKind);
return "mutex";
}
+
+// Return true if CapE names a capability declared in the same DeclContext
+// (e.g. the same class) as the method currently being analyzed.
+inline bool ThreadSafetyAnalyzer::inCurrentScope(const CapabilityExpr &CapE) {
+  // Outside of a method there is no "current scope".
+  if (!CurrentMethod)
+    return false;
+  // Only til::Project expressions carry a clang declaration to compare.
+  const auto *Proj = dyn_cast_or_null<til::Project>(CapE.sexpr());
+  if (!Proj)
+    return false;
+  const auto *VD = Proj->clangDecl();
+  return VD && VD->getDeclContext() == CurrentMethod->getDeclContext();
+}
+
+
/// \brief Add a new lock to the lockset, warning if the lock is already there.
-/// \param Mutex -- the Mutex expression for the lock
-/// \param LDat -- the LockData for the lock
+/// \param Mutex -- the Mutex expression for the lock
+/// \param LDat -- the LockData for the lock
+/// \param ReqAttr -- true if this is part of an initial Requires attribute.
void ThreadSafetyAnalyzer::addLock(FactSet &FSet, const FactEntry &Entry,
- StringRef DiagKind) {
+ StringRef DiagKind, bool ReqAttr) {
if (Entry.shouldIgnore())
return;
-
+ if (!ReqAttr && !Entry.negative()) {
+ // Look for the corresponding negative capability, and remove it from the
+ // fact set: acquiring the lock consumes the "not held" fact.
+ CapabilityExpr NegC = !Entry;
+ FactEntry *Nen = FSet.findLock(FactMan, NegC);
+ if (Nen) {
+ FSet.removeLock(FactMan, NegC);
+ }
+ else {
+ // The negative capability was not held.  Warn only for capabilities in
+ // the current scope, and not for mere assertions.
+ if (inCurrentScope(Entry) && !Entry.asserted())
+ Handler.handleNegativeNotHeld(DiagKind, Entry.toString(),
+ NegC.toString(), Entry.loc())
+ }
+ }
// FIXME: deal with acquired before/after annotations.
// FIXME: Don't always warn when we have support for reentrant locks.
- if (!Entry.asserted() && FSet.findLock(FactMan, Entry)) {
- Handler.handleDoubleLock(DiagKind, Entry.toString(), Entry.loc());
+ if (FSet.findLock(FactMan, Entry)) {
+ // Already held: report a double acquire unless the fact was asserted.
+ if (!Entry.asserted())
+ Handler.handleDoubleLock(DiagKind, Entry.toString(), Entry.loc());
} else {
FSet.addLock(FactMan, Entry);
}
if (ReceivedKind != LK_Generic && LDat->kind() != ReceivedKind) {
Handler.handleIncorrectUnlockKind(DiagKind, Cp.toString(),
LDat->kind(), ReceivedKind, UnlockLoc);
- return;
}
if (LDat->underlying()) {
+ assert(!Cp.negative() && "Managing object cannot be negative.");
CapabilityExpr UnderCp(LDat->underlying(), false);
+ // Negative fact recording that the underlying mutex is not held once it
+ // has been released.
+ FactEntry UnderEntry(!UnderCp, LK_Exclusive, UnlockLoc);
// This is scoped lockable object, which manages the real mutex.
if (FullyRemove) {
// We're destroying the managing object.
// Remove the underlying mutex if it exists; but don't warn.
- if (FSet.findLock(FactMan, UnderCp))
+ if (FSet.findLock(FactMan, UnderCp)) {
FSet.removeLock(FactMan, UnderCp);
+ FSet.addLock(FactMan, UnderEntry);
+ }
+ FSet.removeLock(FactMan, Cp);
} else {
// We're releasing the underlying mutex, but not destroying the
// managing object. Warn on dual release.
Handler.handleUnmatchedUnlock(DiagKind, UnderCp.toString(), UnlockLoc);
}
FSet.removeLock(FactMan, UnderCp);
- return;
+ FSet.addLock(FactMan, UnderEntry);
}
+ return;
}
+ // else !LDat->underlying()
+
FSet.removeLock(FactMan, Cp);
+ // Removing a positive capability also records the corresponding negative
+ // one, so later acquisitions can see the lock is provably not held.
+ if (!Cp.negative()) {
+ FSet.addLock(FactMan, FactEntry(!Cp, LK_Exclusive, UnlockLoc));
+ }
}
LocalVariableMap::Context LVarCtx;
unsigned CtxIndex;
- // Helper functions
- bool inCurrentScope(const CapabilityExpr &CapE);
-
+ // Helper functions
void warnIfMutexNotHeld(const NamedDecl *D, const Expr *Exp, AccessKind AK,
Expr *MutexExp, ProtectedOperationKind POK,
StringRef DiagKind);
};
-inline bool BuildLockset::inCurrentScope(const CapabilityExpr &CapE) {
- if (!Analyzer->CurrentMethod)
- return false;
- if (auto *P = dyn_cast_or_null<til::Project>(CapE.sexpr())) {
- auto *VD = P->clangDecl();
- if (VD)
- return VD->getDeclContext() == Analyzer->CurrentMethod->getDeclContext();
- }
- return false;
-}
-
-
/// \brief Warn if the LSet does not contain a lock sufficient to protect access
/// of at least the passed in AccessKind.
void BuildLockset::warnIfMutexNotHeld(const NamedDecl *D, const Expr *Exp,
// If this does not refer to a negative capability in the same class,
// then stop here.
- if (!inCurrentScope(Cp))
+ if (!Analyzer->inCurrentScope(Cp))
return;
// Otherwise the negative requirement must be propagated to the caller.
if (!D || !D->hasAttrs())
return;
+ // Use the FactManager-aware isEmpty() overload so that negative facts do
+ // not count as held locks.
- if (D->hasAttr<GuardedVarAttr>() && FSet.isEmpty())
+ if (D->hasAttr<GuardedVarAttr>() && FSet.isEmpty(Analyzer->FactMan)) {
Analyzer->Handler.handleNoMutexHeld("mutex", D, POK_VarAccess, AK,
Exp->getExprLoc());
+ }
for (const auto *I : D->specific_attrs<GuardedByAttr>())
warnIfMutexNotHeld(D, Exp, AK, I->getArg(), POK_VarAccess,
if (!D || !D->hasAttrs())
return;
- if (D->hasAttr<PtGuardedVarAttr>() && FSet.isEmpty())
+ if (D->hasAttr<PtGuardedVarAttr>() && FSet.isEmpty(Analyzer->FactMan))
Analyzer->Handler.handleNoMutexHeld("mutex", D, POK_VarDereference, AK,
Exp->getExprLoc());
}
}
else if (!LDat2->managed() && !LDat2->asserted() &&
- !LDat2->isUniversal()) {
+ !LDat2->negative() && !LDat2->isUniversal()) {
+ // Negative facts are excluded above: a capability that is provably not
+ // held should not trigger an end-of-scope warning.
Handler.handleMutexHeldEndOfScope("mutex", LDat2->toString(),
LDat2->loc(), JoinLoc, LEK1);
}
}
}
else if (!LDat1->managed() && !LDat1->asserted() &&
- !LDat1->isUniversal()) {
+ !LDat1->negative() && !LDat1->isUniversal()) {
+ // Same exclusion of negative facts as above.
Handler.handleMutexHeldEndOfScope("mutex", LDat1->toString(),
LDat1->loc(), JoinLoc, LEK2);
}
// FIXME -- Loc can be wrong here.
+ // ReqAttr=true: these locks come from attributes on the function itself,
+ // so addLock must not demand a matching negative capability.
for (const auto &Mu : ExclusiveLocksToAdd)
addLock(InitialLockset, FactEntry(Mu, LK_Exclusive, Loc),
- CapDiagKind);
+ CapDiagKind, true);
for (const auto &Mu : SharedLocksToAdd)
addLock(InitialLockset, FactEntry(Mu, LK_Shared, Loc),
- CapDiagKind);
+ CapDiagKind, true);
}
for (const auto *CurrBlock : *SortedGraph) {
--- /dev/null
+// RUN: %clang_cc1 -fsyntax-only -verify -std=c++11 -Wthread-safety -Wthread-safety-beta -fcxx-exceptions %s
+
+// FIXME: should also run %clang_cc1 -fsyntax-only -verify -Wthread-safety -std=c++11 -Wc++98-compat %s
+// FIXME: should also run %clang_cc1 -fsyntax-only -verify -Wthread-safety %s
+
+#define LOCKABLE __attribute__ ((lockable))
+#define SCOPED_LOCKABLE __attribute__ ((scoped_lockable))
+#define GUARDED_BY(x) __attribute__ ((guarded_by(x)))
+#define GUARDED_VAR __attribute__ ((guarded_var))
+#define PT_GUARDED_BY(x) __attribute__ ((pt_guarded_by(x)))
+#define PT_GUARDED_VAR __attribute__ ((pt_guarded_var))
+#define ACQUIRED_AFTER(...) __attribute__ ((acquired_after(__VA_ARGS__)))
+#define ACQUIRED_BEFORE(...) __attribute__ ((acquired_before(__VA_ARGS__)))
+#define EXCLUSIVE_LOCK_FUNCTION(...) __attribute__ ((exclusive_lock_function(__VA_ARGS__)))
+#define SHARED_LOCK_FUNCTION(...) __attribute__ ((shared_lock_function(__VA_ARGS__)))
+#define ASSERT_EXCLUSIVE_LOCK(...) __attribute__ ((assert_exclusive_lock(__VA_ARGS__)))
+#define ASSERT_SHARED_LOCK(...) __attribute__ ((assert_shared_lock(__VA_ARGS__)))
+#define EXCLUSIVE_TRYLOCK_FUNCTION(...) __attribute__ ((exclusive_trylock_function(__VA_ARGS__)))
+#define SHARED_TRYLOCK_FUNCTION(...) __attribute__ ((shared_trylock_function(__VA_ARGS__)))
+#define UNLOCK_FUNCTION(...) __attribute__ ((unlock_function(__VA_ARGS__)))
+#define LOCK_RETURNED(x) __attribute__ ((lock_returned(x)))
+#define LOCKS_EXCLUDED(...) __attribute__ ((locks_excluded(__VA_ARGS__)))
+#define EXCLUSIVE_LOCKS_REQUIRED(...) \
+ __attribute__ ((exclusive_locks_required(__VA_ARGS__)))
+#define SHARED_LOCKS_REQUIRED(...) \
+ __attribute__ ((shared_locks_required(__VA_ARGS__)))
+#define NO_THREAD_SAFETY_ANALYSIS __attribute__ ((no_thread_safety_analysis))
+
+
+// Fake mutex class annotated with thread-safety attributes; only the
+// annotations drive the analysis, so most methods need no bodies.
+class __attribute__((lockable)) Mutex {
+ public:
+ void Lock() __attribute__((exclusive_lock_function));
+ void ReaderLock() __attribute__((shared_lock_function));
+ void Unlock() __attribute__((unlock_function));
+ bool TryLock() __attribute__((exclusive_trylock_function(true)));
+ bool ReaderTryLock() __attribute__((shared_trylock_function(true)));
+ void LockWhen(const int &cond) __attribute__((exclusive_lock_function));
+
+ // operator! lets attributes spell the negative capability, e.g. !mu.
+ const Mutex& operator!() const { return *this; }
+
+ void AssertHeld() ASSERT_EXCLUSIVE_LOCK();
+ void AssertReaderHeld() ASSERT_SHARED_LOCK();
+};
+
+
+namespace SimpleTest {
+
+// Used by Foo::test() below to check that negative capabilities are not
+// enforced across classes.
+class Bar {
+ Mutex mu;
+ int a GUARDED_BY(mu);
+
+public:
+ // Declaring !mu means callers must be able to prove mu is not held.
+ void baz() EXCLUSIVE_LOCKS_REQUIRED(!mu) {
+ mu.Lock();
+ a = 0;
+ mu.Unlock();
+ }
+};
+
+
+// Exercises negative-capability warnings within a single class.
+class Foo {
+ Mutex mu;
+ int a GUARDED_BY(mu);
+
+public:
+ // Acquires mu without declaring !mu, then calls a function requiring !mu
+ // while mu is held.
+ void foo() {
+ mu.Lock(); // expected-warning {{acquiring mutex 'mu' requires negative capability '!mu'}}
+ baz(); // expected-warning {{cannot call function 'baz' while mutex 'mu' is held}}
+ bar();
+ mu.Unlock();
+ }
+
+ // Calls baz() without holding the negative capability !mu.
+ void bar() {
+ baz(); // expected-warning {{calling function 'baz' requires holding '!mu'}}
+ }
+
+ void baz() EXCLUSIVE_LOCKS_REQUIRED(!mu) {
+ mu.Lock();
+ a = 0;
+ mu.Unlock();
+ }
+
+ void test() {
+ Bar b;
+ b.baz(); // no warning -- in different class.
+ }
+
+ // After Unlock(), !mu is back in the fact set, so baz() may be called.
+ void test2() {
+ mu.Lock(); // expected-warning {{acquiring mutex 'mu' requires negative capability '!mu'}}
+ a = 0;
+ mu.Unlock();
+ baz(); // no warning -- !mu in set.
+ }
+
+ void test3() EXCLUSIVE_LOCKS_REQUIRED(!mu) {
+ mu.Lock();
+ a = 0;
+ mu.Unlock();
+ baz(); // no warning -- !mu in set.
+ }
+};
+
+} // end namespace SimpleTest