Thread Safety Analysis: Very basic capability alias-analysis #142955

Open · wants to merge 1 commit into main
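For a quick illustration of the source-level pattern this change targets, here is a minimal, self-contained sketch. It is hypothetical (not taken from this patch or its tests) and mirrors the idioms of clang/test/Sema/warn-thread-safety-analysis.c; compile with `clang -Wthread-safety -c` to exercise the analysis.

```c
// Hypothetical sketch, not part of this patch. The capability attributes and
// helper names below mirror the style of warn-thread-safety-analysis.c.
struct __attribute__((lockable)) Mutex {
  int placeholder;
};

struct Mutex mu1;
int a_value __attribute__((guarded_by(mu1)));

void mutex_exclusive_lock(struct Mutex *mu) __attribute__((exclusive_lock_function(mu)));
void mutex_exclusive_unlock(struct Mutex *mu) __attribute__((unlock_function(mu)));

void lock_through_alias(void) {
  struct Mutex *const alias = &mu1;  // const local pointer, initialized once
  mutex_exclusive_lock(alias);       // translateVarDecl() resolves 'alias' to '&mu1'
  a_value = 1;                       // previously: "requires holding mutex 'mu1'" warning;
                                     // with this patch 'mu1' is known to be held
  mutex_exclusive_unlock(alias);
}
```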
15 changes: 13 additions & 2 deletions clang/include/clang/Analysis/Analyses/ThreadSafetyCommon.h
@@ -386,6 +386,11 @@ class SExprBuilder {
SelfVar->setKind(til::Variable::VK_SFun);
}

// Create a placeholder for 'this': the VarDecl it will be bound to is not yet
// known at construction time.
til::LiteralPtr *createThisPlaceholder() {
return new (Arena) til::LiteralPtr(nullptr);
}

// Translate a clang expression in an attribute to a til::SExpr.
// Constructs the context from D, DeclExp, and SelfDecl.
CapabilityExpr translateAttrExpr(const Expr *AttrExp, const NamedDecl *D,
@@ -394,8 +399,8 @@

CapabilityExpr translateAttrExpr(const Expr *AttrExp, CallingContext *Ctx);

// Translate a variable reference.
til::LiteralPtr *createVariable(const VarDecl *VD);
// Translate a VarDecl to its canonical TIL expression.
til::SExpr *translateVarDecl(const VarDecl *VD, CallingContext *Ctx);

// Translate a clang statement or expression to a TIL expression.
// Also performs substitution of variables; Ctx provides the context.
@@ -501,6 +506,9 @@ class SExprBuilder {
void mergeEntryMapBackEdge();
void mergePhiNodesBackEdge(const CFGBlock *Blk);

// Returns true if a variable is, or must conservatively be assumed to be,
// reassigned after its initialization.
bool isVariableReassigned(const VarDecl *VD);

private:
// Set to true when parsing capability expressions, which get translated
// inaccurately in order to hack around smart pointers etc.
@@ -531,6 +539,9 @@ class SExprBuilder {
std::vector<til::Phi *> IncompleteArgs;
til::BasicBlock *CurrentBB = nullptr;
BlockInfo *CurrentBlockInfo = nullptr;

// Cache of whether a local variable is reassigned after its initialization.
llvm::DenseMap<const VarDecl *, bool> LocalVariableReassigned;
};

#ifndef NDEBUG
31 changes: 15 additions & 16 deletions clang/lib/Analysis/ThreadSafety.cpp
@@ -1141,11 +1141,10 @@ class ThreadSafetyAnalyzer {

void warnIfMutexNotHeld(const FactSet &FSet, const NamedDecl *D,
const Expr *Exp, AccessKind AK, Expr *MutexExp,
ProtectedOperationKind POK, til::LiteralPtr *Self,
ProtectedOperationKind POK, til::SExpr *Self,
SourceLocation Loc);
void warnIfMutexHeld(const FactSet &FSet, const NamedDecl *D, const Expr *Exp,
Expr *MutexExp, til::LiteralPtr *Self,
SourceLocation Loc);
Expr *MutexExp, til::SExpr *Self, SourceLocation Loc);

void checkAccess(const FactSet &FSet, const Expr *Exp, AccessKind AK,
ProtectedOperationKind POK);
@@ -1599,7 +1598,7 @@ class BuildLockset : public ConstStmtVisitor<BuildLockset> {
}

void handleCall(const Expr *Exp, const NamedDecl *D,
til::LiteralPtr *Self = nullptr,
til::SExpr *Self = nullptr,
SourceLocation Loc = SourceLocation());
void examineArguments(const FunctionDecl *FD,
CallExpr::const_arg_iterator ArgBegin,
@@ -1629,7 +1628,7 @@ class BuildLockset : public ConstStmtVisitor<BuildLockset> {
/// of at least the passed in AccessKind.
void ThreadSafetyAnalyzer::warnIfMutexNotHeld(
const FactSet &FSet, const NamedDecl *D, const Expr *Exp, AccessKind AK,
Expr *MutexExp, ProtectedOperationKind POK, til::LiteralPtr *Self,
Expr *MutexExp, ProtectedOperationKind POK, til::SExpr *Self,
SourceLocation Loc) {
LockKind LK = getLockKindFromAccessKind(AK);
CapabilityExpr Cp = SxBuilder.translateAttrExpr(MutexExp, D, Exp, Self);
Expand Down Expand Up @@ -1688,8 +1687,7 @@ void ThreadSafetyAnalyzer::warnIfMutexNotHeld(
/// Warn if the LSet contains the given lock.
void ThreadSafetyAnalyzer::warnIfMutexHeld(const FactSet &FSet,
const NamedDecl *D, const Expr *Exp,
Expr *MutexExp,
til::LiteralPtr *Self,
Expr *MutexExp, til::SExpr *Self,
SourceLocation Loc) {
CapabilityExpr Cp = SxBuilder.translateAttrExpr(MutexExp, D, Exp, Self);
if (Cp.isInvalid()) {
@@ -1857,7 +1855,7 @@ void ThreadSafetyAnalyzer::checkPtAccess(const FactSet &FSet, const Expr *Exp,
/// of an implicitly called cleanup function.
/// \param Loc If \p Exp = nullptr, the location.
void BuildLockset::handleCall(const Expr *Exp, const NamedDecl *D,
til::LiteralPtr *Self, SourceLocation Loc) {
til::SExpr *Self, SourceLocation Loc) {
CapExprSet ExclusiveLocksToAdd, SharedLocksToAdd;
CapExprSet ExclusiveLocksToRemove, SharedLocksToRemove, GenericLocksToRemove;
CapExprSet ScopedReqsAndExcludes;
@@ -1869,7 +1867,7 @@ void BuildLockset::handleCall(const Expr *Exp, const NamedDecl *D,
const auto *TagT = Exp->getType()->getAs<TagType>();
if (D->hasAttrs() && TagT && Exp->isPRValue()) {
til::LiteralPtr *Placeholder =
Analyzer->SxBuilder.createVariable(nullptr);
Analyzer->SxBuilder.createThisPlaceholder();
[[maybe_unused]] auto inserted =
Analyzer->ConstructedObjects.insert({Exp, Placeholder});
assert(inserted.second && "Are we visiting the same expression again?");
@@ -2545,7 +2543,7 @@ void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) {
}
if (UnderlyingLocks.empty())
continue;
CapabilityExpr Cp(SxBuilder.createVariable(Param), StringRef(),
CapabilityExpr Cp(SxBuilder.translateVarDecl(Param, nullptr), StringRef(),
/*Neg=*/false, /*Reentrant=*/false);
auto ScopedEntry = std::make_unique<ScopedLockableFactEntry>(
Cp, Param->getLocation(), FactEntry::Declared);
@@ -2662,17 +2660,18 @@ void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) {
if (!DD->hasAttrs())
break;

LocksetBuilder.handleCall(nullptr, DD,
SxBuilder.createVariable(AD.getVarDecl()),
AD.getTriggerStmt()->getEndLoc());
LocksetBuilder.handleCall(
nullptr, DD, SxBuilder.translateVarDecl(AD.getVarDecl(), nullptr),
AD.getTriggerStmt()->getEndLoc());
break;
}

case CFGElement::CleanupFunction: {
const CFGCleanupFunction &CF = BI.castAs<CFGCleanupFunction>();
LocksetBuilder.handleCall(/*Exp=*/nullptr, CF.getFunctionDecl(),
SxBuilder.createVariable(CF.getVarDecl()),
CF.getVarDecl()->getLocation());
LocksetBuilder.handleCall(
/*Exp=*/nullptr, CF.getFunctionDecl(),
SxBuilder.translateVarDecl(CF.getVarDecl(), nullptr),
CF.getVarDecl()->getLocation());
break;
}

121 changes: 120 additions & 1 deletion clang/lib/Analysis/ThreadSafetyCommon.cpp
@@ -19,6 +19,7 @@
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/Type.h"
#include "clang/Analysis/Analyses/ThreadSafetyTIL.h"
@@ -241,7 +242,21 @@ CapabilityExpr SExprBuilder::translateAttrExpr(const Expr *AttrExp,
return CapabilityExpr(E, AttrExp->getType(), Neg);
}

til::LiteralPtr *SExprBuilder::createVariable(const VarDecl *VD) {
til::SExpr *SExprBuilder::translateVarDecl(const VarDecl *VD,
CallingContext *Ctx) {
assert(VD);
// Substitute local pointer variables with their initializers if they are
// explicitly const or never reassigned.
QualType Ty = VD->getType();
if (Ty->isPointerType() && VD->hasInit() && !isVariableReassigned(VD)) {
const Expr *Init = VD->getInit()->IgnoreParenImpCasts();
// Check for self-initialization to prevent infinite recursion.
if (const auto *InitDRE = dyn_cast<DeclRefExpr>(Init)) {
if (InitDRE->getDecl()->getCanonicalDecl() == VD->getCanonicalDecl())
return new (Arena) til::LiteralPtr(VD);
}
return translate(Init, Ctx);
}
return new (Arena) til::LiteralPtr(VD);
}

@@ -353,6 +368,9 @@ til::SExpr *SExprBuilder::translateDeclRefExpr(const DeclRefExpr *DRE,
: cast<ObjCMethodDecl>(D)->getCanonicalDecl()->getParamDecl(I);
}

if (const auto *VarD = dyn_cast<VarDecl>(VD))
return translateVarDecl(VarD, Ctx);

// For non-local variables, treat it as a reference to a named object.
return new (Arena) til::LiteralPtr(VD);
}
@@ -1012,6 +1030,107 @@ void SExprBuilder::exitCFG(const CFGBlock *Last) {
IncompleteArgs.clear();
}

bool SExprBuilder::isVariableReassigned(const VarDecl *VD) {
// Note: The search is performed lazily per variable, and the result is
// cached. An alternative would have been to eagerly compute the set of all
// reassigned variables, but that would consume significantly more memory. The
// number of variables needing the reassignment check in a single function is
// expected to be small. Also see translateVarDecl().
struct ReassignmentFinder : RecursiveASTVisitor<ReassignmentFinder> {
explicit ReassignmentFinder(const VarDecl *VD) : QueryVD(VD) {
assert(QueryVD->getCanonicalDecl() == QueryVD);
}

bool VisitBinaryOperator(BinaryOperator *BO) {
if (!BO->isAssignmentOp())
return true;
auto *DRE = dyn_cast<DeclRefExpr>(BO->getLHS()->IgnoreParenImpCasts());
if (!DRE)
return true;
auto *AssignedVD = dyn_cast<VarDecl>(DRE->getDecl());
if (!AssignedVD)
return true;
// Don't count the initialization itself as a reassignment.
if (AssignedVD->hasInit() &&
AssignedVD->getInit()->getBeginLoc() == BO->getBeginLoc())
return true;
// The query variable appears as the LHS of an assignment: it is reassigned.
if (QueryVD == AssignedVD->getCanonicalDecl()) {
FoundReassignment = true;
return false; // stop
}
return true;
}

bool VisitCallExpr(CallExpr *CE) {
const FunctionDecl *FD = CE->getDirectCallee();
if (!FD)
return true;

for (unsigned Idx = 0; Idx < CE->getNumArgs(); ++Idx) {
if (FoundReassignment)
return false;
if (Idx >= FD->getNumParams())
break;

const Expr *Arg = CE->getArg(Idx)->IgnoreParenImpCasts();
const ParmVarDecl *PVD = FD->getParamDecl(Idx);
QualType ParamType = PVD->getType();

// Check if the argument is a reference to our QueryVD.
if (const auto *DRE = dyn_cast<DeclRefExpr>(Arg)) {
if (DRE->getDecl()->getCanonicalDecl() == QueryVD) {
// Potential reassignment if passed by non-const reference.
if (ParamType->isReferenceType() &&
!ParamType->getPointeeType().isConstQualified()) {
FoundReassignment = true;
}
}
}
// Check if we are taking the address of the variable.
if (const auto *UO = dyn_cast<UnaryOperator>(Arg);
UO && UO->getOpcode() == UO_AddrOf) {
const Expr *SE = UO->getSubExpr()->IgnoreParenImpCasts();
if (const auto *DRE = dyn_cast<DeclRefExpr>(SE)) {
if (DRE->getDecl()->getCanonicalDecl() == QueryVD) {
// Potential reassignment if passed by non-const pointer.
if (ParamType->isPointerType() &&
!ParamType->getPointeeType().isConstQualified()) {
FoundReassignment = true;
}
}
}
}
}

return !FoundReassignment;
}

const VarDecl *QueryVD;
bool FoundReassignment = false;
};

if (VD->getType().isConstQualified())
return false; // A const local cannot be reassigned without UB; assume none.
if (!VD->isLocalVarDecl())
return true; // Not a local variable (assume reassigned).
auto *FD = dyn_cast<FunctionDecl>(VD->getDeclContext());
if (!FD)
return true; // Assume reassigned.

// Look up the cached result; key on the canonical declaration so cache
// lookups are consistent.
VD = VD->getCanonicalDecl();
auto It = LocalVariableReassigned.find(VD);
if (It != LocalVariableReassigned.end())
return It->second;

ReassignmentFinder Visitor(VD);
// const_cast ok: FunctionDecl not modified.
Visitor.TraverseDecl(const_cast<FunctionDecl *>(FD));
return LocalVariableReassigned[VD] = Visitor.FoundReassignment;
}

#ifndef NDEBUG
namespace {

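The reassignment check above is intentionally conservative: a plain assignment to the variable, or letting its address escape into a non-const pointer or reference parameter, both disable the substitution. A hedged sketch of the escape case, reusing the declarations from the first example near the top of this page:

```c
// Hypothetical continuation of the earlier sketch (same Mutex, mu1, a_value and
// lock/unlock declarations). Shows a case where isVariableReassigned() returns
// true, so translateVarDecl() keeps the variable opaque.
void retarget(struct Mutex **pp);  // may store a different pointer through *pp

void no_substitution(void) {
  struct Mutex *alias = &mu1;
  retarget(&alias);              // address escapes via a non-const 'struct Mutex **'
                                 // parameter: counted as a potential reassignment
  mutex_exclusive_lock(alias);   // 'alias' stays an opaque capability
  a_value = 1;                   // still warns: 'mu1' is not known to be held
  mutex_exclusive_unlock(alias);
}
```

A direct reassignment such as `alias = ...;` anywhere in the enclosing function has the same effect via VisitBinaryOperator().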
6 changes: 5 additions & 1 deletion clang/test/Sema/warn-thread-safety-analysis.c
@@ -184,9 +184,13 @@ int main(void) {
/// Cleanup functions
{
struct Mutex* const __attribute__((cleanup(unlock_scope))) scope = &mu1;
mutex_exclusive_lock(scope); // Note that we have to lock through scope, because no alias analysis!
mutex_exclusive_lock(scope); // Lock through scope works.
// Cleanup happens automatically -> no warning.
}
{
struct Mutex* const __attribute__((unused, cleanup(unlock_scope))) scope = &mu1;
mutex_exclusive_lock(&mu1); // With basic alias analysis, locking through mu1 directly also works.
}

foo_.a_value = 0; // expected-warning {{writing variable 'a_value' requires holding mutex 'mu_' exclusively}}
*foo_.a_ptr = 1; // expected-warning {{writing the value pointed to by 'a_ptr' requires holding mutex 'bar.other_mu' exclusively}}
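Note that the substitution is keyed on "never reassigned", not on const-ness: by the logic in translateVarDecl() and isVariableReassigned(), a non-const local pointer that is initialized once and then left alone should be resolved the same way. A sketch of that case, again reusing the declarations from the first example (this particular combination is not covered by the test changes above):

```c
// Hypothetical case, not exercised by the tests in this patch: the alias is not
// const, but it is never reassigned, so the same substitution should apply.
void non_const_alias(void) {
  struct Mutex *alias = &mu1;   // not const, but never reassigned below
  mutex_exclusive_lock(alias);  // expected to resolve to '&mu1' as well
  a_value = 2;                  // expected: no warning
  mutex_exclusive_unlock(alias);
}
```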