39#include "llvm/ADT/DenseMap.h"
40#include "llvm/ADT/ImmutableMap.h"
41#include "llvm/ADT/STLExtras.h"
42#include "llvm/ADT/ScopeExit.h"
43#include "llvm/ADT/SmallVector.h"
44#include "llvm/ADT/StringRef.h"
45#include "llvm/Support/Allocator.h"
46#include "llvm/Support/ErrorHandling.h"
47#include "llvm/Support/TrailingObjects.h"
48#include "llvm/Support/raw_ostream.h"
67 const Expr *DeclExp, StringRef Kind) {
// NOTE(review): this excerpt is elided/mangled — stray leading numbers are old
// line numbers fused into the text, and interior lines are missing.
// CapExprSet: a SmallVector of CapabilityExpr with a de-duplicating insert.
81class CapExprSet :
public SmallVector<CapabilityExpr, 4> {
// push_back_nodup: append CapE only if no existing element compares equal
// (the none_of predicate body and the actual push_back are elided here).
84 void push_back_nodup(
const CapabilityExpr &CapE) {
85 if (llvm::none_of(*
this, [=](
const CapabilityExpr &CapE2) {
// Interior of a FactEntry-like class (class header elided): one "fact" the
// analysis tracks — a capability that is held at a program point.
// Kind discriminates the two concrete subclasses visible later in the file.
101 enum FactEntryKind { Lockable, ScopedLockable };
// Bit-fields pack the kind/source tags; AcquireLoc records where the
// capability was acquired.
112 const FactEntryKind Kind : 8;
118 SourceKind Source : 8;
121 SourceLocation AcquireLoc;
124 ~FactEntry() =
default;
127 FactEntry(FactEntryKind FK,
const CapabilityExpr &CE,
LockKind LK,
128 SourceLocation Loc, SourceKind Src)
129 : CapabilityExpr(CE), Kind(FK), LKind(LK), Source(Src), AcquireLoc(Loc) {}
// Simple accessors over the stored tags.
132 SourceLocation loc()
const {
return AcquireLoc; }
133 FactEntryKind getFactEntryKind()
const {
return Kind; }
// Source-kind predicates: asserted via an assert-capability attribute,
// declared via a requires-attribute, or managed by a scoped object.
135 bool asserted()
const {
return Source == Asserted; }
136 bool declared()
const {
return Source == Declared; }
137 bool managed()
const {
return Source == Managed; }
// Pure-virtual hooks implemented by the concrete fact kinds: how a fact
// reacts to being dropped at a CFG join, re-locked, or unlocked.
140 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
142 ThreadSafetyHandler &Handler)
const = 0;
143 virtual void handleLock(FactSet &FSet, FactManager &FactMan,
144 const FactEntry &entry,
145 ThreadSafetyHandler &Handler)
const = 0;
146 virtual void handleUnlock(FactSet &FSet, FactManager &FactMan,
147 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
149 ThreadSafetyHandler &Handler)
const = 0;
// FactID: compact index into FactManager's fact table; the assert in
// newFact() below guards against overflowing this 16-bit space.
157using FactID =
unsigned short;
// FactManager interior (class header elided): owns all FactEntry objects,
// allocated from a BumpPtrAllocator, addressed by FactID.
163 llvm::BumpPtrAllocator &Alloc;
164 std::vector<const FactEntry *> Facts;
167 FactManager(llvm::BumpPtrAllocator &Alloc) : Alloc(Alloc) {}
// createFact: arena-allocate a concrete fact via T::create. The
// static_assert enforces trivial destructibility, since arena-allocated
// objects never have their destructors run.
169 template <
typename T,
typename... ArgTypes>
170 T *createFact(ArgTypes &&...Args) {
171 static_assert(std::is_trivially_destructible_v<T>);
172 return T::create(Alloc, std::forward<ArgTypes>(Args)...);
// newFact: register an entry and hand back its index as a FactID.
175 FactID newFact(
const FactEntry *Entry) {
176 Facts.push_back(Entry);
177 assert(Facts.size() - 1 <= std::numeric_limits<FactID>::max() &&
178 "FactID space exhausted");
179 return static_cast<unsigned short>(Facts.size() - 1);
// Index lookup: FactID -> the stored fact.
182 const FactEntry &operator[](FactID F)
const {
return *Facts[F]; }
// FactSet interior (class header elided): the set of capabilities held at a
// program point, stored as a small vector of FactIDs into a FactManager.
194 using FactVec = SmallVector<FactID, 4>;
199 using iterator = FactVec::iterator;
200 using const_iterator = FactVec::const_iterator;
202 iterator begin() {
return FactIDs.begin(); }
203 const_iterator begin()
const {
return FactIDs.begin(); }
205 iterator end() {
return FactIDs.end(); }
206 const_iterator end()
const {
return FactIDs.end(); }
208 bool isEmpty()
const {
return FactIDs.size() == 0; }
// isEmpty(FactMan): emptiness ignoring "negative" facts — loop body is
// partially elided in this excerpt.
211 bool isEmpty(FactManager &FactMan)
const {
212 for (
const auto FID : *
this) {
213 if (!FactMan[FID].negative())
219 void addLockByID(FactID ID) { FactIDs.push_back(ID); }
// addLock: register the entry with the manager and record its ID here.
221 FactID addLock(FactManager &FM,
const FactEntry *Entry) {
222 FactID F = FM.newFact(Entry);
223 FactIDs.push_back(F);
// removeLock: unordered erase — a matching element is overwritten with the
// last element (interior pop/return lines elided).
227 bool removeLock(FactManager& FM,
const CapabilityExpr &CapE) {
228 unsigned n = FactIDs.size();
232 for (
unsigned i = 0; i < n-1; ++i) {
233 if (FM[FactIDs[i]].
matches(CapE)) {
234 FactIDs[i] = FactIDs[n-1];
239 if (FM[FactIDs[n-1]].
matches(CapE)) {
// replaceLock (iterator form): swap in a new fact at an existing slot.
246 std::optional<FactID> replaceLock(FactManager &FM, iterator It,
247 const FactEntry *Entry) {
250 FactID F = FM.newFact(Entry);
// replaceLock (expression form): locate the matching fact, then delegate.
255 std::optional<FactID> replaceLock(FactManager &FM,
const CapabilityExpr &CapE,
256 const FactEntry *Entry) {
257 return replaceLock(FM, findLockIter(FM, CapE), Entry);
// Linear searches over the ID vector, each with a different match notion:
// exact match, universal match, and partial match.
260 iterator findLockIter(FactManager &FM,
const CapabilityExpr &CapE) {
261 return llvm::find_if(*
this,
262 [&](FactID ID) {
return FM[
ID].matches(CapE); });
265 const FactEntry *findLock(FactManager &FM,
const CapabilityExpr &CapE)
const {
267 llvm::find_if(*
this, [&](FactID ID) {
return FM[
ID].matches(CapE); });
268 return I != end() ? &FM[*I] :
nullptr;
271 const FactEntry *findLockUniv(FactManager &FM,
272 const CapabilityExpr &CapE)
const {
273 auto I = llvm::find_if(
274 *
this, [&](FactID ID) ->
bool {
return FM[
ID].matchesUniv(CapE); });
275 return I != end() ? &FM[*I] :
nullptr;
278 const FactEntry *findPartialMatch(FactManager &FM,
279 const CapabilityExpr &CapE)
const {
280 auto I = llvm::find_if(*
this, [&](FactID ID) ->
bool {
281 return FM[
ID].partiallyMatches(CapE);
283 return I != end() ? &FM[*I] :
nullptr;
// containsMutexDecl: membership test keyed on the underlying ValueDecl.
286 bool containsMutexDecl(FactManager &FM,
const ValueDecl* Vd)
const {
287 auto I = llvm::find_if(
288 *
this, [&](FactID ID) ->
bool {
return FM[
ID].valueDecl() == Vd; });
293class ThreadSafetyAnalyzer;
// BeforeInfo fragments (enclosing class elided): per-declaration
// acquired-before ordering data used to diagnose lock-ordering cycles.
308 BeforeInfo() =
default;
309 BeforeInfo(BeforeInfo &&) =
default;
313 llvm::DenseMap<const ValueDecl *, std::unique_ptr<BeforeInfo>>;
314 using CycleMap = llvm::DenseMap<const ValueDecl *, bool>;
// Method declaration tails (signatures elided) that take the analyzer.
320 ThreadSafetyAnalyzer& Analyzer);
323 ThreadSafetyAnalyzer &Analyzer);
327 ThreadSafetyAnalyzer& Analyzer,
340class LocalVariableMap;
// LocalVarContext: immutable map from a local variable to the index of its
// current definition (see LocalVariableMap below).
342using LocalVarContext = llvm::ImmutableMap<const NamedDecl *, unsigned>;
// Which side of a CFG block a query refers to.
345enum CFGBlockSide { CBS_Entry, CBS_Exit };
// CFGBlockInfo interior (struct header elided): per-CFG-block analysis
// state — fact sets, variable contexts, and source locations at entry/exit.
358 LocalVarContext EntryContext;
361 LocalVarContext ExitContext;
364 SourceLocation EntryLoc;
367 SourceLocation ExitLoc;
373 bool Reachable =
false;
// Side-selecting accessors.
375 const FactSet &getSet(CFGBlockSide Side)
const {
376 return Side == CBS_Entry ? EntrySet : ExitSet;
379 SourceLocation getLocation(CFGBlockSide Side)
const {
380 return Side == CBS_Entry ? EntryLoc : ExitLoc;
384 CFGBlockInfo(LocalVarContext EmptyCtx)
385 : EntryContext(EmptyCtx), ExitContext(EmptyCtx) {}
388 static CFGBlockInfo getEmptyBlockInfo(LocalVariableMap &M);
// LocalVariableMap: an SSA-like map from local variables to (indexed)
// definitions, used to resolve what expression a variable currently holds.
// Many interior lines are elided in this excerpt.
404class LocalVariableMap {
406 using Context = LocalVarContext;
// VarDefinition: one definition of a variable — either an initializing
// expression (Exp) or a reference (Ref) to another definition's index.
412 struct VarDefinition {
414 friend class LocalVariableMap;
417 const NamedDecl *Dec;
420 const Expr *Exp =
nullptr;
// A definition with no expression is a reference to another definition.
428 bool isReference()
const {
return !Exp; }
// Ctor for a direct (expression) definition.
432 VarDefinition(
const NamedDecl *D,
const Expr *E, Context
C)
433 : Dec(D), Exp(E), Ctx(
C) {}
// Ctor for a reference definition (index into VarDefinitions).
436 VarDefinition(
const NamedDecl *D,
unsigned R, Context
C)
437 : Dec(D), Ref(R), Ctx(
C) {}
441 Context::Factory ContextFactory;
442 std::vector<VarDefinitions;
// Factory for a CFGBlockInfo seeded with the map's empty context
// (closing brace elided in this excerpt).
615CFGBlockInfo CFGBlockInfo::getEmptyBlockInfo(LocalVariableMap &M) {
616 return CFGBlockInfo(M.getEmptyContext());
// VarMapBuilder: statement visitor that threads a LocalVariableMap::Context
// through the statements of one CFG block, recording definitions.
622class VarMapBuilder :
public ConstStmtVisitor<VarMapBuilder> {
624 LocalVariableMap* VMap;
625 LocalVariableMap::Context Ctx;
627 VarMapBuilder(LocalVariableMap *VM, LocalVariableMap::Context
C)
628 : VMap(VM), Ctx(
C) {}
// Handlers for the statement kinds that can create or kill definitions.
630 void VisitDeclStmt(
const DeclStmt *S);
631 void VisitBinaryOperator(
const BinaryOperator *BO);
632 void VisitCallExpr(
const CallExpr *CE);
// VisitDeclStmt: record a definition for each trivially-typed local
// variable declared with an initializer (interior lines elided).
638void VarMapBuilder::VisitDeclStmt(
const DeclStmt *S) {
639 bool modifiedCtx =
false;
641 for (
const auto *D : DGrp) {
642 if (
const auto *VD = dyn_cast_or_null<VarDecl>(D)) {
643 const Expr *E = VD->getInit();
// Only trivial types are tracked — non-trivial ones may alias/mutate.
646 QualType
T = VD->getType();
647 if (
T.isTrivialType(VD->getASTContext())) {
648 Ctx = VMap->addDefinition(VD, E, Ctx);
654 VMap->saveContext(S, Ctx);
// VisitBinaryOperator: on assignment to a tracked variable, update its
// definition to the RHS; otherwise clear it (elided branch in between).
658void VarMapBuilder::VisitBinaryOperator(
const BinaryOperator *BO) {
665 if (
const auto *DRE = dyn_cast<DeclRefExpr>(LHSExp)) {
666 const ValueDecl *VDec = DRE->getDecl();
667 if (Ctx.lookup(VDec)) {
669 Ctx = VMap->updateDefinition(VDec, BO->
getRHS(), Ctx);
672 Ctx = VMap->clearDefinition(VDec, Ctx);
673 VMap->saveContext(BO, Ctx);
// VisitCallExpr: conservatively kill definitions of variables passed in a
// way a call could mutate (by non-const reference/pointer); "bind"-style
// callees are special-cased (surrounding lines elided).
679void VarMapBuilder::VisitCallExpr(
const CallExpr *CE) {
689 if (II->isStr(
"bind") || II->isStr(
"bind_front"))
695 for (
unsigned Idx = 0; Idx < CE->
getNumArgs(); ++Idx) {
701 QualType ParamType = PVD->
getType();
704 const ValueDecl *VDec =
nullptr;
// Argument is the variable itself...
707 if (
const auto *DRE = dyn_cast<DeclRefExpr>(Arg))
708 VDec = DRE->getDecl();
// ...or its address (&var), which also allows mutation by the callee.
712 if (
const auto *UO = dyn_cast<UnaryOperator>(Arg)) {
713 if (UO->getOpcode() == UO_AddrOf) {
714 const Expr *SubE = UO->getSubExpr()->IgnoreParenCasts();
715 if (
const auto *DRE = dyn_cast<DeclRefExpr>(SubE))
716 VDec = DRE->getDecl();
721 if (VDec && Ctx.lookup(VDec)) {
722 Ctx = VMap->clearDefinition(VDec, Ctx);
723 VMap->saveContext(CE, Ctx);
// intersectContexts: meet of two variable contexts at a CFG join — a
// variable survives only if both sides agree on its canonical definition
// (the agreeing/disagreeing branches are partially elided).
731LocalVariableMap::Context
732LocalVariableMap::intersectContexts(Context C1, Context C2) {
734 for (
const auto &P : C1) {
735 const NamedDecl *
Dec = P.first;
736 const unsigned *I2 = C2.lookup(Dec);
740 }
else if (getCanonicalDefinitionID(P.second) !=
741 getCanonicalDefinitionID(*I2)) {
// createReferenceContext: copy of a context where each entry becomes a
// reference definition — used to seed loop-header contexts before the
// back edge is known (loop body elided).
753LocalVariableMap::Context LocalVariableMap::createReferenceContext(Context
C) {
754 Context
Result = getEmptyContext();
755 for (
const auto &P :
C)
// intersectBackEdge: reconcile a loop-header context (all references, per
// the assert) with the context flowing around the back edge; entries whose
// canonical definitions disagree are invalidated (elided).
763void LocalVariableMap::intersectBackEdge(Context C1, Context C2) {
764 for (
const auto &P : C1) {
765 const unsigned I1 = P.second;
766 VarDefinition *VDef = &VarDefinitions[I1];
767 assert(VDef->isReference());
769 const unsigned *I2 = C2.lookup(P.first);
778 if (getCanonicalDefinitionID(VDef->Ref) != getCanonicalDefinitionID(*I2)) {
// traverseCFG: walk blocks in topological (post-order view) order,
// computing entry contexts from predecessors, running VarMapBuilder over
// each block, and patching loop back edges afterwards.
821void LocalVariableMap::traverseCFG(CFG *CFGraph,
822 const PostOrderCFGView *SortedGraph,
823 std::vector<CFGBlockInfo> &BlockInfo) {
824 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
826 for (
const auto *CurrBlock : *SortedGraph) {
827 unsigned CurrBlockID = CurrBlock->getBlockID();
828 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
830 VisitedBlocks.insert(CurrBlock);
// Merge predecessor exit contexts into this block's entry context;
// unvisited predecessors are back edges.
833 bool HasBackEdges =
false;
836 PE = CurrBlock->pred_end(); PI != PE; ++PI) {
838 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI)) {
843 unsigned PrevBlockID = (*PI)->getBlockID();
844 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
// First predecessor: copy; later ones: intersect.
847 CurrBlockInfo->EntryContext = PrevBlockInfo->ExitContext;
851 CurrBlockInfo->EntryContext =
852 intersectContexts(CurrBlockInfo->EntryContext,
853 PrevBlockInfo->ExitContext);
// Loop headers get a reference context, finalized once the back edge
// has been processed.
860 CurrBlockInfo->EntryContext =
861 createReferenceContext(CurrBlockInfo->EntryContext);
864 saveContext(
nullptr, CurrBlockInfo->EntryContext);
865 CurrBlockInfo->EntryIndex = getContextIndex();
// Run the statement visitor over the block to compute the exit context.
868 VarMapBuilder VMapBuilder(
this, CurrBlockInfo->EntryContext);
869 for (
const auto &BI : *CurrBlock) {
870 switch (BI.getKind()) {
872 CFGStmt CS = BI.castAs<CFGStmt>();
873 VMapBuilder.Visit(CS.
getStmt());
880 CurrBlockInfo->ExitContext = VMapBuilder.Ctx;
// For each back edge out of this block, reconcile the loop-begin context
// with the loop-end context.
884 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
886 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
889 CFGBlock *FirstLoopBlock = *SI;
890 Context LoopBegin = BlockInfo[FirstLoopBlock->
getBlockID()].EntryContext;
891 Context LoopEnd = CurrBlockInfo->ExitContext;
892 intersectBackEdge(LoopBegin, LoopEnd);
898 saveContext(
nullptr, BlockInfo[exitID].ExitContext);
// Body of a block-location pass (signature elided; presumably something like
// findBlockLocations): compute best-effort Entry/Exit source locations for
// every CFG block, falling back to neighbors for statement-less blocks.
905 std::vector<CFGBlockInfo> &BlockInfo) {
906 for (
const auto *CurrBlock : *SortedGraph) {
907 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlock->getBlockID()];
// Prefer the terminator statement's location when present.
911 if (
const Stmt *S = CurrBlock->getTerminatorStmt()) {
912 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc = S->
getBeginLoc();
// Otherwise scan backwards for the last real statement to set ExitLoc.
915 BE = CurrBlock->rend(); BI != BE; ++BI) {
917 if (std::optional<CFGStmt> CS = BI->getAs<
CFGStmt>()) {
918 CurrBlockInfo->ExitLoc = CS->getStmt()->getBeginLoc();
924 if (CurrBlockInfo->ExitLoc.
isValid()) {
// ...and forwards for the first real statement to set EntryLoc.
927 for (
const auto &BI : *CurrBlock) {
929 if (std::optional<CFGStmt> CS = BI.getAs<
CFGStmt>()) {
930 CurrBlockInfo->EntryLoc = CS->getStmt()->getBeginLoc();
934 }
// No statements: inherit from the single predecessor's exit...
else if (CurrBlock->pred_size() == 1 && *CurrBlock->pred_begin() &&
935 CurrBlock != &CFGraph->
getExit()) {
938 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
939 BlockInfo[(*CurrBlock->pred_begin())->getBlockID()].ExitLoc;
940 }
// ...or from the single successor's entry.
else if (CurrBlock->succ_size() == 1 && *CurrBlock->succ_begin()) {
943 CurrBlockInfo->EntryLoc = CurrBlockInfo->ExitLoc =
944 BlockInfo[(*CurrBlock->succ_begin())->getBlockID()].EntryLoc;
// LockableFactEntry: a plain (possibly reentrant) capability fact.
// ReentrancyDepth counts nested re-acquisitions of a reentrant capability.
951class LockableFactEntry final :
public FactEntry {
956 unsigned int ReentrancyDepth = 0;
958 LockableFactEntry(
const CapabilityExpr &CE,
LockKind LK, SourceLocation Loc,
960 : FactEntry(Lockable, CE, LK, Loc, Src) {}
// Arena factory: copy an existing entry (body elided).
963 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
964 const LockableFactEntry &
Other) {
// Arena factory: construct from components; placement-new into Alloc.
968 static LockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
969 const CapabilityExpr &CE,
LockKind LK,
971 SourceKind Src = Acquired) {
972 return new (
Alloc) LockableFactEntry(CE, LK, Loc, Src);
975 unsigned int getReentrancyDepth()
const {
return ReentrancyDepth; }
// Warn when a real (non-asserted, non-negative, non-universal) capability
// is dropped at a join (diagnostic call elided).
978 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
980 ThreadSafetyHandler &Handler)
const override {
981 if (!asserted() && !negative() && !isUniversal()) {
// Re-lock: if reentrant, bump the depth via a replacement fact; otherwise
// (elided) report a double-lock.
987 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
988 ThreadSafetyHandler &Handler)
const override {
989 if (
const FactEntry *RFact = tryReenter(FactMan, entry.kind())) {
991 FSet.replaceLock(FactMan, entry, RFact);
// Unlock: remove the fact; a reentrant capability at depth > 0 is
// re-added one level shallower, otherwise a negative fact is added.
998 void handleUnlock(FactSet &FSet, FactManager &FactMan,
999 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1001 ThreadSafetyHandler &Handler)
const override {
1002 FSet.removeLock(FactMan, Cp);
1004 if (
const FactEntry *RFact = leaveReentrant(FactMan)) {
1006 FSet.addLock(FactMan, RFact);
1008 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(
// tryReenter: returns a copy with ReentrancyDepth+1, or (elided) null when
// the capability is not reentrant or kinds mismatch.
1015 const FactEntry *tryReenter(FactManager &FactMan,
1019 if (
kind() != ReenterKind)
1021 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1022 NewFact->ReentrancyDepth++;
// leaveReentrant: returns a copy with ReentrancyDepth-1, or null at depth 0.
1028 const FactEntry *leaveReentrant(FactManager &FactMan)
const {
1029 if (!ReentrancyDepth)
1031 assert(reentrant());
1032 auto *NewFact = FactMan.createFact<LockableFactEntry>(*this);
1033 NewFact->ReentrancyDepth--;
// LLVM-style RTTI support.
1037 static bool classof(
const FactEntry *A) {
1038 return A->getFactEntryKind() == Lockable;
// How a scoped object relates to one of its underlying capabilities:
// acquired by the scope, or released (exclusively/shared) by it.
1042enum UnderlyingCapabilityKind {
1045 UCK_ReleasedExclusive,
1048struct UnderlyingCapability {
1050 UnderlyingCapabilityKind Kind;
// ScopedLockableFactEntry: fact for a scoped-lockable object (e.g. a lock
// guard). Its managed capabilities are stored inline as trailing objects.
1053class ScopedLockableFactEntry final
1055 private llvm::TrailingObjects<ScopedLockableFactEntry,
1056 UnderlyingCapability> {
1057 friend TrailingObjects;
// Fixed capacity decided at allocation time; ManagedSize grows as managed
// capabilities are appended.
1060 const unsigned ManagedCapacity;
1061 unsigned ManagedSize = 0;
1063 ScopedLockableFactEntry(
const CapabilityExpr &CE, SourceLocation Loc,
1064 SourceKind Src,
unsigned ManagedCapacity)
1065 : FactEntry(ScopedLockable, CE,
LK_Exclusive, Loc, Src),
1066 ManagedCapacity(ManagedCapacity) {}
// Placement-construct one managed capability in the trailing storage.
1068 void addManaged(
const CapabilityExpr &M, UnderlyingCapabilityKind UCK) {
1069 assert(ManagedSize < ManagedCapacity);
1070 new (getTrailingObjects() + ManagedSize) UnderlyingCapability{M, UCK};
1074 ArrayRef<UnderlyingCapability> getManaged()
const {
1075 return getTrailingObjects(ManagedSize);
// Arena factory: allocates object + trailing array in one block.
1079 static ScopedLockableFactEntry *
create(llvm::BumpPtrAllocator &Alloc,
1080 const CapabilityExpr &CE,
1081 SourceLocation Loc, SourceKind Src,
1082 unsigned ManagedCapacity) {
1084 Alloc.Allocate(totalSizeToAlloc<UnderlyingCapability>(ManagedCapacity),
1085 alignof(ScopedLockableFactEntry));
1086 return new (
Storage) ScopedLockableFactEntry(CE, Loc, Src, ManagedCapacity);
// Collect just the capability expressions of all managed entries.
1089 CapExprSet getUnderlyingMutexes()
const {
1090 CapExprSet UnderlyingMutexesSet;
1091 for (
const UnderlyingCapability &UnderlyingMutex : getManaged())
1092 UnderlyingMutexesSet.push_back(UnderlyingMutex.Cap);
1093 return UnderlyingMutexesSet;
// Record what the scope did on construction.
1100 void addLock(
const CapabilityExpr &M) { addManaged(M, UCK_Acquired); }
1102 void addExclusiveUnlock(
const CapabilityExpr &M) {
1103 addManaged(M, UCK_ReleasedExclusive);
1106 void addSharedUnlock(
const CapabilityExpr &M) {
1107 addManaged(M, UCK_ReleasedShared);
// Dropping the scope at a join: warn if a managed mutex's held/released
// state is inconsistent with what the scope implies (diagnostic elided).
1112 handleRemovalFromIntersection(
const FactSet &FSet, FactManager &FactMan,
1114 ThreadSafetyHandler &Handler)
const override {
1118 for (
const auto &UnderlyingMutex : getManaged()) {
1119 const auto *Entry = FSet.findLock(FactMan, UnderlyingMutex.Cap);
1120 if ((UnderlyingMutex.Kind == UCK_Acquired && Entry) ||
1121 (UnderlyingMutex.Kind != UCK_Acquired && !Entry)) {
1125 UnderlyingMutex.Cap.toString(), loc(),
// Relock the scope: (re)acquire what it acquired, release what it released.
1131 void handleLock(FactSet &FSet, FactManager &FactMan,
const FactEntry &entry,
1132 ThreadSafetyHandler &Handler)
const override {
1133 for (
const auto &UnderlyingMutex : getManaged()) {
1134 if (UnderlyingMutex.Kind == UCK_Acquired)
1135 lock(FSet, FactMan, UnderlyingMutex.Cap, entry.kind(), entry.loc(),
1138 unlock(FSet, FactMan, UnderlyingMutex.Cap, entry.loc(), &Handler);
// Unlock/destroy the scope: invert each managed capability's state; with
// FullyRemove the scope fact itself is removed and diagnostics suppressed.
1142 void handleUnlock(FactSet &FSet, FactManager &FactMan,
1143 const CapabilityExpr &Cp, SourceLocation UnlockLoc,
1145 ThreadSafetyHandler &Handler)
const override {
1146 assert(!Cp.
negative() &&
"Managing object cannot be negative.");
1147 for (
const auto &UnderlyingMutex : getManaged()) {
1150 ThreadSafetyHandler *TSHandler = FullyRemove ?
nullptr : &Handler;
1151 if (UnderlyingMutex.Kind == UCK_Acquired) {
1152 unlock(FSet, FactMan, UnderlyingMutex.Cap, UnlockLoc, TSHandler);
1154 LockKind kind = UnderlyingMutex.Kind == UCK_ReleasedShared
1157 lock(FSet, FactMan, UnderlyingMutex.Cap, kind, UnlockLoc, TSHandler);
1161 FSet.removeLock(FactMan, Cp);
1164 static bool classof(
const FactEntry *A) {
1165 return A->getFactEntryKind() == ScopedLockable;
// lock helper: reenter an already-held capability, otherwise report a
// double-lock (via Handler) and add a fresh fact; clears the negative fact.
1169 void lock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1171 ThreadSafetyHandler *Handler)
const {
1172 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1174 if (
const FactEntry *RFact = Fact.tryReenter(FactMan, kind)) {
1176 FSet.replaceLock(FactMan, It, RFact);
1177 }
else if (Handler) {
1181 FSet.removeLock(FactMan, !Cp);
1182 FSet.addLock(FactMan, FactMan.createFact<LockableFactEntry>(Cp, kind, loc,
// unlock helper: leave one reentrancy level if possible, else remove the
// fact and add its negative; reports unlock-of-unheld via Handler.
1187 void unlock(FactSet &FSet, FactManager &FactMan,
const CapabilityExpr &Cp,
1188 SourceLocation loc, ThreadSafetyHandler *Handler)
const {
1189 if (
const auto It = FSet.findLockIter(FactMan, Cp); It != FSet.end()) {
1191 if (
const FactEntry *RFact = Fact.leaveReentrant(FactMan)) {
1193 FSet.replaceLock(FactMan, It, RFact);
1199 FactMan.createFact<LockableFactEntry>(!Cp,
LK_Exclusive, loc));
1200 }
else if (Handler) {
1201 SourceLocation PrevLoc;
1202 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1203 PrevLoc =
Neg->loc();
// ThreadSafetyAnalyzer: the top-level per-function analysis driver. Owns
// the allocator, expression builder, per-block info, and diagnostics handler.
1210class ThreadSafetyAnalyzer {
1211 friend class BuildLockset;
1212 friend class threadSafety::BeforeSet;
1214 llvm::BumpPtrAllocator Bpa;
1215 threadSafety::til::MemRegionRef Arena;
1216 threadSafety::SExprBuilder SxBuilder;
1218 ThreadSafetyHandler &Handler;
1219 const FunctionDecl *CurrentFunction;
1220 LocalVariableMap LocalVarMap;
// Maps construct-expressions to the literal pointers standing for the
// constructed objects (used for scoped capabilities).
1222 llvm::SmallDenseMap<const Expr *, til::LiteralPtr *> ConstructedObjects;
1223 FactManager FactMan;
1224 std::vector<CFGBlockInfo> BlockInfo;
1226 BeforeSet *GlobalBeforeSet;
1229 ThreadSafetyAnalyzer(ThreadSafetyHandler &H, BeforeSet *Bset)
1230 : Arena(&Bpa), SxBuilder(Arena), Handler(H), FactMan(Bpa),
1231 GlobalBeforeSet(Bset) {}
1233 bool inCurrentScope(
const CapabilityExpr &CapE);
// Core lockset mutations (defined out of line below).
1235 void addLock(FactSet &FSet,
const FactEntry *Entry,
bool ReqAttr =
false);
1236 void removeLock(FactSet &FSet,
const CapabilityExpr &CapE,
1237 SourceLocation UnlockLoc,
bool FullyRemove,
LockKind Kind);
// Translate a capability attribute's arguments into CapabilityExprs;
// the second overload restricts to a specific CFG branch.
1239 template <
typename AttrType>
1240 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1241 const NamedDecl *D, til::SExpr *
Self =
nullptr);
1243 template <
class AttrType>
1244 void getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
const Expr *Exp,
1246 const CFGBlock *PredBlock,
const CFGBlock *CurrBlock,
1247 Expr *BrE,
bool Neg);
// Locate the try-lock call controlling a branch condition, if any.
1249 const CallExpr* getTrylockCallExpr(
const Stmt *
Cond, LocalVarContext
C,
1252 void getEdgeLockset(FactSet &
Result,
const FactSet &ExitSet,
1253 const CFGBlock* PredBlock,
1254 const CFGBlock *CurrBlock);
// Join/intersection of locksets at CFG merges, with warnings.
1256 bool join(
const FactEntry &A,
const FactEntry &B, SourceLocation JoinLoc,
1259 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1263 void intersectAndWarn(FactSet &EntrySet,
const FactSet &ExitSet,
1265 intersectAndWarn(EntrySet, ExitSet, JoinLoc, LEK, LEK);
1268 void runAnalysis(AnalysisDeclContext &AC);
// Diagnostic helpers for required/excluded capabilities and accesses.
1270 void warnIfMutexNotHeld(
const FactSet &FSet,
const NamedDecl *D,
1271 const Expr *Exp,
AccessKind AK, Expr *MutexExp,
1273 SourceLocation Loc);
1274 void warnIfMutexHeld(
const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
1275 Expr *MutexExp, til::SExpr *
Self, SourceLocation Loc);
1277 void checkAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
1279 void checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
AccessKind AK,
// BeforeSet method bodies (signatures partially elided). First fragment:
// build BeforeInfo for a declaration from its acquired_before /
// acquired_after attributes.
1287 ThreadSafetyAnalyzer& Analyzer) {
1289 BeforeInfo *Info =
nullptr;
// Create the BeforeInfo entry on first use.
1293 std::unique_ptr<BeforeInfo> &InfoPtr = BMap[Vd];
1295 InfoPtr.reset(
new BeforeInfo());
1296 Info = InfoPtr.get();
1299 for (
const auto *At : Vd->
attrs()) {
1300 switch (At->getKind()) {
// acquired_before(args): each arg mutex goes into this decl's Vect.
1301 case attr::AcquiredBefore: {
1305 for (
const auto *Arg : A->args()) {
1307 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1309 Info->Vect.push_back(Cpvd);
1310 const auto It = BMap.find(Cpvd);
1311 if (It == BMap.end())
// acquired_after(args): the edge is inverted — this decl goes into each
// argument's Vect.
1317 case attr::AcquiredAfter: {
1321 for (
const auto *Arg : A->args()) {
1323 Analyzer.SxBuilder.translateAttrExpr(Arg,
nullptr);
1327 ArgInfo->Vect.push_back(Vd);
// getBeforeInfoForDecl: cached lookup, inserting (elided) on miss.
1340BeforeSet::BeforeInfo *
1342 ThreadSafetyAnalyzer &Analyzer) {
1343 auto It = BMap.find(Vd);
1344 BeforeInfo *Info =
nullptr;
1345 if (It == BMap.end())
1348 Info = It->second.get();
1349 assert(Info &&
"BMap contained nullptr?");
// checkBeforeAfter fragments: DFS over the before-graph; Visited marks
// detect cycles, and holding a "later" mutex while acquiring an "earlier"
// one produces a lock-ordering warning.
1355 const FactSet& FSet,
1356 ThreadSafetyAnalyzer& Analyzer,
1368 if (Info->Visited == 1)
1371 if (Info->Visited == 2)
1374 if (Info->Vect.empty())
1377 InfoVect.push_back(Info);
1379 for (
const auto *Vdb : Info->Vect) {
// A mutex that must come *after* Vd is already held: report the
// acquired-before violation.
1381 if (FSet.containsMutexDecl(Analyzer.FactMan, Vdb)) {
1382 StringRef L1 = StartVd->
getName();
1383 StringRef L2 = Vdb->getName();
1384 Analyzer.Handler.handleLockAcquiredBefore(CapKind, L1, L2, Loc);
// Cycle detection: warn once per declaration.
1388 if (CycMap.try_emplace(Vd,
true).second) {
1390 Analyzer.Handler.handleBeforeAfterCycle(L1, Vd->
getLocation());
// Reset Visited marks after the traversal.
1400 for (
auto *Info : InfoVect)
// getValueDecl-style helper fragments: peel implicit casts and pull the
// declaration out of DeclRefExpr / MemberExpr.
1406 if (
const auto *CE = dyn_cast<ImplicitCastExpr>(Exp))
1409 if (
const auto *DR = dyn_cast<DeclRefExpr>(Exp))
1410 return DR->getDecl();
1412 if (
const auto *ME = dyn_cast<MemberExpr>(Exp))
1413 return ME->getMemberDecl();
// inCurrentScope: is the capability expression rooted in the function's own
// scope (a local/parameter literal, or a member of the current class)?
1418bool ThreadSafetyAnalyzer::inCurrentScope(
const CapabilityExpr &CapE) {
1419 const threadSafety::til::SExpr *SExp = CapE.
sexpr();
1420 assert(SExp &&
"Null expressions should be ignored");
1422 if (
const auto *LP = dyn_cast<til::LiteralPtr>(SExp)) {
1423 const ValueDecl *VD = LP->clangDecl();
// Member projection: only in scope when analyzing a method of that class.
1435 if (
const auto *P = dyn_cast<til::Project>(SExp)) {
1436 if (!isa_and_nonnull<CXXMethodDecl>(CurrentFunction))
1438 const ValueDecl *VD = P->clangDecl();
// addLock: add Entry to FSet, first clearing a matching negative fact and
// (elided) emitting double-lock / acquired-before diagnostics as needed.
// ReqAttr marks facts coming from a requires-attribute.
1447void ThreadSafetyAnalyzer::addLock(FactSet &FSet,
const FactEntry *Entry,
1449 if (Entry->shouldIgnore())
1452 if (!ReqAttr && !Entry->negative()) {
1454 CapabilityExpr NegC = !*Entry;
1455 const FactEntry *Nen = FSet.findLock(FactMan, NegC);
1457 FSet.removeLock(FactMan, NegC);
1460 if (inCurrentScope(*Entry) && !Entry->asserted() && !Entry->reentrant())
// Check acquired-before ordering for user-visible acquisitions.
1467 if (!Entry->asserted() && !Entry->declared()) {
1469 Entry->loc(), Entry->getKind());
// Already held: delegate to the fact's own double-lock handling.
1472 if (
const FactEntry *Cp = FSet.findLock(FactMan, *Entry)) {
1473 if (!Entry->asserted())
1474 Cp->handleLock(FSet, FactMan, *Entry, Handler);
1476 FSet.addLock(FactMan, Entry);
// removeLock: release Cp from FSet, warning (elided) on unlock-of-unheld
// and on kind mismatch (e.g. shared unlock of an exclusive lock).
1482void ThreadSafetyAnalyzer::removeLock(FactSet &FSet,
const CapabilityExpr &Cp,
1483 SourceLocation UnlockLoc,
1484 bool FullyRemove,
LockKind ReceivedKind) {
1488 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1490 SourceLocation PrevLoc;
1491 if (
const FactEntry *Neg = FSet.findLock(FactMan, !Cp))
1492 PrevLoc =
Neg->loc();
// Generic unlock matches any kind; otherwise kinds must agree.
1500 if (ReceivedKind !=
LK_Generic && LDat->kind() != ReceivedKind) {
1502 ReceivedKind, LDat->loc(), UnlockLoc);
1505 LDat->handleUnlock(FSet, FactMan, Cp, UnlockLoc, FullyRemove, Handler);
// getMutexIDs (attribute form): translate each attribute argument into a
// CapabilityExpr; with no args (elided) the default capability is used.
1510template <
typename AttrType>
1511void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1512 const Expr *Exp,
const NamedDecl *D,
1514 if (Attr->args_size() == 0) {
1523 Mtxs.push_back_nodup(Cp);
1527 for (
const auto *Arg : Attr->args()) {
1535 Mtxs.push_back_nodup(Cp);
// getMutexIDs (branch form): only collect the attribute's mutexes on the
// CFG edge corresponding to the try-lock's success value (BrE), honoring
// negation of the branch condition.
1542template <
class AttrType>
1543void ThreadSafetyAnalyzer::getMutexIDs(CapExprSet &Mtxs, AttrType *Attr,
1544 const Expr *Exp,
const NamedDecl *D,
1545 const CFGBlock *PredBlock,
1546 const CFGBlock *CurrBlock,
1547 Expr *BrE,
bool Neg) {
// Determine the branch (0 = then, 1 = else) taken on try-lock success.
1549 bool branch =
false;
1550 if (
const auto *BLE = dyn_cast_or_null<CXXBoolLiteralExpr>(BrE))
1551 branch = BLE->getValue();
1552 else if (
const auto *ILE = dyn_cast_or_null<IntegerLiteral>(BrE))
1553 branch = ILE->getValue().getBoolValue();
1555 int branchnum = branch ? 0 : 1;
1557 branchnum = !branchnum;
// Walk the (two) successors and add mutexes only on the success edge.
1562 SE = PredBlock->
succ_end(); SI != SE && i < 2; ++SI, ++i) {
1563 if (*SI == CurrBlock && i == branchnum)
1564 getMutexIDs(Mtxs, Attr, Exp, D);
// Fragments of a static boolean-constant evaluator (start elided):
// extract a compile-time truth value from bool/int literals and casts.
1572 }
else if (
const auto *BLE = dyn_cast<CXXBoolLiteralExpr>(E)) {
1573 TCond = BLE->getValue();
1575 }
else if (
const auto *ILE = dyn_cast<IntegerLiteral>(E)) {
1576 TCond = ILE->getValue().getBoolValue();
1578 }
else if (
auto *CE = dyn_cast<ImplicitCastExpr>(E))
// getTrylockCallExpr: walk a branch condition down to the try-lock call
// that controls it, tracking logical negation in `Negate`. Recurses
// through wrappers (parens, casts, full-exprs), variable definitions,
// unary !, ==/!= against constants, &&/||, and ?:.
1586const CallExpr* ThreadSafetyAnalyzer::getTrylockCallExpr(
const Stmt *
Cond,
1592 if (
const auto *CallExp = dyn_cast<CallExpr>(
Cond)) {
// __builtin_expect is transparent — look at its first argument.
1593 if (CallExp->getBuiltinCallee() == Builtin::BI__builtin_expect)
1594 return getTrylockCallExpr(CallExp->getArg(0),
C, Negate);
1597 else if (
const auto *PE = dyn_cast<ParenExpr>(
Cond))
1598 return getTrylockCallExpr(PE->getSubExpr(),
C, Negate);
1599 else if (
const auto *CE = dyn_cast<ImplicitCastExpr>(
Cond))
1600 return getTrylockCallExpr(CE->getSubExpr(),
C, Negate);
1601 else if (
const auto *FE = dyn_cast<FullExpr>(
Cond))
1602 return getTrylockCallExpr(FE->getSubExpr(),
C, Negate);
// A variable: chase its current definition via the LocalVariableMap.
1603 else if (
const auto *DRE = dyn_cast<DeclRefExpr>(
Cond)) {
1604 const Expr *E = LocalVarMap.lookupExpr(DRE->getDecl(),
C);
1605 return getTrylockCallExpr(E,
C, Negate);
// Logical not flips the negation state (elided).
1607 else if (
const auto *UOP = dyn_cast<UnaryOperator>(
Cond)) {
1608 if (UOP->getOpcode() == UO_LNot) {
1610 return getTrylockCallExpr(UOP->getSubExpr(),
C, Negate);
// ==/!= against a constant: compare against the constant's truth value.
1614 else if (
const auto *BOP = dyn_cast<BinaryOperator>(
Cond)) {
1615 if (BOP->getOpcode() == BO_EQ || BOP->getOpcode() == BO_NE) {
1616 if (BOP->getOpcode() == BO_NE)
1621 if (!TCond) Negate = !Negate;
1622 return getTrylockCallExpr(BOP->getLHS(),
C, Negate);
1626 if (!TCond) Negate = !Negate;
1627 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
// &&/||: the try-lock, if any, governs via the RHS.
1631 if (BOP->getOpcode() == BO_LAnd) {
1633 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1635 if (BOP->getOpcode() == BO_LOr)
1636 return getTrylockCallExpr(BOP->getRHS(),
C, Negate);
1638 }
// cond ? true : false (or the negated form) reduces to the condition.
else if (
const auto *COP = dyn_cast<ConditionalOperator>(
Cond)) {
1642 if (TCond && !FCond)
1643 return getTrylockCallExpr(COP->getCond(),
C, Negate);
1644 if (!TCond && FCond) {
1646 return getTrylockCallExpr(COP->getCond(),
C, Negate);
// getEdgeLockset: compute the lockset on the edge PredBlock -> CurrBlock.
// Starting from PredBlock's exit set, add the capabilities acquired by a
// successful (or, on the failure edge, not acquired) try-lock call that
// controls the branch.
1656void ThreadSafetyAnalyzer::getEdgeLockset(FactSet&
Result,
1657 const FactSet &ExitSet,
1658 const CFGBlock *PredBlock,
1659 const CFGBlock *CurrBlock) {
1667 bool Negate =
false;
1668 const CFGBlockInfo *PredBlockInfo = &BlockInfo[PredBlock->
getBlockID()];
1669 const LocalVarContext &LVarCtx = PredBlockInfo->ExitContext;
// Variable lookups during condition analysis resolve through the
// predecessor's exit context.
1676 return LocalVarMap.lookupExpr(D, Ctx);
1678 auto Cleanup = llvm::make_scope_exit(
// Find the controlling try-lock call, if any; bail (elided) otherwise.
1681 const auto *Exp = getTrylockCallExpr(
Cond, LVarCtx, Negate);
1685 auto *FunDecl = dyn_cast_or_null<NamedDecl>(Exp->getCalleeDecl());
1686 if (!FunDecl || !FunDecl->hasAttr<TryAcquireCapabilityAttr>())
1689 CapExprSet ExclusiveLocksToAdd;
1690 CapExprSet SharedLocksToAdd;
// Gather the mutexes guarded by each try-acquire attribute, restricted to
// this branch via the attribute's success value.
1693 for (
const auto *Attr : FunDecl->specific_attrs<TryAcquireCapabilityAttr>())
1694 getMutexIDs(Attr->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, Attr,
1695 Exp, FunDecl, PredBlock, CurrBlock, Attr->getSuccessValue(),
// Add the newly-acquired capabilities to the edge lockset.
1699 SourceLocation Loc = Exp->getExprLoc();
1700 for (
const auto &ExclusiveLockToAdd : ExclusiveLocksToAdd)
1701 addLock(
Result, FactMan.createFact<LockableFactEntry>(ExclusiveLockToAdd,
1703 for (
const auto &SharedLockToAdd : SharedLocksToAdd)
1704 addLock(
Result, FactMan.createFact<LockableFactEntry>(SharedLockToAdd,
// BuildLockset: statement visitor that updates the lockset (FSet) while
// walking the statements of one CFG block, and checks accesses against it.
1715class BuildLockset :
public ConstStmtVisitor<BuildLockset> {
1716 friend class ThreadSafetyAnalyzer;
1718 ThreadSafetyAnalyzer *Analyzer;
// The expected lockset at function exit, for release-on-return checking.
1721 const FactSet &FunctionExitFSet;
1722 LocalVariableMap::Context LVarCtx;
// Thin wrappers delegating to the analyzer with the current lockset.
1727 void checkAccess(
const Expr *Exp,
AccessKind AK,
1729 Analyzer->checkAccess(FSet, Exp, AK, POK);
1731 void checkPtAccess(
const Expr *Exp,
AccessKind AK,
1733 Analyzer->checkPtAccess(FSet, Exp, AK, POK);
// handleCall: apply a callee's acquire/release/requires attributes; Self
// carries the object for scoped capabilities.
1736 void handleCall(
const Expr *Exp,
const NamedDecl *D,
1737 til::SExpr *
Self =
nullptr,
1738 SourceLocation Loc = SourceLocation());
1739 void examineArguments(
const FunctionDecl *FD,
1742 bool SkipFirstParam =
false);
// Ctor: seed lockset/context from the block's entry state; the lambda
// resolves variables through the local-variable map (surroundings elided).
1745 BuildLockset(ThreadSafetyAnalyzer *Anlzr, CFGBlockInfo &Info,
1746 const FactSet &FunctionExitFSet)
1747 : ConstStmtVisitor<BuildLockset>(), Analyzer(Anlzr), FSet(Info.EntrySet),
1748 FunctionExitFSet(FunctionExitFSet), LVarCtx(Info.EntryContext),
1749 CtxIndex(Info.EntryIndex) {
1751 [
this](
const NamedDecl *D) ->
const Expr * {
1755 return Analyzer->LocalVarMap.lookupExpr(D, Ctx);
// Statement kinds that can touch guarded data or change the lockset.
1761 void VisitUnaryOperator(
const UnaryOperator *UO);
1762 void VisitBinaryOperator(
const BinaryOperator *BO);
1763 void VisitCastExpr(
const CastExpr *CE);
1764 void VisitCallExpr(
const CallExpr *Exp);
1765 void VisitCXXConstructExpr(
const CXXConstructExpr *Exp);
1766 void VisitDeclStmt(
const DeclStmt *S);
1767 void VisitMaterializeTemporaryExpr(
const MaterializeTemporaryExpr *Exp);
1768 void VisitReturnStmt(
const ReturnStmt *S);
1775void ThreadSafetyAnalyzer::warnIfMutexNotHeld(
1776 const FactSet &FSet,
const NamedDecl *D,
const Expr *Exp,
AccessKind AK,
1778 SourceLocation Loc) {
1790 const FactEntry *LDat = FSet.findLock(FactMan, !Cp);
1793 (!Cp).toString(), Loc);
1799 if (!inCurrentScope(Cp))
1803 LDat = FSet.findLock(FactMan, Cp);
1810 const FactEntry *LDat = FSet.findLockUniv(FactMan, Cp);
1811 bool NoError =
true;
1814 LDat = FSet.findPartialMatch(FactMan, Cp);
1817 std::string PartMatchStr = LDat->toString();
1818 StringRef PartMatchName(PartMatchStr);
1828 if (NoError && LDat && !LDat->isAtLeast(LK)) {
1834void ThreadSafetyAnalyzer::warnIfMutexHeld(
const FactSet &FSet,
1835 const NamedDecl *D,
const Expr *Exp,
1836 Expr *MutexExp, til::SExpr *
Self,
1837 SourceLocation Loc) {
1846 const FactEntry *LDat = FSet.findLock(FactMan, Cp);
1858void ThreadSafetyAnalyzer::checkAccess(
const FactSet &FSet,
const Expr *Exp,
1867 while (
const auto *DRE = dyn_cast<DeclRefExpr>(Exp)) {
1868 const auto *VD = dyn_cast<VarDecl>(DRE->getDecl()->getCanonicalDecl());
1870 if (
const auto *E = VD->getInit()) {
1881 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1883 if (UO->getOpcode() == UO_Deref)
1884 checkPtAccess(FSet, UO->getSubExpr(), AK, POK);
1888 if (
const auto *BO = dyn_cast<BinaryOperator>(Exp)) {
1891 return checkAccess(FSet, BO->
getLHS(), AK, POK);
1893 return checkPtAccess(FSet, BO->
getLHS(), AK, POK);
1899 if (
const auto *AE = dyn_cast<ArraySubscriptExpr>(Exp)) {
1900 checkPtAccess(FSet, AE->getLHS(), AK, POK);
1904 if (
const auto *ME = dyn_cast<MemberExpr>(Exp)) {
1906 checkPtAccess(FSet, ME->getBase(), AK, POK);
1908 checkAccess(FSet, ME->getBase(), AK, POK);
1915 if (D->
hasAttr<GuardedVarAttr>() && FSet.isEmpty(FactMan)) {
1920 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), POK,
nullptr, Loc);
1925void ThreadSafetyAnalyzer::checkPtAccess(
const FactSet &FSet,
const Expr *Exp,
1931 if (
const auto *PE = dyn_cast<ParenExpr>(Exp)) {
1932 Exp = PE->getSubExpr();
1935 if (
const auto *CE = dyn_cast<CastExpr>(Exp)) {
1936 if (CE->getCastKind() == CK_ArrayToPointerDecay) {
1939 checkAccess(FSet, CE->getSubExpr(), AK, POK);
1942 Exp = CE->getSubExpr();
1948 if (
const auto *UO = dyn_cast<UnaryOperator>(Exp)) {
1949 if (UO->getOpcode() == UO_AddrOf) {
1952 checkAccess(FSet, UO->getSubExpr(), AK, POK);
1980 if (D->
hasAttr<PtGuardedVarAttr>() && FSet.isEmpty(FactMan))
1984 warnIfMutexNotHeld(FSet, D, Exp, AK, I->getArg(), PtPOK,
nullptr,
2003void BuildLockset::handleCall(
const Expr *Exp,
const NamedDecl *D,
2004 til::SExpr *
Self, SourceLocation Loc) {
2005 CapExprSet ExclusiveLocksToAdd, SharedLocksToAdd;
2006 CapExprSet ExclusiveLocksToRemove, SharedLocksToRemove, GenericLocksToRemove;
2007 CapExprSet ScopedReqsAndExcludes;
2015 til::LiteralPtr *Placeholder =
2017 [[maybe_unused]]
auto inserted =
2018 Analyzer->ConstructedObjects.insert({Exp, Placeholder});
2019 assert(inserted.second &&
"Are we visiting the same expression again?");
2022 if (TagT->getOriginalDecl()
2023 ->getMostRecentDecl()
2024 ->hasAttr<ScopedLockableAttr>())
2025 Scp = CapabilityExpr(Placeholder, Exp->
getType(),
false);
2032 for(
const Attr *At : D->
attrs()) {
2033 switch (At->getKind()) {
2036 case attr::AcquireCapability: {
2038 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2039 : ExclusiveLocksToAdd,
2047 case attr::AssertCapability: {
2049 CapExprSet AssertLocks;
2050 Analyzer->getMutexIDs(AssertLocks, A, Exp, D,
Self);
2051 for (
const auto &AssertLock : AssertLocks)
2053 FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2055 Loc, FactEntry::Asserted));
2061 case attr::ReleaseCapability: {
2064 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2065 else if (A->isShared())
2066 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2068 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2072 case attr::RequiresCapability: {
2074 for (
auto *Arg : A->args()) {
2075 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2080 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2085 case attr::LocksExcluded: {
2087 for (
auto *Arg : A->args()) {
2088 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2091 Analyzer->getMutexIDs(ScopedReqsAndExcludes, A, Exp, D,
Self);
2102 std::optional<CallExpr::const_arg_range> Args;
2104 if (
const auto *CE = dyn_cast<CallExpr>(Exp))
2105 Args = CE->arguments();
2106 else if (
const auto *CE = dyn_cast<CXXConstructExpr>(Exp))
2107 Args = CE->arguments();
2109 llvm_unreachable(
"Unknown call kind");
2111 const auto *CalledFunction = dyn_cast<FunctionDecl>(D);
2112 if (CalledFunction && Args.has_value()) {
2113 for (
auto [Param, Arg] : zip(CalledFunction->parameters(), *Args)) {
2114 CapExprSet DeclaredLocks;
2115 for (
const Attr *At : Param->attrs()) {
2116 switch (At->getKind()) {
2117 case attr::AcquireCapability: {
2119 Analyzer->getMutexIDs(A->isShared() ? SharedLocksToAdd
2120 : ExclusiveLocksToAdd,
2122 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2126 case attr::ReleaseCapability: {
2129 Analyzer->getMutexIDs(GenericLocksToRemove, A, Exp, D,
Self);
2130 else if (A->isShared())
2131 Analyzer->getMutexIDs(SharedLocksToRemove, A, Exp, D,
Self);
2133 Analyzer->getMutexIDs(ExclusiveLocksToRemove, A, Exp, D,
Self);
2134 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2138 case attr::RequiresCapability: {
2140 for (
auto *Arg : A->args())
2141 Analyzer->warnIfMutexNotHeld(FSet, D, Exp,
2144 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2148 case attr::LocksExcluded: {
2150 for (
auto *Arg : A->args())
2151 Analyzer->warnIfMutexHeld(FSet, D, Exp, Arg,
Self, Loc);
2152 Analyzer->getMutexIDs(DeclaredLocks, A, Exp, D,
Self);
2160 if (DeclaredLocks.empty())
2162 CapabilityExpr Cp(Analyzer->SxBuilder.
translate(Arg,
nullptr),
2163 StringRef(
"mutex"),
false,
false);
2164 if (
const auto *CBTE = dyn_cast<CXXBindTemporaryExpr>(Arg->
IgnoreCasts());
2166 if (
auto Object = Analyzer->ConstructedObjects.find(CBTE->getSubExpr());
2167 Object != Analyzer->ConstructedObjects.end())
2168 Cp = CapabilityExpr(
Object->second, StringRef(
"mutex"),
false,
2171 const FactEntry *Fact = FSet.findLock(Analyzer->FactMan, Cp);
2179 for (
const auto &[a,
b] :
2180 zip_longest(DeclaredLocks, Scope->getUnderlyingMutexes())) {
2181 if (!a.has_value()) {
2184 b.value().getKind(),
b.value().toString());
2185 }
else if (!
b.has_value()) {
2188 a.value().getKind(), a.value().toString());
2189 }
else if (!a.value().equals(
b.value())) {
2192 a.value().getKind(), a.value().toString(),
b.value().toString());
2201 for (
const auto &M : ExclusiveLocksToRemove)
2202 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Exclusive);
2203 for (
const auto &M : SharedLocksToRemove)
2204 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Shared);
2205 for (
const auto &M : GenericLocksToRemove)
2206 Analyzer->removeLock(FSet, M, Loc, Dtor,
LK_Generic);
2209 FactEntry::SourceKind Source =
2210 !Scp.
shouldIgnore() ? FactEntry::Managed : FactEntry::Acquired;
2211 for (
const auto &M : ExclusiveLocksToAdd)
2212 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2214 for (
const auto &M : SharedLocksToAdd)
2215 Analyzer->addLock(FSet, Analyzer->FactMan.createFact<LockableFactEntry>(
2220 auto *ScopedEntry = Analyzer->FactMan.createFact<ScopedLockableFactEntry>(
2221 Scp, Loc, FactEntry::Acquired,
2222 ExclusiveLocksToAdd.size() + SharedLocksToAdd.size() +
2223 ScopedReqsAndExcludes.size() + ExclusiveLocksToRemove.size() +
2224 SharedLocksToRemove.size());
2225 for (
const auto &M : ExclusiveLocksToAdd)
2226 ScopedEntry->addLock(M);
2227 for (
const auto &M : SharedLocksToAdd)
2228 ScopedEntry->addLock(M);
2229 for (
const auto &M : ScopedReqsAndExcludes)
2230 ScopedEntry->addLock(M);
2231 for (
const auto &M : ExclusiveLocksToRemove)
2232 ScopedEntry->addExclusiveUnlock(M);
2233 for (
const auto &M : SharedLocksToRemove)
2234 ScopedEntry->addSharedUnlock(M);
2235 Analyzer->addLock(FSet, ScopedEntry);
2242void BuildLockset::VisitUnaryOperator(
const UnaryOperator *UO) {
2258void BuildLockset::VisitBinaryOperator(
const BinaryOperator *BO) {
2263 LVarCtx = Analyzer->LocalVarMap.getNextContext(CtxIndex, BO, LVarCtx);
2271void BuildLockset::VisitCastExpr(
const CastExpr *CE) {
2277void BuildLockset::examineArguments(
const FunctionDecl *FD,
2280 bool SkipFirstParam) {
2290 if (FD->
hasAttr<NoThreadSafetyAnalysisAttr>())
2293 const ArrayRef<ParmVarDecl *> Params = FD->
parameters();
2294 auto Param = Params.begin();
2299 for (
auto Arg = ArgBegin; Param != Params.end() && Arg != ArgEnd;
2301 QualType Qt = (*Param)->getType();
2309void BuildLockset::VisitCallExpr(
const CallExpr *Exp) {
2311 LVarCtx = Analyzer->LocalVarMap.getNextContext(CtxIndex, Exp, LVarCtx);
2313 if (
const auto *CE = dyn_cast<CXXMemberCallExpr>(Exp)) {
2314 const auto *ME = dyn_cast<MemberExpr>(CE->getCallee());
2316 const CXXMethodDecl *MD = CE->getMethodDecl();
2319 if (ME->isArrow()) {
2321 checkPtAccess(CE->getImplicitObjectArgument(),
AK_Read);
2324 checkAccess(CE->getImplicitObjectArgument(),
AK_Read);
2328 examineArguments(CE->getDirectCallee(), CE->arg_begin(), CE->arg_end());
2329 }
else if (
const auto *OE = dyn_cast<CXXOperatorCallExpr>(Exp)) {
2337 case OO_PercentEqual:
2341 case OO_LessLessEqual:
2342 case OO_GreaterGreaterEqual:
2343 checkAccess(OE->getArg(1),
AK_Read);
2353 if (!(OEop == OO_Star && OE->getNumArgs() > 1)) {
2355 checkPtAccess(OE->getArg(0),
AK_Read);
2360 const Expr *Obj = OE->getArg(0);
2365 const FunctionDecl *FD = OE->getDirectCallee();
2366 examineArguments(FD, std::next(OE->arg_begin()), OE->arg_end(),
2375 auto *D = dyn_cast_or_null<NamedDecl>(Exp->
getCalleeDecl());
2381void BuildLockset::VisitCXXConstructExpr(
const CXXConstructExpr *Exp) {
2384 const Expr* Source = Exp->
getArg(0);
2394 if (
auto *CE = dyn_cast<CastExpr>(E))
2397 if (
auto *CE = dyn_cast<CastExpr>(E))
2398 if (CE->
getCastKind() == CK_ConstructorConversion ||
2401 if (
auto *BTE = dyn_cast<CXXBindTemporaryExpr>(E))
2402 E = BTE->getSubExpr();
2406void BuildLockset::VisitDeclStmt(
const DeclStmt *S) {
2408 LVarCtx = Analyzer->LocalVarMap.getNextContext(CtxIndex, S, LVarCtx);
2411 if (
auto *VD = dyn_cast_or_null<VarDecl>(D)) {
2412 const Expr *E = VD->getInit();
2418 if (
auto *EWC = dyn_cast<ExprWithCleanups>(E))
2422 if (
auto Object = Analyzer->ConstructedObjects.find(E);
2423 Object != Analyzer->ConstructedObjects.end()) {
2424 Object->second->setClangDecl(VD);
2425 Analyzer->ConstructedObjects.erase(Object);
2431void BuildLockset::VisitMaterializeTemporaryExpr(
2432 const MaterializeTemporaryExpr *Exp) {
2434 if (
auto Object = Analyzer->ConstructedObjects.find(
2436 Object != Analyzer->ConstructedObjects.end()) {
2437 Object->second->setClangDecl(ExtD);
2438 Analyzer->ConstructedObjects.erase(Object);
2443void BuildLockset::VisitReturnStmt(
const ReturnStmt *S) {
2444 if (Analyzer->CurrentFunction ==
nullptr)
2452 const QualType ReturnType =
2455 Analyzer->checkAccess(
2456 FunctionExitFSet, RetVal,
2460 Analyzer->checkPtAccess(
2461 FunctionExitFSet, RetVal,
2471bool ThreadSafetyAnalyzer::join(
const FactEntry &A,
const FactEntry &B,
2472 SourceLocation JoinLoc,
2476 unsigned int ReentrancyDepthA = 0;
2477 unsigned int ReentrancyDepthB = 0;
2479 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&A))
2480 ReentrancyDepthA = LFE->getReentrancyDepth();
2481 if (
const auto *LFE = dyn_cast<LockableFactEntry>(&B))
2482 ReentrancyDepthB = LFE->getReentrancyDepth();
2484 if (ReentrancyDepthA != ReentrancyDepthB) {
2490 return CanModify && ReentrancyDepthA < ReentrancyDepthB;
2491 }
else if (A.kind() != B.kind()) {
2494 if ((A.managed() || A.asserted()) && (B.managed() || B.asserted())) {
2496 bool ShouldTakeB = B.kind() ==
LK_Shared;
2497 if (CanModify || !ShouldTakeB)
2506 return CanModify && A.asserted() && !B.asserted();
2524void ThreadSafetyAnalyzer::intersectAndWarn(FactSet &EntrySet,
2525 const FactSet &ExitSet,
2526 SourceLocation JoinLoc,
2529 FactSet EntrySetOrig = EntrySet;
2532 for (
const auto &Fact : ExitSet) {
2533 const FactEntry &ExitFact = FactMan[Fact];
2535 FactSet::iterator EntryIt = EntrySet.findLockIter(FactMan, ExitFact);
2536 if (EntryIt != EntrySet.end()) {
2537 if (join(FactMan[*EntryIt], ExitFact, JoinLoc, EntryLEK))
2540 ExitFact.handleRemovalFromIntersection(ExitSet, FactMan, JoinLoc,
2546 for (
const auto &Fact : EntrySetOrig) {
2547 const FactEntry *EntryFact = &FactMan[Fact];
2548 const FactEntry *ExitFact = ExitSet.findLock(FactMan, *EntryFact);
2553 EntryFact->handleRemovalFromIntersection(EntrySetOrig, FactMan, JoinLoc,
2556 EntrySet.removeLock(FactMan, *EntryFact);
2569 if (std::optional<CFGStmt> S =
Last.getAs<
CFGStmt>()) {
2581void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) {
2584 threadSafety::CFGWalker walker;
2585 if (!walker.
init(AC))
2592 const NamedDecl *D = walker.
getDecl();
2593 CurrentFunction = dyn_cast<FunctionDecl>(D);
2595 if (D->
hasAttr<NoThreadSafetyAnalysisAttr>())
2610 CFGBlockInfo::getEmptyBlockInfo(LocalVarMap));
2616 PostOrderCFGView::CFGBlockSet VisitedBlocks(CFGraph);
2622 Initial.Reachable =
true;
2625 LocalVarMap.traverseCFG(CFGraph, SortedGraph, BlockInfo);
2630 CapExprSet ExclusiveLocksAcquired;
2631 CapExprSet SharedLocksAcquired;
2632 CapExprSet LocksReleased;
2637 if (!SortedGraph->
empty()) {
2639 FactSet &InitialLockset = Initial.EntrySet;
2641 CapExprSet ExclusiveLocksToAdd;
2642 CapExprSet SharedLocksToAdd;
2645 for (
const auto *Attr : D->
attrs()) {
2646 Loc = Attr->getLocation();
2647 if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2648 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2650 }
else if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2653 if (A->args_size() == 0)
2655 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2657 getMutexIDs(LocksReleased, A,
nullptr, D);
2658 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2659 if (A->args_size() == 0)
2661 getMutexIDs(A->isShared() ? SharedLocksAcquired
2662 : ExclusiveLocksAcquired,
2669 ArrayRef<ParmVarDecl *> Params;
2670 if (CurrentFunction)
2672 else if (
auto CurrentMethod = dyn_cast<ObjCMethodDecl>(D))
2673 Params = CurrentMethod->getCanonicalDecl()->parameters();
2675 llvm_unreachable(
"Unknown function kind");
2676 for (
const ParmVarDecl *Param : Params) {
2677 CapExprSet UnderlyingLocks;
2678 for (
const auto *Attr : Param->attrs()) {
2679 Loc = Attr->getLocation();
2680 if (
const auto *A = dyn_cast<ReleaseCapabilityAttr>(Attr)) {
2681 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2683 getMutexIDs(LocksReleased, A,
nullptr, Param);
2684 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2685 }
else if (
const auto *A = dyn_cast<RequiresCapabilityAttr>(Attr)) {
2686 getMutexIDs(A->isShared() ? SharedLocksToAdd : ExclusiveLocksToAdd, A,
2688 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2689 }
else if (
const auto *A = dyn_cast<AcquireCapabilityAttr>(Attr)) {
2690 getMutexIDs(A->isShared() ? SharedLocksAcquired
2691 : ExclusiveLocksAcquired,
2693 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2694 }
else if (
const auto *A = dyn_cast<LocksExcludedAttr>(Attr)) {
2695 getMutexIDs(UnderlyingLocks, A,
nullptr, Param);
2698 if (UnderlyingLocks.empty())
2703 auto *ScopedEntry = FactMan.createFact<ScopedLockableFactEntry>(
2704 Cp, Param->getLocation(), FactEntry::Declared,
2705 UnderlyingLocks.size());
2706 for (
const CapabilityExpr &M : UnderlyingLocks)
2707 ScopedEntry->addLock(M);
2708 addLock(InitialLockset, ScopedEntry,
true);
2712 for (
const auto &Mu : ExclusiveLocksToAdd) {
2713 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2715 addLock(InitialLockset, Entry,
true);
2717 for (
const auto &Mu : SharedLocksToAdd) {
2718 const auto *Entry = FactMan.createFact<LockableFactEntry>(
2719 Mu,
LK_Shared, Loc, FactEntry::Declared);
2720 addLock(InitialLockset, Entry,
true);
2726 FactSet ExpectedFunctionExitSet = Initial.EntrySet;
2732 for (
const auto &Lock : ExclusiveLocksAcquired)
2733 ExpectedFunctionExitSet.addLock(
2734 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Exclusive,
2736 for (
const auto &Lock : SharedLocksAcquired)
2737 ExpectedFunctionExitSet.addLock(
2738 FactMan, FactMan.createFact<LockableFactEntry>(Lock,
LK_Shared,
2740 for (
const auto &Lock : LocksReleased)
2741 ExpectedFunctionExitSet.removeLock(FactMan, Lock);
2743 for (
const auto *CurrBlock : *SortedGraph) {
2744 unsigned CurrBlockID = CurrBlock->
getBlockID();
2745 CFGBlockInfo *CurrBlockInfo = &BlockInfo[CurrBlockID];
2748 VisitedBlocks.insert(CurrBlock);
2763 bool LocksetInitialized =
false;
2765 PE = CurrBlock->
pred_end(); PI != PE; ++PI) {
2767 if (*PI ==
nullptr || !VisitedBlocks.alreadySet(*PI))
2770 unsigned PrevBlockID = (*PI)->getBlockID();
2771 CFGBlockInfo *PrevBlockInfo = &BlockInfo[PrevBlockID];
2778 CurrBlockInfo->Reachable =
true;
2780 FactSet PrevLockset;
2781 getEdgeLockset(PrevLockset, PrevBlockInfo->ExitSet, *PI, CurrBlock);
2783 if (!LocksetInitialized) {
2784 CurrBlockInfo->EntrySet = PrevLockset;
2785 LocksetInitialized =
true;
2791 CurrBlockInfo->EntrySet, PrevLockset, CurrBlockInfo->EntryLoc,
2792 isa_and_nonnull<ContinueStmt>((*PI)->getTerminatorStmt())
2799 if (!CurrBlockInfo->Reachable)
2802 BuildLockset LocksetBuilder(
this, *CurrBlockInfo, ExpectedFunctionExitSet);
2805 for (
const auto &BI : *CurrBlock) {
2806 switch (BI.getKind()) {
2808 CFGStmt CS = BI.castAs<CFGStmt>();
2809 LocksetBuilder.Visit(CS.
getStmt());
2814 CFGAutomaticObjDtor AD = BI.castAs<CFGAutomaticObjDtor>();
2816 if (!DD->hasAttrs())
2819 LocksetBuilder.handleCall(
2827 const CFGCleanupFunction &
CF = BI.castAs<CFGCleanupFunction>();
2828 LocksetBuilder.handleCall(
2829 nullptr,
CF.getFunctionDecl(),
2831 CF.getVarDecl()->getLocation());
2836 auto TD = BI.castAs<CFGTemporaryDtor>();
2840 if (
auto Object = ConstructedObjects.find(
2841 TD.getBindTemporaryExpr()->getSubExpr());
2842 Object != ConstructedObjects.end()) {
2846 LocksetBuilder.handleCall(
nullptr, DD,
Object->second,
2847 TD.getBindTemporaryExpr()->getEndLoc());
2848 ConstructedObjects.erase(Object);
2856 CurrBlockInfo->ExitSet = LocksetBuilder.FSet;
2863 SE = CurrBlock->succ_end(); SI != SE; ++SI) {
2865 if (*SI ==
nullptr || !VisitedBlocks.alreadySet(*SI))
2868 CFGBlock *FirstLoopBlock = *SI;
2869 CFGBlockInfo *PreLoop = &BlockInfo[FirstLoopBlock->
getBlockID()];
2870 CFGBlockInfo *LoopEnd = &BlockInfo[CurrBlockID];
2871 intersectAndWarn(PreLoop->EntrySet, LoopEnd->ExitSet, PreLoop->EntryLoc,
2877 if (!Final.Reachable)
2881 intersectAndWarn(ExpectedFunctionExitSet, Final.ExitSet, Final.ExitLoc,
2897 ThreadSafetyAnalyzer Analyzer(Handler, *BSet);
2898 Analyzer.runAnalysis(AC);
2912 llvm_unreachable(
"Unknown AccessKind");
This file defines AnalysisDeclContext, a class that manages the analysis context data for context sen...
Defines enum values for all the target-independent builtin functions.
static void dump(llvm::raw_ostream &OS, StringRef FunctionName, ArrayRef< CounterExpression > Expressions, ArrayRef< CounterMappingRegion > Regions)
static Decl::Kind getKind(const Decl *D)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
Defines the clang::Expr interface and subclasses for C++ expressions.
Forward-declares and imports various common LLVM datatypes that clang wants to use unqualified.
Defines an enumeration for C++ overloaded operators.
static std::string toString(const clang::SanitizerSet &Sanitizers)
Produce a string containing comma-separated names of sanitizers in Sanitizers set.
Defines the clang::SourceLocation class and associated facilities.
Defines various enumerations that describe declaration and type specifiers.
static void warnInvalidLock(ThreadSafetyHandler &Handler, const Expr *MutexExp, const NamedDecl *D, const Expr *DeclExp, StringRef Kind)
Issue a warning about an invalid lock expression.
static bool getStaticBooleanValue(Expr *E, bool &TCond)
static bool neverReturns(const CFGBlock *B)
static void findBlockLocations(CFG *CFGraph, const PostOrderCFGView *SortedGraph, std::vector< CFGBlockInfo > &BlockInfo)
Find the appropriate source locations to use when producing diagnostics for each block in the CFG.
static const ValueDecl * getValueDecl(const Expr *Exp)
Gets the value decl pointer from DeclRefExprs or MemberExprs.
static const Expr * UnpackConstruction(const Expr *E)
C Language Family Type Representation.
AnalysisDeclContext contains the context data for the function, method or block under analysis.
ASTContext & getASTContext() const
static bool isAssignmentOp(Opcode Opc)
const VarDecl * getVarDecl() const
const Stmt * getTriggerStmt() const
Represents a single basic block in a source-level CFG.
bool hasNoReturnElement() const
ElementList::const_reverse_iterator const_reverse_iterator
succ_iterator succ_begin()
Stmt * getTerminatorStmt()
AdjacentBlocks::const_iterator const_pred_iterator
pred_iterator pred_begin()
unsigned getBlockID() const
Stmt * getTerminatorCondition(bool StripParens=true)
AdjacentBlocks::const_iterator const_succ_iterator
Represents a top-level expression in a basic block.
const CXXDestructorDecl * getDestructorDecl(ASTContext &astContext) const
const Stmt * getStmt() const
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Expr * getArg(unsigned Arg)
Return the specified argument.
CXXConstructorDecl * getConstructor() const
Get the constructor that this expression will (ultimately) call.
bool isCopyConstructor(unsigned &TypeQuals) const
Whether this constructor is a copy constructor (C++ [class.copy]p2, which can be used to copy the cla...
Expr * getArg(unsigned Arg)
getArg - Return the specified argument.
ConstExprIterator const_arg_iterator
FunctionDecl * getDirectCallee()
If the callee is a FunctionDecl, return it. Otherwise return null.
unsigned getNumArgs() const
getNumArgs - Return the number of actual arguments to this call.
CastKind getCastKind() const
const DeclGroupRef getDeclGroup() const
SourceLocation getBeginLoc() const LLVM_READONLY
llvm::iterator_range< specific_attr_iterator< T > > specific_attrs() const
SourceLocation getLocation() const
bool isDefinedOutsideFunctionOrMethod() const
isDefinedOutsideFunctionOrMethod - This predicate returns true if this scoped decl is defined outside...
DeclContext * getDeclContext()
This represents one expression.
Expr * IgnoreParenCasts() LLVM_READONLY
Skip past any parentheses and casts which might surround this expression until reaching a fixed point...
Expr * IgnoreParenImpCasts() LLVM_READONLY
Skip past any parentheses and implicit casts which might surround this expression until reaching a fi...
Expr * IgnoreImplicit() LLVM_READONLY
Skip past any implicit AST nodes which might surround this expression until reaching a fixed point.
Expr * IgnoreParens() LLVM_READONLY
Skip past any parentheses which might surround this expression until reaching a fixed point.
Expr * IgnoreCasts() LLVM_READONLY
Skip past any casts which might surround this expression until reaching a fixed point.
SourceLocation getExprLoc() const LLVM_READONLY
getExprLoc - Return the preferred location for the arrow when diagnosing a problem with a generic exp...
const ParmVarDecl * getParamDecl(unsigned i) const
QualType getReturnType() const
ArrayRef< ParmVarDecl * > parameters() const
FunctionDecl * getCanonicalDecl() override
Retrieves the "canonical" declaration of the given declaration.
unsigned getNumParams() const
Return the number of parameters this function must have based on its FunctionType.
Expr * getSubExpr() const
Retrieve the temporary-generating subexpression whose value will be materialized into a glvalue.
ValueDecl * getExtendingDecl()
Get the declaration which triggered the lifetime-extension of this temporary, if any.
This represents a decl that may have a name.
IdentifierInfo * getIdentifier() const
Get the identifier that names this declaration, if there is one.
StringRef getName() const
Get the name of identifier for this declaration as a StringRef.
std::string getNameAsString() const
Get a human-readable name for the declaration, even if it is one of the special kinds of names (C++ c...
virtual void printName(raw_ostream &OS, const PrintingPolicy &Policy) const
Pretty-print the unqualified name of this declaration.
QualType getCanonicalType() const
bool isConstQualified() const
Determine whether this type is const-qualified.
Encodes a location in the source.
bool isValid() const
Return true if this is a valid SourceLocation object.
Stmt - This represents one statement.
SourceLocation getEndLoc() const LLVM_READONLY
void dump() const
Dumps the specified AST fragment and all subtrees to llvm::errs().
bool isPointerType() const
bool isReferenceType() const
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
bool isLValueReferenceType() const
const T * getAs() const
Member-template getAs<specific type>'.
Expr * getSubExpr() const
Represent the declaration of a variable (in which case it is an lvalue) a function (in which case it ...
void checkBeforeAfter(const ValueDecl *Vd, const FactSet &FSet, ThreadSafetyAnalyzer &Analyzer, SourceLocation Loc, StringRef CapKind)
Return true if any mutexes in FSet are in the acquired_before set of Vd.
BeforeInfo * insertAttrExprs(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
Process acquired_before and acquired_after attributes on Vd.
BeforeInfo * getBeforeInfoForDecl(const ValueDecl *Vd, ThreadSafetyAnalyzer &Analyzer)
const PostOrderCFGView * getSortedGraph() const
const NamedDecl * getDecl() const
bool init(AnalysisDeclContext &AC)
const CFG * getGraph() const
bool shouldIgnore() const
bool equals(const CapabilityExpr &other) const
const til::SExpr * sexpr() const
std::string toString() const
const ValueDecl * valueDecl() const
StringRef getKind() const
CapabilityExpr translateAttrExpr(const Expr *AttrExp, const NamedDecl *D, const Expr *DeclExp, til::SExpr *Self=nullptr)
Translate a clang expression in an attribute to a til::SExpr.
void setLookupLocalVarExpr(std::function< const Expr *(const NamedDecl *)> F)
til::SExpr * translate(const Stmt *S, CallingContext *Ctx)
til::LiteralPtr * createThisPlaceholder()
til::SExpr * translateVariable(const VarDecl *VD, CallingContext *Ctx)
Handler class for thread safety warnings.
virtual ~ThreadSafetyHandler()
virtual void handleExpectMoreUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected)
Warn when we get fewer underlying mutexes than expected.
virtual void handleInvalidLockExp(SourceLocation Loc)
Warn about lock expressions which fail to resolve to lockable objects.
virtual void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Expected, Name Actual)
Warn when an actual underlying mutex of a scoped lockable does not match the expected.
virtual void handleExpectFewerUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc, Name ScopeName, StringRef Kind, Name Actual)
Warn when we get more underlying mutexes than expected.
virtual void enterFunction(const FunctionDecl *FD)
Called by the analysis when starting analysis of a function.
virtual void handleIncorrectUnlockKind(StringRef Kind, Name LockName, LockKind Expected, LockKind Received, SourceLocation LocLocked, SourceLocation LocUnlock)
Warn about an unlock function call that attempts to unlock a lock with the incorrect lock kind.
virtual void handleMutexHeldEndOfScope(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocEndOfScope, LockErrorKind LEK, bool ReentrancyMismatch=false)
Warn about situations where a mutex is sometimes held and sometimes not.
virtual void leaveFunction(const FunctionDecl *FD)
Called by the analysis when finishing analysis of a function.
virtual void handleExclusiveAndShared(StringRef Kind, Name LockName, SourceLocation Loc1, SourceLocation Loc2)
Warn when a mutex is held exclusively and shared at the same point.
virtual void handleMutexNotHeld(StringRef Kind, const NamedDecl *D, ProtectedOperationKind POK, Name LockName, LockKind LK, SourceLocation Loc, Name *PossibleMatch=nullptr)
Warn when a protected operation occurs while the specific mutex protecting the operation is not locke...
virtual void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName, SourceLocation Loc)
Warn when a function is called while an excluded mutex is locked.
virtual void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK, AccessKind AK, SourceLocation Loc)
Warn when a protected operation occurs while no locks are held.
virtual void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc, SourceLocation LocPreviousUnlock)
Warn about unlock function calls that do not have a prior matching lock expression.
virtual void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg, SourceLocation Loc)
Warn when acquiring a lock that the negative capability is not held.
virtual void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked, SourceLocation LocDoubleLock)
Warn about lock function calls for locks which are already held.
internal::Matcher< T > traverse(TraversalKind TK, const internal::Matcher< T > &InnerMatcher)
Causes all nested matchers to be matched with the specified traversal kind.
unsigned kind
All of the diagnostics that can be emitted by the frontend.
@ CF
Indicates that the tracked object is a CF object.
bool Alloc(InterpState &S, CodePtr OpPC, const Descriptor *Desc)
bool Dec(InterpState &S, CodePtr OpPC, bool CanOverflow)
1) Pops a pointer from the stack 2) Load the value from the pointer 3) Writes the value decreased by ...
bool Neg(InterpState &S, CodePtr OpPC)
std::unique_ptr< DiagnosticConsumer > create(StringRef OutputFile, DiagnosticOptions &DiagOpts, bool MergeChildRecords=false)
Returns a DiagnosticConsumer that serializes diagnostics to a bitcode file.
bool matches(const til::SExpr *E1, const til::SExpr *E2)
LockKind getLockKindFromAccessKind(AccessKind AK)
Helper function that returns a LockKind required for the given level of access.
LockErrorKind
This enum distinguishes between different situations where we warn due to inconsistent locking.
@ LEK_NotLockedAtEndOfFunction
Expecting a capability to be held at the end of function.
@ LEK_LockedSomePredecessors
A capability is locked in some but not all predecessors of a CFGBlock.
@ LEK_LockedAtEndOfFunction
A capability is still locked at the end of a function.
@ LEK_LockedSomeLoopIterations
A capability is locked for some but not all loop iterations.
void threadSafetyCleanup(BeforeSet *Cache)
AccessKind
This enum distinguishes between different ways to access (read or write) a variable.
@ AK_Written
Writing a variable.
@ AK_Read
Reading a variable.
LockKind
This enum distinguishes between different kinds of lock actions.
@ LK_Shared
Shared/reader lock of a mutex.
@ LK_Exclusive
Exclusive/writer lock of a mutex.
@ LK_Generic
Can be either Shared or Exclusive.
void runThreadSafetyAnalysis(AnalysisDeclContext &AC, ThreadSafetyHandler &Handler, BeforeSet **Bset)
Check a function's CFG for thread-safety violations.
ProtectedOperationKind
This enum distinguishes between different kinds of operations that may need to be protected by locks.
@ POK_PtPassByRef
Passing a pt-guarded variable by reference.
@ POK_PassPointer
Passing pointer to a guarded variable.
@ POK_VarDereference
Dereferencing a variable (e.g. p in *p = 5;)
@ POK_PassByRef
Passing a guarded variable by reference.
@ POK_ReturnByRef
Returning a guarded variable by reference.
@ POK_PtPassPointer
Passing a pt-guarded pointer.
@ POK_PtReturnPointer
Returning a pt-guarded pointer.
@ POK_VarAccess
Reading or writing a variable (e.g. x in x = 5;)
@ POK_FunctionCall
Making a function call (e.g. fool())
@ POK_ReturnPointer
Returning pointer to a guarded variable.
@ POK_PtReturnByRef
Returning a pt-guarded variable by reference.
The JSON file list parser is used to communicate input to InstallAPI.
OverloadedOperatorKind
Enumeration specifying the different kinds of C++ overloaded operators.
bool isa(CodeGen::Address addr)
@ Self
'self' clause, allowed on Compute and Combined Constructs, plus 'update'.
nullptr
This class represents a compute construct, representing a 'Kind' of βparallelβ, 'serial',...
static bool classof(const Stmt *T)
@ Result
The result type of a method or function.
const FunctionProtoType * T
U cast(CodeGen::Address addr)
@ Other
Other implicit parameter.
int const char * function