clang 22.0.0git
ExprEngineCallAndReturn.cpp
Go to the documentation of this file.
1//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines ExprEngine's support for calls and returns.
10//
11//===----------------------------------------------------------------------===//
12
14#include "clang/AST/Decl.h"
15#include "clang/AST/DeclCXX.h"
23#include "llvm/Support/Casting.h"
24#include "llvm/Support/Compiler.h"
25#include "llvm/Support/SaveAndRestore.h"
26#include <optional>
27
28using namespace clang;
29using namespace ento;
30
31#define DEBUG_TYPE "ExprEngine"
32
34 NumOfDynamicDispatchPathSplits,
35 "The # of times we split the path due to imprecise dynamic dispatch info");
36
37STAT_COUNTER(NumInlinedCalls, "The # of times we inlined a call");
38
39STAT_COUNTER(NumReachedInlineCountMax,
40 "The # of times we reached inline count maximum");
41
43 ExplodedNode *Pred) {
44 // Get the entry block in the CFG of the callee.
45 const CFGBlock *Entry = CE.getEntry();
46
47 // Validate the CFG.
48 assert(Entry->empty());
49 assert(Entry->succ_size() == 1);
50
51 // Get the solitary successor.
52 const CFGBlock *Succ = *(Entry->succ_begin());
53
54 // Construct an edge representing the starting location in the callee.
55 BlockEdge Loc(Entry, Succ, CE.getCalleeContext());
56
57 ProgramStateRef state = Pred->getState();
58
59 // Construct a new node, notify checkers that analysis of the function has
60 // begun, and add the resultant nodes to the worklist.
61 bool isNew;
62 ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
63 Node->addPredecessor(Pred, G);
64 if (isNew) {
65 ExplodedNodeSet DstBegin;
66 processBeginOfFunction(BC, Node, DstBegin, Loc);
67 Engine.enqueue(DstBegin);
68 }
69}
70
71// Find the last statement on the path to the exploded node and the
72// corresponding Block.
73static std::pair<const Stmt*,
74 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
75 const Stmt *S = nullptr;
76 const CFGBlock *Blk = nullptr;
77 const StackFrameContext *SF = Node->getStackFrame();
78
79 // Back up through the ExplodedGraph until we reach a statement node in this
80 // stack frame.
81 while (Node) {
82 const ProgramPoint &PP = Node->getLocation();
83
84 if (PP.getStackFrame() == SF) {
85 if (std::optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
86 S = SP->getStmt();
87 break;
88 } else if (std::optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
89 S = CEE->getCalleeContext()->getCallSite();
90 if (S)
91 break;
92
93 // If there is no statement, this is an implicitly-generated call.
94 // We'll walk backwards over it and then continue the loop to find
95 // an actual statement.
96 std::optional<CallEnter> CE;
97 do {
98 Node = Node->getFirstPred();
99 CE = Node->getLocationAs<CallEnter>();
100 } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
101
102 // Continue searching the graph.
103 } else if (std::optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
104 Blk = BE->getSrc();
105 }
106 } else if (std::optional<CallEnter> CE = PP.getAs<CallEnter>()) {
107 // If we reached the CallEnter for this function, it has no statements.
108 if (CE->getCalleeContext() == SF)
109 break;
110 }
111
112 if (Node->pred_empty())
113 return std::make_pair(nullptr, nullptr);
114
115 Node = *Node->pred_begin();
116 }
117
118 return std::make_pair(S, Blk);
119}
120
121/// Adjusts a return value when the called function's return type does not
122/// match the caller's expression type. This can happen when a dynamic call
123/// is devirtualized, and the overriding method has a covariant (more specific)
124/// return type than the parent's method. For C++ objects, this means we need
125/// to add base casts.
126static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
127 StoreManager &StoreMgr) {
128 // For now, the only adjustments we handle apply only to locations.
129 if (!isa<Loc>(V))
130 return V;
131
132 // If the types already match, don't do any unnecessary work.
133 ExpectedTy = ExpectedTy.getCanonicalType();
134 ActualTy = ActualTy.getCanonicalType();
135 if (ExpectedTy == ActualTy)
136 return V;
137
138 // No adjustment is needed between Objective-C pointer types.
139 if (ExpectedTy->isObjCObjectPointerType() &&
140 ActualTy->isObjCObjectPointerType())
141 return V;
142
143 // C++ object pointers may need "derived-to-base" casts.
145 const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
146 if (ExpectedClass && ActualClass) {
147 CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
148 /*DetectVirtual=*/false);
149 if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
150 !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
151 return StoreMgr.evalDerivedToBase(V, Paths.front());
152 }
153 }
154
155 // Unfortunately, Objective-C does not enforce that overridden methods have
156 // covariant return types, so we can't assert that that never happens.
157 // Be safe and return UnknownVal().
158 return UnknownVal();
159}
160
162 ExplodedNode *Pred,
163 ExplodedNodeSet &Dst) {
164 // Find the last statement in the function and the corresponding basic block.
165 const Stmt *LastSt = nullptr;
166 const CFGBlock *Blk = nullptr;
167 std::tie(LastSt, Blk) = getLastStmt(Pred);
168 if (!Blk || !LastSt) {
169 Dst.Add(Pred);
170 return;
171 }
172
173 // Here, we destroy the current location context. We use the current
174 // function's entire body as a diagnostic statement, with which the program
175 // point will be associated. However, we only want to use LastStmt as a
176 // reference for what to clean up if it's a ReturnStmt; otherwise, everything
177 // is dead.
178 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
179 const LocationContext *LCtx = Pred->getLocationContext();
180 removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
183}
184
186 const StackFrameContext *calleeCtx) {
187 const Decl *RuntimeCallee = calleeCtx->getDecl();
188 const Decl *StaticDecl = Call->getDecl();
189 assert(RuntimeCallee);
190 if (!StaticDecl)
191 return true;
192 return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
193}
194
195// Returns the number of elements in the array currently being destructed.
196// If the element count is not found 0 will be returned.
198 const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB) {
200 "The call event is not a destructor call!");
201
202 const auto &DtorCall = cast<CXXDestructorCall>(Call);
203
204 auto ThisVal = DtorCall.getCXXThisVal();
205
206 if (auto ThisElementRegion = dyn_cast<ElementRegion>(ThisVal.getAsRegion())) {
207 auto ArrayRegion = ThisElementRegion->getAsArrayOffset().getRegion();
208 auto ElementType = ThisElementRegion->getElementType();
209
210 auto ElementCount =
211 getDynamicElementCount(State, ArrayRegion, SVB, ElementType);
212
213 if (!ElementCount.isConstant())
214 return 0;
215
216 return ElementCount.getAsInteger()->getLimitedValue();
217 }
218
219 return 0;
220}
221
222ProgramStateRef ExprEngine::removeStateTraitsUsedForArrayEvaluation(
223 ProgramStateRef State, const CXXConstructExpr *E,
224 const LocationContext *LCtx) {
225
226 assert(LCtx && "Location context must be provided!");
227
228 if (E) {
229 if (getPendingInitLoop(State, E, LCtx))
230 State = removePendingInitLoop(State, E, LCtx);
231
232 if (getIndexOfElementToConstruct(State, E, LCtx))
233 State = removeIndexOfElementToConstruct(State, E, LCtx);
234 }
235
236 if (getPendingArrayDestruction(State, LCtx))
237 State = removePendingArrayDestruction(State, LCtx);
238
239 return State;
240}
241
242/// The call exit is simulated with a sequence of nodes, which occur between
243/// CallExitBegin and CallExitEnd. The following operations occur between the
244/// two program points:
245/// 1. CallExitBegin (triggers the start of call exit sequence)
246/// 2. Bind the return value
247/// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
248/// 4. CallExitEnd (switch to the caller context)
249/// 5. PostStmt<CallExpr>
251 // Step 1 CEBNode was generated before the call.
252 const StackFrameContext *calleeCtx = CEBNode->getStackFrame();
253
254 // The parent context might not be a stack frame, so make sure we
255 // look up the first enclosing stack frame.
256 const StackFrameContext *callerCtx =
257 calleeCtx->getParent()->getStackFrame();
258
259 const Stmt *CE = calleeCtx->getCallSite();
260 ProgramStateRef state = CEBNode->getState();
261 // Find the last statement in the function and the corresponding basic block.
262 const Stmt *LastSt = nullptr;
263 const CFGBlock *Blk = nullptr;
264 std::tie(LastSt, Blk) = getLastStmt(CEBNode);
265
266 // Generate a CallEvent /before/ cleaning the state, so that we can get the
267 // correct value for 'this' (if necessary).
269 CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);
270
271 // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.
272
273 // If this variable is set to 'true' the analyzer will evaluate the call
274 // statement we are about to exit again, instead of continuing the execution
275 // from the statement after the call. This is useful for non-POD type array
276 // construction where the CXXConstructExpr is referenced only once in the CFG,
277 // but we want to evaluate it as many times as many elements the array has.
278 bool ShouldRepeatCall = false;
279
280 if (const auto *DtorDecl =
281 dyn_cast_or_null<CXXDestructorDecl>(Call->getDecl())) {
282 if (auto Idx = getPendingArrayDestruction(state, callerCtx)) {
283 ShouldRepeatCall = *Idx > 0;
284
285 auto ThisVal = svalBuilder.getCXXThis(DtorDecl->getParent(), calleeCtx);
286 state = state->killBinding(ThisVal);
287 }
288 }
289
290 // If the callee returns an expression, bind its value to CallExpr.
291 if (CE) {
292 if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
293 const LocationContext *LCtx = CEBNode->getLocationContext();
294 SVal V = state->getSVal(RS, LCtx);
295
296 // Ensure that the return type matches the type of the returned Expr.
297 if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
298 QualType ReturnedTy =
300 if (!ReturnedTy.isNull()) {
301 if (const Expr *Ex = dyn_cast<Expr>(CE)) {
302 V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
304 }
305 }
306 }
307
308 state = state->BindExpr(CE, callerCtx, V);
309 }
310
311 // Bind the constructed object value to CXXConstructExpr.
312 if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
314 svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
315 SVal ThisV = state->getSVal(This);
316 ThisV = state->getSVal(ThisV.castAs<Loc>());
317 state = state->BindExpr(CCE, callerCtx, ThisV);
318
319 ShouldRepeatCall = shouldRepeatCtorCall(state, CCE, callerCtx);
320 }
321
322 if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
323 // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
324 // while to reach the actual CXXNewExpr element from here, so keep the
325 // region for later use.
326 // Additionally cast the return value of the inlined operator new
327 // (which is of type 'void *') to the correct object type.
328 SVal AllocV = state->getSVal(CNE, callerCtx);
329 AllocV = svalBuilder.evalCast(
330 AllocV, CNE->getType(),
331 getContext().getPointerType(getContext().VoidTy));
332
333 state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
334 AllocV);
335 }
336 }
337
338 if (!ShouldRepeatCall) {
339 state = removeStateTraitsUsedForArrayEvaluation(
340 state, dyn_cast_or_null<CXXConstructExpr>(CE), callerCtx);
341 }
342
343 // Step 3: BindedRetNode -> CleanedNodes
344 // If we can find a statement and a block in the inlined function, run remove
345 // dead bindings before returning from the call. This is important to ensure
346 // that we report the issues such as leaks in the stack contexts in which
347 // they occurred.
348 ExplodedNodeSet CleanedNodes;
349 if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
350 static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
351 auto Loc = isa<ReturnStmt>(LastSt)
352 ? ProgramPoint{PostStmt(LastSt, calleeCtx, &retValBind)}
353 : ProgramPoint{EpsilonPoint(calleeCtx, /*Data1=*/nullptr,
354 /*Data2=*/nullptr, &retValBind)};
355 const CFGBlock *PrePurgeBlock =
356 isa<ReturnStmt>(LastSt) ? Blk : &CEBNode->getCFG().getExit();
357 bool isNew;
358 ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
359 BindedRetNode->addPredecessor(CEBNode, G);
360 if (!isNew)
361 return;
362
363 NodeBuilderContext Ctx(getCoreEngine(), PrePurgeBlock, BindedRetNode);
364 currBldrCtx = &Ctx;
365 // Here, we call the Symbol Reaper with 0 statement and callee location
366 // context, telling it to clean up everything in the callee's context
367 // (and its children). We use the callee's function body as a diagnostic
368 // statement, with which the program point will be associated.
369 removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
370 calleeCtx->getAnalysisDeclContext()->getBody(),
372 currBldrCtx = nullptr;
373 } else {
374 CleanedNodes.Add(CEBNode);
375 }
376
377 for (ExplodedNode *N : CleanedNodes) {
378 // Step 4: Generate the CallExit and leave the callee's context.
379 // CleanedNodes -> CEENode
380 CallExitEnd Loc(calleeCtx, callerCtx);
381 bool isNew;
382 ProgramStateRef CEEState = (N == CEBNode) ? state : N->getState();
383
384 ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
385 CEENode->addPredecessor(N, G);
386 if (!isNew)
387 return;
388
389 // Step 5: Perform the post-condition check of the CallExpr and enqueue the
390 // result onto the work list.
391 // CEENode -> Dst -> WorkList
392 NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
393 SaveAndRestore<const NodeBuilderContext *> NBCSave(currBldrCtx, &Ctx);
394 SaveAndRestore CBISave(currStmtIdx, calleeCtx->getIndex());
395
396 CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
397
398 ExplodedNodeSet DstPostCall;
399 if (llvm::isa_and_nonnull<CXXNewExpr>(CE)) {
400 ExplodedNodeSet DstPostPostCallCallback;
401 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
402 CEENode, *UpdatedCall, *this,
403 /*wasInlined=*/true);
404 for (ExplodedNode *I : DstPostPostCallCallback) {
406 cast<CXXAllocatorCall>(*UpdatedCall), DstPostCall, I, *this,
407 /*wasInlined=*/true);
408 }
409 } else {
410 getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
411 *UpdatedCall, *this,
412 /*wasInlined=*/true);
413 }
414 ExplodedNodeSet Dst;
415 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
416 getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
417 *this,
418 /*wasInlined=*/true);
419 } else if (CE &&
420 !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
421 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
422 getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
423 *this, /*wasInlined=*/true);
424 } else {
425 Dst.insert(DstPostCall);
426 }
427
428 // Enqueue the next element in the block.
429 for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
430 PSI != PSE; ++PSI) {
431 unsigned Idx = calleeCtx->getIndex() + (ShouldRepeatCall ? 0 : 1);
432
433 Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(), Idx);
434 }
435 }
436}
437
438bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
439 // When there are no branches in the function, it means that there's no
440 // exponential complexity introduced by inlining such function.
441 // Such functions also don't trigger various fundamental problems
442 // with our inlining mechanism, such as the problem of
443 // inlined defensive checks. Hence isLinear().
444 const CFG *Cfg = ADC->getCFG();
445 return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
446}
447
448bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
449 const CFG *Cfg = ADC->getCFG();
450 return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
451}
452
453bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
454 const CFG *Cfg = ADC->getCFG();
455 return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
456}
457
458void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
459 bool &IsRecursive, unsigned &StackDepth) {
460 IsRecursive = false;
461 StackDepth = 0;
462
463 while (LCtx) {
464 if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
465 const Decl *DI = SFC->getDecl();
466
467 // Mark recursive (and mutually recursive) functions and always count
468 // them when measuring the stack depth.
469 if (DI == D) {
470 IsRecursive = true;
471 ++StackDepth;
472 LCtx = LCtx->getParent();
473 continue;
474 }
475
476 // Do not count the small functions when determining the stack depth.
477 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
478 if (!isSmall(CalleeADC))
479 ++StackDepth;
480 }
481 LCtx = LCtx->getParent();
482 }
483}
484
485// The GDM component containing the dynamic dispatch bifurcation info. When
486// the exact type of the receiver is not known, we want to explore both paths -
487// one on which we do inline it and the other one on which we don't. This is
488// done to ensure we do not drop coverage.
489// This is the map from the receiver region to a bool, specifying either we
490// consider this region's information precise or not along the given path.
491namespace {
492 enum DynamicDispatchMode {
493 DynamicDispatchModeInlined = 1,
494 DynamicDispatchModeConservative
495 };
496} // end anonymous namespace
497
498REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
499 const MemRegion *, unsigned)
500REGISTER_TRAIT_WITH_PROGRAMSTATE(CTUDispatchBifurcation, bool)
501
502void ExprEngine::ctuBifurcate(const CallEvent &Call, const Decl *D,
503 NodeBuilder &Bldr, ExplodedNode *Pred,
504 ProgramStateRef State) {
505 ProgramStateRef ConservativeEvalState = nullptr;
506 if (Call.isForeign() && !isSecondPhaseCTU()) {
507 const auto IK = AMgr.options.getCTUPhase1Inlining();
508 const bool DoInline = IK == CTUPhase1InliningKind::All ||
510 isSmall(AMgr.getAnalysisDeclContext(D)));
511 if (DoInline) {
512 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
513 return;
514 }
515 const bool BState = State->get<CTUDispatchBifurcation>();
516 if (!BState) { // This is the first time we see this foreign function.
517 // Enqueue it to be analyzed in the second (ctu) phase.
518 inlineCall(Engine.getCTUWorkList(), Call, D, Bldr, Pred, State);
519 // Conservatively evaluate in the first phase.
520 ConservativeEvalState = State->set<CTUDispatchBifurcation>(true);
521 conservativeEvalCall(Call, Bldr, Pred, ConservativeEvalState);
522 } else {
523 conservativeEvalCall(Call, Bldr, Pred, State);
524 }
525 return;
526 }
527 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
528}
529
530void ExprEngine::inlineCall(WorkList *WList, const CallEvent &Call,
531 const Decl *D, NodeBuilder &Bldr,
532 ExplodedNode *Pred, ProgramStateRef State) {
533 assert(D);
534
535 const LocationContext *CurLC = Pred->getLocationContext();
536 const StackFrameContext *CallerSFC = CurLC->getStackFrame();
537 const LocationContext *ParentOfCallee = CallerSFC;
538 if (Call.getKind() == CE_Block &&
539 !cast<BlockCall>(Call).isConversionFromLambda()) {
540 const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
541 assert(BR && "If we have the block definition we should have its region");
542 AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
543 ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
545 BR);
546 }
547
548 // This may be NULL, but that's fine.
549 const Expr *CallE = Call.getOriginExpr();
550
551 // Construct a new stack frame for the callee.
552 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
553 const StackFrameContext *CalleeSFC =
554 CalleeADC->getStackFrame(ParentOfCallee, CallE, currBldrCtx->getBlock(),
555 currBldrCtx->blockCount(), currStmtIdx);
556
557 CallEnter Loc(CallE, CalleeSFC, CurLC);
558
559 // Construct a new state which contains the mapping from actual to
560 // formal arguments.
561 State = State->enterStackFrame(Call, CalleeSFC);
562
563 bool isNew;
564 if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
565 N->addPredecessor(Pred, G);
566 if (isNew)
567 WList->enqueue(N);
568 }
569
570 // If we decided to inline the call, the successor has been manually
571 // added onto the work list so remove it from the node builder.
572 Bldr.takeNodes(Pred);
573
574 NumInlinedCalls++;
575 Engine.FunctionSummaries->bumpNumTimesInlined(D);
576
577 // Do not mark as visited in the 2nd run (CTUWList), so the function will
578 // be visited as top-level, this way we won't loose reports in non-ctu
579 // mode. Considering the case when a function in a foreign TU calls back
580 // into the main TU.
581 // Note, during the 1st run, it doesn't matter if we mark the foreign
582 // functions as visited (or not) because they can never appear as a top level
583 // function in the main TU.
584 if (!isSecondPhaseCTU())
585 // Mark the decl as visited.
586 if (VisitedCallees)
587 VisitedCallees->insert(D);
588}
589
591 const Stmt *CallE) {
592 const void *ReplayState = State->get<ReplayWithoutInlining>();
593 if (!ReplayState)
594 return nullptr;
595
596 assert(ReplayState == CallE && "Backtracked to the wrong call.");
597 (void)CallE;
598
599 return State->remove<ReplayWithoutInlining>();
600}
601
603 ExplodedNodeSet &dst) {
604 // Perform the previsit of the CallExpr.
605 ExplodedNodeSet dstPreVisit;
606 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
607
608 // Get the call in its initial state. We use this as a template to perform
609 // all the checks.
611 CallEventRef<> CallTemplate = CEMgr.getSimpleCall(
612 CE, Pred->getState(), Pred->getLocationContext(), getCFGElementRef());
613
614 // Evaluate the function call. We try each of the checkers
615 // to see if the can evaluate the function call.
616 ExplodedNodeSet dstCallEvaluated;
617 for (ExplodedNode *N : dstPreVisit) {
618 evalCall(dstCallEvaluated, N, *CallTemplate);
619 }
620
621 // Finally, perform the post-condition check of the CallExpr and store
622 // the created nodes in 'Dst'.
623 // Note that if the call was inlined, dstCallEvaluated will be empty.
624 // The post-CallExpr check will occur in processCallExit.
625 getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
626 *this);
627}
628
629ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
630 const CallEvent &Call) {
631 const Expr *E = Call.getOriginExpr();
632 // FIXME: Constructors to placement arguments of operator new
633 // are not supported yet.
634 if (!E || isa<CXXNewExpr>(E))
635 return State;
636
637 const LocationContext *LC = Call.getLocationContext();
638 for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
639 unsigned I = Call.getASTArgumentIndex(CallI);
640 if (std::optional<SVal> V = getObjectUnderConstruction(State, {E, I}, LC)) {
641 SVal VV = *V;
642 (void)VV;
644 ->getStackFrame()->getParent()
645 ->getStackFrame() == LC->getStackFrame());
646 State = finishObjectConstruction(State, {E, I}, LC);
647 }
648 }
649
650 return State;
651}
652
653void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
654 ExplodedNode *Pred,
655 const CallEvent &Call) {
656 ProgramStateRef State = Pred->getState();
657 ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
658 if (CleanedState == State) {
659 Dst.insert(Pred);
660 return;
661 }
662
663 const Expr *E = Call.getOriginExpr();
664 const LocationContext *LC = Call.getLocationContext();
665 NodeBuilder B(Pred, Dst, *currBldrCtx);
666 static SimpleProgramPointTag Tag("ExprEngine",
667 "Finish argument construction");
668 PreStmt PP(E, LC, &Tag);
669 B.generateNode(PP, CleanedState, Pred);
670}
671
673 const CallEvent &Call) {
674 // WARNING: At this time, the state attached to 'Call' may be older than the
675 // state in 'Pred'. This is a minor optimization since CheckerManager will
676 // use an updated CallEvent instance when calling checkers, but if 'Call' is
677 // ever used directly in this function all callers should be updated to pass
678 // the most recent state. (It is probably not worth doing the work here since
679 // for some callers this will not be necessary.)
680
681 // Run any pre-call checks using the generic call interface.
682 ExplodedNodeSet dstPreVisit;
683 getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
684 Call, *this);
685
686 // Actually evaluate the function call. We try each of the checkers
687 // to see if the can evaluate the function call, and get a callback at
688 // defaultEvalCall if all of them fail.
689 ExplodedNodeSet dstCallEvaluated;
690 getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
691 Call, *this, EvalCallOptions());
692
693 // If there were other constructors called for object-type arguments
694 // of this call, clean them up.
695 ExplodedNodeSet dstArgumentCleanup;
696 for (ExplodedNode *I : dstCallEvaluated)
697 finishArgumentConstruction(dstArgumentCleanup, I, Call);
698
699 ExplodedNodeSet dstPostCall;
700 getCheckerManager().runCheckersForPostCall(dstPostCall, dstArgumentCleanup,
701 Call, *this);
702
703 // Escaping symbols conjured during invalidating the regions above.
704 // Note that, for inlined calls the nodes were put back into the worklist,
705 // so we can assume that every node belongs to a conservative call at this
706 // point.
707
708 // Run pointerEscape callback with the newly conjured symbols.
710 for (ExplodedNode *I : dstPostCall) {
711 NodeBuilder B(I, Dst, *currBldrCtx);
712 ProgramStateRef State = I->getState();
713 Escaped.clear();
714 {
715 unsigned Arg = -1;
716 for (const ParmVarDecl *PVD : Call.parameters()) {
717 ++Arg;
718 QualType ParamTy = PVD->getType();
719 if (ParamTy.isNull() ||
720 (!ParamTy->isPointerType() && !ParamTy->isReferenceType()))
721 continue;
722 QualType Pointee = ParamTy->getPointeeType();
723 if (Pointee.isConstQualified() || Pointee->isVoidType())
724 continue;
725 if (const MemRegion *MR = Call.getArgSVal(Arg).getAsRegion())
726 Escaped.emplace_back(loc::MemRegionVal(MR), State->getSVal(MR, Pointee));
727 }
728 }
729
730 State = processPointerEscapedOnBind(State, Escaped, I->getLocationContext(),
732
733 if (State == I->getState())
734 Dst.insert(I);
735 else
736 B.generateNode(I->getLocation(), State, I);
737 }
738}
739
741 const LocationContext *LCtx,
742 ProgramStateRef State) {
743 const Expr *E = Call.getOriginExpr();
744 const ConstCFGElementRef &Elem = Call.getCFGElementRef();
745 if (!E)
746 return State;
747
748 // Some method families have known return values.
749 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
750 switch (Msg->getMethodFamily()) {
751 default:
752 break;
753 case OMF_autorelease:
754 case OMF_retain:
755 case OMF_self: {
756 // These methods return their receivers.
757 return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
758 }
759 }
760 } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
761 SVal ThisV = C->getCXXThisVal();
762 ThisV = State->getSVal(ThisV.castAs<Loc>());
763 return State->BindExpr(E, LCtx, ThisV);
764 }
765
766 SVal R;
767 QualType ResultTy = Call.getResultType();
768 unsigned Count = currBldrCtx->blockCount();
769 if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
770 // Conjure a temporary if the function returns an object by value.
771 SVal Target;
772 assert(RTC->getStmt() == Call.getOriginExpr());
773 EvalCallOptions CallOpts; // FIXME: We won't really need those.
774 std::tie(State, Target) = handleConstructionContext(
775 Call.getOriginExpr(), State, currBldrCtx, LCtx,
776 RTC->getConstructionContext(), CallOpts);
777 const MemRegion *TargetR = Target.getAsRegion();
778 assert(TargetR);
779 // Invalidate the region so that it didn't look uninitialized. If this is
780 // a field or element constructor, we do not want to invalidate
781 // the whole structure. Pointer escape is meaningless because
782 // the structure is a product of conservative evaluation
783 // and therefore contains nothing interesting at this point.
785 ITraits.setTrait(TargetR,
787 State = State->invalidateRegions(TargetR, Elem, Count, LCtx,
788 /* CausesPointerEscape=*/false, nullptr,
789 &Call, &ITraits);
790
791 R = State->getSVal(Target.castAs<Loc>(), E->getType());
792 } else {
793 // Conjure a symbol if the return value is unknown.
794
795 // See if we need to conjure a heap pointer instead of
796 // a regular unknown pointer.
797 const auto *CNE = dyn_cast<CXXNewExpr>(E);
798 if (CNE && CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
799 R = svalBuilder.getConjuredHeapSymbolVal(Elem, LCtx, E->getType(), Count);
800 const MemRegion *MR = R.getAsRegion()->StripCasts();
801
802 // Store the extent of the allocated object(s).
803 SVal ElementCount;
804 if (const Expr *SizeExpr = CNE->getArraySize().value_or(nullptr)) {
805 ElementCount = State->getSVal(SizeExpr, LCtx);
806 } else {
807 ElementCount = svalBuilder.makeIntVal(1, /*IsUnsigned=*/true);
808 }
809
810 SVal ElementSize = getElementExtent(CNE->getAllocatedType(), svalBuilder);
811
812 SVal Size =
813 svalBuilder.evalBinOp(State, BO_Mul, ElementCount, ElementSize,
814 svalBuilder.getArrayIndexType());
815
816 // FIXME: This line is to prevent a crash. For more details please check
817 // issue #56264.
818 if (Size.isUndef())
819 Size = UnknownVal();
820
821 State = setDynamicExtent(State, MR, Size.castAs<DefinedOrUnknownSVal>());
822 } else {
823 R = svalBuilder.conjureSymbolVal(Elem, LCtx, ResultTy, Count);
824 }
825 }
826 return State->BindExpr(E, LCtx, R);
827}
828
829// Conservatively evaluate call by invalidating regions and binding
830// a conjured return value.
831void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
832 ExplodedNode *Pred, ProgramStateRef State) {
833 State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
834 State = bindReturnValue(Call, Pred->getLocationContext(), State);
835
836 // And make the result node.
837 static SimpleProgramPointTag PT("ExprEngine", "Conservative eval call");
838 Bldr.generateNode(Call.getProgramPoint(false, &PT), State, Pred);
839}
840
841ExprEngine::CallInlinePolicy
842ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
843 AnalyzerOptions &Opts,
844 const EvalCallOptions &CallOpts) {
845 const LocationContext *CurLC = Pred->getLocationContext();
846 const StackFrameContext *CallerSFC = CurLC->getStackFrame();
847 switch (Call.getKind()) {
848 case CE_Function:
850 case CE_Block:
851 break;
852 case CE_CXXMember:
855 return CIP_DisallowedAlways;
856 break;
857 case CE_CXXConstructor: {
859 return CIP_DisallowedAlways;
860
862
863 const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
864
866 const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
867 : nullptr;
868
869 if (llvm::isa_and_nonnull<NewAllocatedObjectConstructionContext>(CC) &&
870 !Opts.MayInlineCXXAllocator)
871 return CIP_DisallowedOnce;
872
873 if (CallOpts.IsArrayCtorOrDtor) {
874 if (!shouldInlineArrayConstruction(Pred->getState(), CtorExpr, CurLC))
875 return CIP_DisallowedOnce;
876 }
877
878 // Inlining constructors requires including initializers in the CFG.
879 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
880 assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
881 (void)ADC;
882
883 // If the destructor is trivial, it's always safe to inline the constructor.
884 if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
885 break;
886
887 // For other types, only inline constructors if destructor inlining is
888 // also enabled.
890 return CIP_DisallowedAlways;
891
893 // If we don't handle temporary destructors, we shouldn't inline
894 // their constructors.
895 if (CallOpts.IsTemporaryCtorOrDtor &&
896 !Opts.ShouldIncludeTemporaryDtorsInCFG)
897 return CIP_DisallowedOnce;
898
899 // If we did not find the correct this-region, it would be pointless
900 // to inline the constructor. Instead we will simply invalidate
901 // the fake temporary target.
903 return CIP_DisallowedOnce;
904
905 // If the temporary is lifetime-extended by binding it to a reference-type
906 // field within an aggregate, automatic destructors don't work properly.
908 return CIP_DisallowedOnce;
909 }
910
911 break;
912 }
914 // This doesn't really increase the cost of inlining ever, because
915 // the stack frame of the inherited constructor is trivial.
916 return CIP_Allowed;
917 }
918 case CE_CXXDestructor: {
920 return CIP_DisallowedAlways;
921
922 // Inlining destructors requires building the CFG correctly.
923 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
924 assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
925 (void)ADC;
926
927 if (CallOpts.IsArrayCtorOrDtor) {
928 if (!shouldInlineArrayDestruction(getElementCountOfArrayBeingDestructed(
929 Call, Pred->getState(), svalBuilder))) {
930 return CIP_DisallowedOnce;
931 }
932 }
933
934 // Allow disabling temporary destructor inlining with a separate option.
935 if (CallOpts.IsTemporaryCtorOrDtor &&
936 !Opts.MayInlineCXXTemporaryDtors)
937 return CIP_DisallowedOnce;
938
939 // If we did not find the correct this-region, it would be pointless
940 // to inline the destructor. Instead we will simply invalidate
941 // the fake temporary target.
943 return CIP_DisallowedOnce;
944 break;
945 }
947 [[fallthrough]];
948 case CE_CXXAllocator:
949 if (Opts.MayInlineCXXAllocator)
950 break;
951 // Do not inline allocators until we model deallocators.
952 // This is unfortunate, but basically necessary for smart pointers and such.
953 return CIP_DisallowedAlways;
954 case CE_ObjCMessage:
955 if (!Opts.MayInlineObjCMethod)
956 return CIP_DisallowedAlways;
957 if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
959 return CIP_DisallowedAlways;
960 break;
961 }
962
963 return CIP_Allowed;
964}
965
966/// Returns true if the given C++ class contains a member with the given name.
967static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
968 StringRef Name) {
969 const IdentifierInfo &II = Ctx.Idents.get(Name);
970 return RD->hasMemberName(Ctx.DeclarationNames.getIdentifier(&II));
971}
972
973/// Returns true if the given C++ class is a container or iterator.
974///
975/// Our heuristic for this is whether it contains a method named 'begin()' or a
976/// nested type named 'iterator' or 'iterator_category'.
977static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
978 return hasMember(Ctx, RD, "begin") ||
979 hasMember(Ctx, RD, "iterator") ||
980 hasMember(Ctx, RD, "iterator_category");
981}
982
983/// Returns true if the given function refers to a method of a C++ container
984/// or iterator.
985///
986/// We generally do a poor job modeling most containers right now, and might
987/// prefer not to inline their methods.
988static bool isContainerMethod(const ASTContext &Ctx,
989 const FunctionDecl *FD) {
990 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
991 return isContainerClass(Ctx, MD->getParent());
992 return false;
993}
994
995/// Returns true if the given function is the destructor of a class named
996/// "shared_ptr".
997static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
998 const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
999 if (!Dtor)
1000 return false;
1001
1002 const CXXRecordDecl *RD = Dtor->getParent();
1003 if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
1004 if (II->isStr("shared_ptr"))
1005 return true;
1006
1007 return false;
1008}
1009
1010/// Returns true if the function in \p CalleeADC may be inlined in general.
1011///
1012/// This checks static properties of the function, such as its signature and
1013/// CFG, to determine whether the analyzer should ever consider inlining it,
1014/// in any context.
1015bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
1016 AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
1017 // FIXME: Do not inline variadic calls.
1018 if (CallEvent::isVariadic(CalleeADC->getDecl()))
1019 return false;
1020
1021 // Check certain C++-related inlining policies.
1022 ASTContext &Ctx = CalleeADC->getASTContext();
1023 if (Ctx.getLangOpts().CPlusPlus) {
1024 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
1025 // Conditionally control the inlining of template functions.
1026 if (!Opts.MayInlineTemplateFunctions)
1027 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
1028 return false;
1029
1030 // Conditionally control the inlining of C++ standard library functions.
1031 if (!Opts.MayInlineCXXStandardLibrary)
1032 if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
1034 return false;
1035
1036 // Conditionally control the inlining of methods on objects that look
1037 // like C++ containers.
1038 if (!Opts.MayInlineCXXContainerMethods)
1039 if (!AMgr.isInCodeFile(FD->getLocation()))
1040 if (isContainerMethod(Ctx, FD))
1041 return false;
1042
1043 // Conditionally control the inlining of the destructor of C++ shared_ptr.
1044 // We don't currently do a good job modeling shared_ptr because we can't
1045 // see the reference count, so treating as opaque is probably the best
1046 // idea.
1047 if (!Opts.MayInlineCXXSharedPtrDtor)
1048 if (isCXXSharedPtrDtor(FD))
1049 return false;
1050 }
1051 }
1052
1053 // It is possible that the CFG cannot be constructed.
1054 // Be safe, and check if the CalleeCFG is valid.
1055 const CFG *CalleeCFG = CalleeADC->getCFG();
1056 if (!CalleeCFG)
1057 return false;
1058
1059 // Do not inline large functions.
1060 if (isHuge(CalleeADC))
1061 return false;
1062
1063 // It is possible that the live variables analysis cannot be
1064 // run. If so, bail out.
1065 if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
1066 return false;
1067
1068 return true;
1069}
1070
1071bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
1072 const ExplodedNode *Pred,
1073 const EvalCallOptions &CallOpts) {
1074 if (!D)
1075 return false;
1076
1077 AnalysisManager &AMgr = getAnalysisManager();
1078 AnalyzerOptions &Opts = AMgr.options;
1079 AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
1080 AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
1081
1082 // The auto-synthesized bodies are essential to inline as they are
1083 // usually small and commonly used. Note: we should do this check early on to
1084 // ensure we always inline these calls.
1085 if (CalleeADC->isBodyAutosynthesized())
1086 return true;
1087
1088 if (!AMgr.shouldInlineCall())
1089 return false;
1090
1091 // Check if this function has been marked as non-inlinable.
1092 std::optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
1093 if (MayInline) {
1094 if (!*MayInline)
1095 return false;
1096
1097 } else {
1098 // We haven't actually checked the static properties of this function yet.
1099 // Do that now, and record our decision in the function summaries.
1100 if (mayInlineDecl(CalleeADC)) {
1101 Engine.FunctionSummaries->markMayInline(D);
1102 } else {
1103 Engine.FunctionSummaries->markShouldNotInline(D);
1104 return false;
1105 }
1106 }
1107
1108 // Check if we should inline a call based on its kind.
1109 // FIXME: this checks both static and dynamic properties of the call, which
1110 // means we're redoing a bit of work that could be cached in the function
1111 // summary.
1112 CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
1113 if (CIP != CIP_Allowed) {
1114 if (CIP == CIP_DisallowedAlways) {
1115 assert(!MayInline || *MayInline);
1116 Engine.FunctionSummaries->markShouldNotInline(D);
1117 }
1118 return false;
1119 }
1120
1121 // Do not inline if recursive or we've reached max stack frame count.
1122 bool IsRecursive = false;
1123 unsigned StackDepth = 0;
1124 examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
1125 if ((StackDepth >= Opts.InlineMaxStackDepth) &&
1126 (!isSmall(CalleeADC) || IsRecursive))
1127 return false;
1128
1129 // Do not inline large functions too many times.
1130 if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
1131 Opts.MaxTimesInlineLarge) &&
1132 isLarge(CalleeADC)) {
1133 NumReachedInlineCountMax++;
1134 return false;
1135 }
1136
1137 if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
1138 return false;
1139
1140 return true;
1141}
1142
1143bool ExprEngine::shouldInlineArrayConstruction(const ProgramStateRef State,
1144 const CXXConstructExpr *CE,
1145 const LocationContext *LCtx) {
1146 if (!CE)
1147 return false;
1148
1149 // FIXME: Handle other arrays types.
1150 if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType())) {
1151 unsigned ArrSize = getContext().getConstantArrayElementCount(CAT);
1152
1153 // This might seem conter-intuitive at first glance, but the functions are
1154 // closely related. Reasoning about destructors depends only on the type
1155 // of the expression that initialized the memory region, which is the
1156 // CXXConstructExpr. So to avoid code repetition, the work is delegated
1157 // to the function that reasons about destructor inlining. Also note that
1158 // if the constructors of the array elements are inlined, the destructors
1159 // can also be inlined and if the destructors can be inline, it's safe to
1160 // inline the constructors.
1161 return shouldInlineArrayDestruction(ArrSize);
1162 }
1163
1164 // Check if we're inside an ArrayInitLoopExpr, and it's sufficiently small.
1165 if (auto Size = getPendingInitLoop(State, CE, LCtx))
1166 return shouldInlineArrayDestruction(*Size);
1167
1168 return false;
1169}
1170
1171bool ExprEngine::shouldInlineArrayDestruction(uint64_t Size) {
1172
1173 uint64_t maxAllowedSize = AMgr.options.maxBlockVisitOnPath;
1174
1175 // Declaring a 0 element array is also possible.
1176 return Size <= maxAllowedSize && Size > 0;
1177}
1178
1179bool ExprEngine::shouldRepeatCtorCall(ProgramStateRef State,
1180 const CXXConstructExpr *E,
1181 const LocationContext *LCtx) {
1182
1183 if (!E)
1184 return false;
1185
1186 auto Ty = E->getType();
1187
1188 // FIXME: Handle non constant array types
1189 if (const auto *CAT = dyn_cast<ConstantArrayType>(Ty)) {
1191 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1192 }
1193
1194 if (auto Size = getPendingInitLoop(State, E, LCtx))
1195 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1196
1197 return false;
1198}
1199
1201 const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
1202 if (!ICall)
1203 return false;
1204
1205 const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
1206 if (!MD)
1207 return false;
1209 return false;
1210
1211 return MD->isTrivial();
1212}
1213
1215 const CallEvent &CallTemplate,
1216 const EvalCallOptions &CallOpts) {
1217 // Make sure we have the most recent state attached to the call.
1218 ProgramStateRef State = Pred->getState();
1219 CallEventRef<> Call = CallTemplate.cloneWithState(State);
1220
1221 // Special-case trivial assignment operators.
1223 performTrivialCopy(Bldr, Pred, *Call);
1224 return;
1225 }
1226
1227 // Try to inline the call.
1228 // The origin expression here is just used as a kind of checksum;
1229 // this should still be safe even for CallEvents that don't come from exprs.
1230 const Expr *E = Call->getOriginExpr();
1231
1232 ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
1233 if (InlinedFailedState) {
1234 // If we already tried once and failed, make sure we don't retry later.
1235 State = InlinedFailedState;
1236 } else {
1237 RuntimeDefinition RD = Call->getRuntimeDefinition();
1238 Call->setForeign(RD.isForeign());
1239 const Decl *D = RD.getDecl();
1240 if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
1241 if (RD.mayHaveOtherDefinitions()) {
1243
1244 // Explore with and without inlining the call.
1245 if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
1246 BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
1247 return;
1248 }
1249
1250 // Don't inline if we're not in any dynamic dispatch mode.
1251 if (Options.getIPAMode() != IPAK_DynamicDispatch) {
1252 conservativeEvalCall(*Call, Bldr, Pred, State);
1253 return;
1254 }
1255 }
1256 ctuBifurcate(*Call, D, Bldr, Pred, State);
1257 return;
1258 }
1259 }
1260
1261 // If we can't inline it, clean up the state traits used only if the function
1262 // is inlined.
1263 State = removeStateTraitsUsedForArrayEvaluation(
1264 State, dyn_cast_or_null<CXXConstructExpr>(E), Call->getLocationContext());
1265
1266 // Also handle the return value and invalidate the regions.
1267 conservativeEvalCall(*Call, Bldr, Pred, State);
1268}
1269
/// Split the analysis path on an imprecise dynamic-dispatch target: explore
/// the call once assuming the devirtualized (inlined) definition \p D and
/// once conservatively, recording the chosen mode per receiver region
/// \p BifurReg so the same region is never split twice.
void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  // Strip casts so all views of the receiver map to a single region key.
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
                        State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      ctuBifurcate(Call, D, Bldr, Pred, State);
    // If inline failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    // NOTE(review): on the inlined path this conservative evaluation runs in
    // addition to ctuBifurcate above (there is no `else`) — confirm that
    // this double evaluation is intended.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  // First world: assume the runtime definition is the right one and inline.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeInlined);
  ctuBifurcate(Call, D, Bldr, Pred, IState);

  // Second world: assume we know nothing and evaluate conservatively.
  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  // Statistic: count how often imprecise dispatch forced a path split.
  NumOfDynamicDispatchPathSplits++;
}
1306
1308 ExplodedNodeSet &Dst) {
1309 ExplodedNodeSet dstPreVisit;
1310 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1311
1312 StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1313
1314 if (RS->getRetValue()) {
1315 for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1316 ei = dstPreVisit.end(); it != ei; ++it) {
1317 B.generateNode(RS, *it, (*it)->getState());
1318 }
1319 }
1320}
#define V(N, I)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
#define STAT_COUNTER(VARNAME, DESC)
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD)
Returns true if the given C++ class is a container or iterator.
static ProgramStateRef getInlineFailedState(ProgramStateRef State, const Stmt *CallE)
static std::pair< const Stmt *, const CFGBlock * > getLastStmt(const ExplodedNode *Node)
static bool isTrivialObjectAssignment(const CallEvent &Call)
static bool isCXXSharedPtrDtor(const FunctionDecl *FD)
Returns true if the given function is the destructor of a class named "shared_ptr".
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD, StringRef Name)
Returns true if the given C++ class contains a member with the given name.
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call, const StackFrameContext *calleeCtx)
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy, StoreManager &StoreMgr)
Adjusts a return value when the called function's return type does not match the caller's expression ...
static bool isContainerMethod(const ASTContext &Ctx, const FunctionDecl *FD)
Returns true if the given function refers to a method of a C++ container or iterator.
static unsigned getElementCountOfArrayBeingDestructed(const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB)
#define REGISTER_MAP_WITH_PROGRAMSTATE(Name, Key, Value)
Declares an immutable map of type NameTy, suitable for placement into the ProgramState.
#define REGISTER_TRAIT_WITH_PROGRAMSTATE(Name, Type)
Declares a program state trait for type Type called Name, and introduce a type named NameTy.
a trap message and trap category.
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
Definition ASTContext.h:188
SourceManager & getSourceManager()
Definition ASTContext.h:798
DeclarationNameTable DeclarationNames
Definition ASTContext.h:741
IdentifierTable & Idents
Definition ASTContext.h:737
const LangOptions & getLangOpts() const
Definition ASTContext.h:891
uint64_t getConstantArrayElementCount(const ConstantArrayType *CA) const
Return number of constant array elements.
AnalysisDeclContext * getContext(const Decl *D)
AnalysisDeclContext contains the context data for the function, method or block under analysis.
const BlockInvocationContext * getBlockInvocationContext(const LocationContext *ParentLC, const BlockDecl *BD, const void *Data)
Obtain a context of the block invocation using its parent context.
static bool isInStdNamespace(const Decl *D)
const StackFrameContext * getStackFrame(LocationContext const *ParentLC, const Stmt *S, const CFGBlock *Blk, unsigned BlockCount, unsigned Index)
Obtain a context of the call stack using its parent context.
ASTContext & getASTContext() const
CFG::BuildOptions & getCFGBuildOptions()
Stores options for the analyzer from the command line.
bool mayInlineCXXMemberFunction(CXXInlineableMemberKind K) const
Returns the option controlling which C++ member functions will be considered for inlining.
IPAKind getIPAMode() const
Returns the inter-procedural analysis mode.
CTUPhase1InliningKind getCTUPhase1Inlining() const
unsigned InlineMaxStackDepth
The inlining stack depth limit.
Represents a single basic block in a source-level CFG.
Definition CFG.h:605
bool empty() const
Definition CFG.h:953
succ_iterator succ_begin()
Definition CFG.h:990
unsigned succ_size() const
Definition CFG.h:1008
Represents C++ constructor call.
Definition CFG.h:157
std::optional< T > getAs() const
Convert to the specified CFGElement type, returning std::nullopt if this CFGElement is not of the des...
Definition CFG.h:109
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
Definition CFG.h:1222
unsigned size() const
Return the total number of CFGBlocks within the CFG This is simply a renaming of the getNumBlockIDs()...
Definition CFG.h:1414
bool isLinear() const
Returns true if the CFG has no branches.
Definition CFG.cpp:5352
CFGBlock & getExit()
Definition CFG.h:1332
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Definition CFG.h:1409
BasePaths - Represents the set of paths from a derived class to one of its (direct or indirect) bases...
CXXBasePath & front()
bool isAmbiguous(CanQualType BaseType)
Determine whether the path from the most-derived type to the given base type is ambiguous (i....
Represents a call to a C++ constructor.
Definition ExprCXX.h:1549
CXXConstructionKind getConstructionKind() const
Determine whether this constructor is actually constructing a base class (rather than a complete obje...
Definition ExprCXX.h:1660
Represents a C++ destructor within a class.
Definition DeclCXX.h:2869
Represents a static or instance method of a struct/union/class.
Definition DeclCXX.h:2129
const CXXRecordDecl * getParent() const
Return the parent of this method declaration, which is the class in which this method is defined.
Definition DeclCXX.h:2255
bool isMoveAssignmentOperator() const
Determine whether this is a move assignment operator.
Definition DeclCXX.cpp:2735
bool isCopyAssignmentOperator() const
Determine whether this is a copy-assignment operator, regardless of whether it was declared implicitl...
Definition DeclCXX.cpp:2714
Represents a C++ struct/union/class.
Definition DeclCXX.h:258
bool hasTrivialDestructor() const
Determine whether this class has a trivial destructor (C++ [class.dtor]p3)
Definition DeclCXX.h:1366
bool hasMemberName(DeclarationName N) const
Determine whether this class has a member with the given name, possibly in a non-dependent base class...
bool isDerivedFrom(const CXXRecordDecl *Base) const
Determine whether this class is derived from the class Base.
Represents a point when we begin processing an inlined call.
const CFGBlock * getEntry() const
Returns the entry block in the CFG for the entered function.
const StackFrameContext * getCalleeContext() const
Represents a point when we finish the call exit sequence (for inlined call).
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
Definition Expr.h:2879
ConstructionContext's subclasses describe different ways of constructing an object in C++.
Decl - This represents one declaration (or definition), e.g.
Definition DeclBase.h:86
virtual Decl * getCanonicalDecl()
Retrieves the "canonical" declaration of the given declaration.
Definition DeclBase.h:978
DeclarationName getIdentifier(const IdentifierInfo *ID)
Create a declaration name that is a simple identifier.
IdentifierInfo * getAsIdentifierInfo() const
Retrieve the IdentifierInfo * stored in this declaration name, or null if this declaration name isn't...
This is a meta program point, which should be skipped by all the diagnostic reasoning etc.
This represents one expression.
Definition Expr.h:112
QualType getType() const
Definition Expr.h:144
Represents a function declaration or definition.
Definition Decl.h:1999
bool isTrivial() const
Whether this function is "trivial" in some specialized C++ senses.
Definition Decl.h:2376
One of these records is kept for each identifier that is lexed.
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
It wraps the AnalysisDeclContext to represent both the call stack with the help of StackFrameContext ...
const Decl * getDecl() const
LLVM_ATTRIBUTE_RETURNS_NONNULL AnalysisDeclContext * getAnalysisDeclContext() const
const LocationContext * getParent() const
It might return null.
const StackFrameContext * getStackFrame() const
DeclarationName getDeclName() const
Get the actual, stored name of the declaration, which may be a special name.
Definition Decl.h:339
Represents a parameter to a function.
Definition Decl.h:1789
const StackFrameContext * getStackFrame() const
std::optional< T > getAs() const
Convert to the specified ProgramPoint type, returning std::nullopt if this ProgramPoint is not of the...
A (possibly-)qualified type.
Definition TypeBase.h:937
bool isNull() const
Return true if this QualType doesn't point to a type yet.
Definition TypeBase.h:1004
QualType getCanonicalType() const
Definition TypeBase.h:8337
bool isConstQualified() const
Determine whether this type is const-qualified.
Definition TypeBase.h:8358
ReturnStmt - This represents a return, optionally of an expression: return; return 4;.
Definition Stmt.h:3160
Expr * getRetValue()
Definition Stmt.h:3187
bool isInSystemHeader(SourceLocation Loc) const
Returns if a SourceLocation is in a system header.
It represents a stack frame of the call stack (based on CallEvent).
const Stmt * getCallSite() const
const CFGBlock * getCallSiteBlock() const
Stmt - This represents one statement.
Definition Stmt.h:85
bool isVoidType() const
Definition TypeBase.h:8878
bool isPointerType() const
Definition TypeBase.h:8522
CanQualType getCanonicalTypeUnqualified() const
bool isReferenceType() const
Definition TypeBase.h:8546
const CXXRecordDecl * getPointeeCXXRecordDecl() const
If this is a pointer or reference to a RecordType, return the CXXRecordDecl that the type refers to.
Definition Type.cpp:1909
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
Definition Type.cpp:752
bool isObjCObjectPointerType() const
Definition TypeBase.h:8691
AnalysisDeclContext * getAnalysisDeclContext(const Decl *D)
Represents a call to a C++ constructor.
Definition CallEvent.h:986
const CXXConstructorDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Definition CallEvent.h:1017
const CXXConstructExpr * getOriginExpr() const override
Returns the expression whose value will be the result of this call.
Definition CallEvent.h:1013
Represents a non-static C++ member function call, no matter how it is written.
Definition CallEvent.h:679
const FunctionDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Manages the lifetime of CallEvent objects.
Definition CallEvent.h:1363
CallEventRef getSimpleCall(const CallExpr *E, ProgramStateRef State, const LocationContext *LCtx, CFGBlock::ConstCFGElementRef ElemRef)
CallEventRef getCaller(const StackFrameContext *CalleeCtx, ProgramStateRef State)
Gets an outside caller given a callee context.
Represents an abstract call to a function or method along a particular path.
Definition CallEvent.h:153
CallEventRef< T > cloneWithState(ProgramStateRef NewState) const
Returns a copy of this CallEvent, but using the given state.
Definition CallEvent.h:1480
static QualType getDeclaredResultType(const Decl *D)
Returns the result type of a function or method declaration.
static bool isVariadic(const Decl *D)
Returns true if the given decl is known to be variadic.
void runCheckersForPreCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng)
Run checkers for pre-visiting obj-c messages.
void runCheckersForEvalCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &CE, ExprEngine &Eng, const EvalCallOptions &CallOpts)
Run checkers for evaluating a call.
void runCheckersForPostObjCMessage(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const ObjCMethodCall &msg, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
void runCheckersForPostStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting Stmts.
void runCheckersForNewAllocator(const CXXAllocatorCall &Call, ExplodedNodeSet &Dst, ExplodedNode *Pred, ExprEngine &Eng, bool wasInlined=false)
Run checkers between C++ operator new and constructor calls.
void runCheckersForPreStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng)
Run checkers for pre-visiting Stmts.
void runCheckersForPostCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
WorkList * getCTUWorkList() const
Definition CoreEngine.h:173
WorkList * getWorkList() const
Definition CoreEngine.h:172
void insert(const ExplodedNodeSet &S)
void Add(ExplodedNode *N)
const ProgramStateRef & getState() const
ProgramPoint getLocation() const
getLocation - Returns the edge associated with the given node.
void addPredecessor(ExplodedNode *V, ExplodedGraph &G)
addPredeccessor - Adds a predecessor to the current node, and in tandem add this node as a successor ...
const StackFrameContext * getStackFrame() const
const LocationContext * getLocationContext() const
std::optional< T > getLocationAs() const &
ExplodedNode * getFirstPred()
ProgramStateManager & getStateManager()
Definition ExprEngine.h:421
void processCallEnter(NodeBuilderContext &BC, CallEnter CE, ExplodedNode *Pred)
Generate the entry node of the callee.
void processBeginOfFunction(NodeBuilderContext &BC, ExplodedNode *Pred, ExplodedNodeSet &Dst, const BlockEdge &L)
Called by CoreEngine.
void removeDead(ExplodedNode *Node, ExplodedNodeSet &Out, const Stmt *ReferenceStmt, const LocationContext *LC, const Stmt *DiagnosticStmt=nullptr, ProgramPoint::Kind K=ProgramPoint::PreStmtPurgeDeadSymbolsKind)
Run the analyzer's garbage collection - remove dead symbols and bindings from the state.
std::pair< ProgramStateRef, SVal > handleConstructionContext(const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx, const LocationContext *LCtx, const ConstructionContext *CC, EvalCallOptions &CallOpts, unsigned Idx=0)
A convenient wrapper around computeObjectUnderConstruction and updateObjectsUnderConstruction.
Definition ExprEngine.h:763
void VisitReturnStmt(const ReturnStmt *R, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitReturnStmt - Transfer function logic for return statements.
const CoreEngine & getCoreEngine() const
Definition ExprEngine.h:452
void processCallExit(ExplodedNode *Pred)
Generate the sequence of nodes that simulate the call exit and the post visit for CallExpr.
static std::optional< SVal > getObjectUnderConstruction(ProgramStateRef State, const ConstructionContextItem &Item, const LocationContext *LC)
By looking at a certain item that may be potentially part of an object's ConstructionContext,...
CFGElement getCurrentCFGElement()
Return the CFG element corresponding to the worklist element that is currently being processed by Exp...
Definition ExprEngine.h:712
@ Inline_Minimal
Do minimal inlining of callees.
Definition ExprEngine.h:134
ProgramStateRef processPointerEscapedOnBind(ProgramStateRef State, ArrayRef< std::pair< SVal, SVal > > LocAndVals, const LocationContext *LCtx, PointerEscapeKind Kind, const CallEvent *Call)
Call PointerEscape callback when a value escapes as a result of bind.
static std::optional< unsigned > getIndexOfElementToConstruct(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves which element is being constructed in a non-POD type array.
void VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitCall - Transfer function for function calls.
ASTContext & getContext() const
getContext - Return the ASTContext associated with this analysis.
Definition ExprEngine.h:196
StoreManager & getStoreManager()
Definition ExprEngine.h:424
void evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred, const CallEvent &Call)
Evaluate a call, running pre- and post-call checkers and allowing checkers to be responsible for hand...
ConstCFGElementRef getCFGElementRef() const
Definition ExprEngine.h:232
static std::optional< unsigned > getPendingArrayDestruction(ProgramStateRef State, const LocationContext *LCtx)
Retrieves which element is being destructed in a non-POD type array.
CheckerManager & getCheckerManager() const
Definition ExprEngine.h:205
ProgramStateRef bindReturnValue(const CallEvent &Call, const LocationContext *LCtx, ProgramStateRef State)
Create a new state in which the call return value is binded to the call origin expression.
void removeDeadOnEndOfFunction(NodeBuilderContext &BC, ExplodedNode *Pred, ExplodedNodeSet &Dst)
Remove dead bindings/symbols before exiting a function.
void defaultEvalCall(NodeBuilder &B, ExplodedNode *Pred, const CallEvent &Call, const EvalCallOptions &CallOpts={})
Default implementation of call evaluation.
AnalysisManager & getAnalysisManager()
Definition ExprEngine.h:198
static std::optional< unsigned > getPendingInitLoop(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves the size of the array in the pending ArrayInitLoopExpr.
MemRegion - The root abstract class for all memory regions.
Definition MemRegion.h:98
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * StripCasts(bool StripBaseAndDerivedCasts=true) const
unsigned blockCount() const
Returns the number of times the current basic block has been visited on the exploded graph path.
Definition CoreEngine.h:224
This is the simplest builder which generates nodes in the ExplodedGraph.
Definition CoreEngine.h:240
ExplodedNode * generateNode(const ProgramPoint &PP, ProgramStateRef State, ExplodedNode *Pred)
Generates a node in the ExplodedGraph.
Definition CoreEngine.h:293
void takeNodes(const ExplodedNodeSet &S)
Definition CoreEngine.h:335
Represents any expression that calls an Objective-C method.
Definition CallEvent.h:1250
CallEventManager & getCallEventManager()
Information about invalidation for a particular region/symbol.
Definition MemRegion.h:1657
void setTrait(SymbolRef Sym, InvalidationKinds IK)
Defines the runtime definition of the called function.
Definition CallEvent.h:110
const MemRegion * getDispatchRegion()
When other definitions are possible, returns the region whose runtime type determines the method defi...
Definition CallEvent.h:141
bool mayHaveOtherDefinitions()
Check if the definition we have is precise.
Definition CallEvent.h:137
SVal - This represents a symbolic expression, which can be either an L-value or an R-value.
Definition SVals.h:56
QualType getType(const ASTContext &) const
Try to get a reasonable type for the given value.
Definition SVals.cpp:180
const MemRegion * getAsRegion() const
Definition SVals.cpp:119
T castAs() const
Convert to the specified SVal type, asserting that this SVal is of the desired type.
Definition SVals.h:83
This builder class is useful for generating nodes that resulted from visiting a statement.
Definition CoreEngine.h:384
ExplodedNode * generateNode(const Stmt *S, ExplodedNode *Pred, ProgramStateRef St, const ProgramPointTag *tag=nullptr, ProgramPoint::Kind K=ProgramPoint::PostStmtKind)
Definition CoreEngine.h:413
SVal evalDerivedToBase(SVal Derived, const CastExpr *Cast)
Evaluates a chain of derived-to-base casts through the path specified in Cast.
Definition Store.cpp:254
virtual void enqueue(const WorkListUnit &U)=0
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * getRegion() const
Get the underlying region.
Definition SVals.h:493
@ PSK_EscapeOutParameters
Escape for a new symbol that was generated into a region that the analyzer cannot follow during a con...
DefinedOrUnknownSVal getDynamicElementCount(ProgramStateRef State, const MemRegion *MR, SValBuilder &SVB, QualType Ty)
IntrusiveRefCntPtr< const ProgramState > ProgramStateRef
ProgramStateRef setDynamicExtent(ProgramStateRef State, const MemRegion *MR, DefinedOrUnknownSVal Extent)
Set the dynamic extent Extent of the region MR.
@ CE_CXXInheritedConstructor
Definition CallEvent.h:69
@ CE_CXXStaticOperator
Definition CallEvent.h:62
@ CE_CXXDestructor
Definition CallEvent.h:65
@ CE_CXXDeallocator
Definition CallEvent.h:73
@ CE_CXXAllocator
Definition CallEvent.h:72
@ CE_CXXConstructor
Definition CallEvent.h:68
@ CE_CXXMemberOperator
Definition CallEvent.h:64
DefinedOrUnknownSVal getElementExtent(QualType Ty, SValBuilder &SVB)
std::variant< struct RequiresDecl, struct HeaderDecl, struct UmbrellaDirDecl, struct ModuleDecl, struct ExcludeDecl, struct ExportDecl, struct ExportAsDecl, struct ExternModuleDecl, struct UseDecl, struct LinkDecl, struct ConfigMacrosDecl, struct ConflictDecl > Decl
All declarations that can appear in a module declaration.
The JSON file list parser is used to communicate input to InstallAPI.
bool isa(CodeGen::Address addr)
Definition Address.h:330
CFGBlock::ConstCFGElementRef ConstCFGElementRef
Definition CFG.h:1199
@ ExpectedClass
@ IPAK_DynamicDispatch
Enable inlining of dynamically dispatched methods.
@ IPAK_DynamicDispatchBifurcate
Enable inlining of dynamically dispatched methods, bifurcate paths when exact type info is unavailabl...
@ CIMK_Destructors
Refers to destructors (implicit or explicit).
@ CIMK_MemberFunctions
Refers to regular member function and operator calls.
@ CIMK_Constructors
Refers to constructors (implicit or explicit).
U cast(CodeGen::Address addr)
Definition Address.h:327
unsigned long uint64_t
Hints for figuring out of a call should be inlined during evalCall().
Definition ExprEngine.h:97
bool IsTemporaryLifetimeExtendedViaAggregate
This call is a constructor for a temporary that is lifetime-extended by binding it to a reference-typ...
Definition ExprEngine.h:112
bool IsTemporaryCtorOrDtor
This call is a constructor or a destructor of a temporary value.
Definition ExprEngine.h:107
bool IsArrayCtorOrDtor
This call is a constructor or a destructor for a single element within an array, a part of array cons...
Definition ExprEngine.h:104
bool IsCtorOrDtorWithImproperlyModeledTargetRegion
This call is a constructor or a destructor for which we do not currently compute the this-region corr...
Definition ExprEngine.h:100
Traits for storing the call processing policy inside GDM.