clang 20.0.0git
ExprEngineCallAndReturn.cpp
Go to the documentation of this file.
1//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines ExprEngine's support for calls and returns.
10//
11//===----------------------------------------------------------------------===//
12
15#include "clang/AST/Decl.h"
16#include "clang/AST/DeclCXX.h"
23#include "llvm/ADT/SmallSet.h"
24#include "llvm/ADT/Statistic.h"
25#include "llvm/Support/Casting.h"
26#include "llvm/Support/Compiler.h"
27#include "llvm/Support/SaveAndRestore.h"
28#include <optional>
29
30using namespace clang;
31using namespace ento;
32
33#define DEBUG_TYPE "ExprEngine"
34
35STATISTIC(NumOfDynamicDispatchPathSplits,
36 "The # of times we split the path due to imprecise dynamic dispatch info");
37
38STATISTIC(NumInlinedCalls,
39 "The # of times we inlined a call");
40
41STATISTIC(NumReachedInlineCountMax,
42 "The # of times we reached inline count maximum");
43
45 ExplodedNode *Pred) {
46 // Get the entry block in the CFG of the callee.
47 const StackFrameContext *calleeCtx = CE.getCalleeContext();
48 PrettyStackTraceLocationContext CrashInfo(calleeCtx);
49 const CFGBlock *Entry = CE.getEntry();
50
51 // Validate the CFG.
52 assert(Entry->empty());
53 assert(Entry->succ_size() == 1);
54
55 // Get the solitary successor.
56 const CFGBlock *Succ = *(Entry->succ_begin());
57
58 // Construct an edge representing the starting location in the callee.
59 BlockEdge Loc(Entry, Succ, calleeCtx);
60
61 ProgramStateRef state = Pred->getState();
62
63 // Construct a new node, notify checkers that analysis of the function has
64 // begun, and add the resultant nodes to the worklist.
65 bool isNew;
66 ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
67 Node->addPredecessor(Pred, G);
68 if (isNew) {
69 ExplodedNodeSet DstBegin;
70 processBeginOfFunction(BC, Node, DstBegin, Loc);
71 Engine.enqueue(DstBegin);
72 }
73}
74
75// Find the last statement on the path to the exploded node and the
76// corresponding Block.
77static std::pair<const Stmt*,
79 const Stmt *S = nullptr;
80 const CFGBlock *Blk = nullptr;
81 const StackFrameContext *SF = Node->getStackFrame();
82
83 // Back up through the ExplodedGraph until we reach a statement node in this
84 // stack frame.
85 while (Node) {
86 const ProgramPoint &PP = Node->getLocation();
87
88 if (PP.getStackFrame() == SF) {
89 if (std::optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
90 S = SP->getStmt();
91 break;
92 } else if (std::optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
93 S = CEE->getCalleeContext()->getCallSite();
94 if (S)
95 break;
96
97 // If there is no statement, this is an implicitly-generated call.
98 // We'll walk backwards over it and then continue the loop to find
99 // an actual statement.
100 std::optional<CallEnter> CE;
101 do {
102 Node = Node->getFirstPred();
103 CE = Node->getLocationAs<CallEnter>();
104 } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
105
106 // Continue searching the graph.
107 } else if (std::optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
108 Blk = BE->getSrc();
109 }
110 } else if (std::optional<CallEnter> CE = PP.getAs<CallEnter>()) {
111 // If we reached the CallEnter for this function, it has no statements.
112 if (CE->getCalleeContext() == SF)
113 break;
114 }
115
116 if (Node->pred_empty())
117 return std::make_pair(nullptr, nullptr);
118
119 Node = *Node->pred_begin();
120 }
121
122 return std::make_pair(S, Blk);
123}
124
125/// Adjusts a return value when the called function's return type does not
126/// match the caller's expression type. This can happen when a dynamic call
127/// is devirtualized, and the overriding method has a covariant (more specific)
128/// return type than the parent's method. For C++ objects, this means we need
129/// to add base casts.
130static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
131 StoreManager &StoreMgr) {
132 // For now, the only adjustments we handle apply only to locations.
133 if (!isa<Loc>(V))
134 return V;
135
136 // If the types already match, don't do any unnecessary work.
137 ExpectedTy = ExpectedTy.getCanonicalType();
138 ActualTy = ActualTy.getCanonicalType();
139 if (ExpectedTy == ActualTy)
140 return V;
141
142 // No adjustment is needed between Objective-C pointer types.
143 if (ExpectedTy->isObjCObjectPointerType() &&
144 ActualTy->isObjCObjectPointerType())
145 return V;
146
147 // C++ object pointers may need "derived-to-base" casts.
148 const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
149 const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
150 if (ExpectedClass && ActualClass) {
151 CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
152 /*DetectVirtual=*/false);
153 if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
154 !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
155 return StoreMgr.evalDerivedToBase(V, Paths.front());
156 }
157 }
158
159 // Unfortunately, Objective-C does not enforce that overridden methods have
160 // covariant return types, so we can't assert that that never happens.
161 // Be safe and return UnknownVal().
162 return UnknownVal();
163}
164
166 ExplodedNode *Pred,
167 ExplodedNodeSet &Dst) {
168 // Find the last statement in the function and the corresponding basic block.
169 const Stmt *LastSt = nullptr;
170 const CFGBlock *Blk = nullptr;
171 std::tie(LastSt, Blk) = getLastStmt(Pred);
172 if (!Blk || !LastSt) {
173 Dst.Add(Pred);
174 return;
175 }
176
177 // Here, we destroy the current location context. We use the current
178 // function's entire body as a diagnostic statement, with which the program
179 // point will be associated. However, we only want to use LastStmt as a
180 // reference for what to clean up if it's a ReturnStmt; otherwise, everything
181 // is dead.
182 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
183 const LocationContext *LCtx = Pred->getLocationContext();
184 removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
187}
188
190 const StackFrameContext *calleeCtx) {
191 const Decl *RuntimeCallee = calleeCtx->getDecl();
192 const Decl *StaticDecl = Call->getDecl();
193 assert(RuntimeCallee);
194 if (!StaticDecl)
195 return true;
196 return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
197}
198
199// Returns the number of elements in the array currently being destructed.
200// If the element count is not found 0 will be returned.
202 const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB) {
203 assert(isa<CXXDestructorCall>(Call) &&
204 "The call event is not a destructor call!");
205
206 const auto &DtorCall = cast<CXXDestructorCall>(Call);
207
208 auto ThisVal = DtorCall.getCXXThisVal();
209
210 if (auto ThisElementRegion = dyn_cast<ElementRegion>(ThisVal.getAsRegion())) {
211 auto ArrayRegion = ThisElementRegion->getAsArrayOffset().getRegion();
212 auto ElementType = ThisElementRegion->getElementType();
213
214 auto ElementCount =
215 getDynamicElementCount(State, ArrayRegion, SVB, ElementType);
216
217 if (!ElementCount.isConstant())
218 return 0;
219
220 return ElementCount.getAsInteger()->getLimitedValue();
221 }
222
223 return 0;
224}
225
226ProgramStateRef ExprEngine::removeStateTraitsUsedForArrayEvaluation(
227 ProgramStateRef State, const CXXConstructExpr *E,
228 const LocationContext *LCtx) {
229
230 assert(LCtx && "Location context must be provided!");
231
232 if (E) {
233 if (getPendingInitLoop(State, E, LCtx))
234 State = removePendingInitLoop(State, E, LCtx);
235
236 if (getIndexOfElementToConstruct(State, E, LCtx))
237 State = removeIndexOfElementToConstruct(State, E, LCtx);
238 }
239
240 if (getPendingArrayDestruction(State, LCtx))
241 State = removePendingArrayDestruction(State, LCtx);
242
243 return State;
244}
245
246/// The call exit is simulated with a sequence of nodes, which occur between
247/// CallExitBegin and CallExitEnd. The following operations occur between the
248/// two program points:
249/// 1. CallExitBegin (triggers the start of call exit sequence)
250/// 2. Bind the return value
251/// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
252/// 4. CallExitEnd (switch to the caller context)
253/// 5. PostStmt<CallExpr>
255 // Step 1 CEBNode was generated before the call.
257 const StackFrameContext *calleeCtx = CEBNode->getStackFrame();
258
259 // The parent context might not be a stack frame, so make sure we
260 // look up the first enclosing stack frame.
261 const StackFrameContext *callerCtx =
262 calleeCtx->getParent()->getStackFrame();
263
264 const Stmt *CE = calleeCtx->getCallSite();
265 ProgramStateRef state = CEBNode->getState();
266 // Find the last statement in the function and the corresponding basic block.
267 const Stmt *LastSt = nullptr;
268 const CFGBlock *Blk = nullptr;
269 std::tie(LastSt, Blk) = getLastStmt(CEBNode);
270
271 // Generate a CallEvent /before/ cleaning the state, so that we can get the
272 // correct value for 'this' (if necessary).
274 CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);
275
276 // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.
277
278 // If this variable is set to 'true' the analyzer will evaluate the call
279 // statement we are about to exit again, instead of continuing the execution
280 // from the statement after the call. This is useful for non-POD type array
281 // construction where the CXXConstructExpr is referenced only once in the CFG,
282 // but we want to evaluate it as many times as many elements the array has.
283 bool ShouldRepeatCall = false;
284
285 if (const auto *DtorDecl =
286 dyn_cast_or_null<CXXDestructorDecl>(Call->getDecl())) {
287 if (auto Idx = getPendingArrayDestruction(state, callerCtx)) {
288 ShouldRepeatCall = *Idx > 0;
289
290 auto ThisVal = svalBuilder.getCXXThis(DtorDecl->getParent(), calleeCtx);
291 state = state->killBinding(ThisVal);
292 }
293 }
294
295 // If the callee returns an expression, bind its value to CallExpr.
296 if (CE) {
297 if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
298 const LocationContext *LCtx = CEBNode->getLocationContext();
299 SVal V = state->getSVal(RS, LCtx);
300
301 // Ensure that the return type matches the type of the returned Expr.
302 if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
303 QualType ReturnedTy =
305 if (!ReturnedTy.isNull()) {
306 if (const Expr *Ex = dyn_cast<Expr>(CE)) {
307 V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
309 }
310 }
311 }
312
313 state = state->BindExpr(CE, callerCtx, V);
314 }
315
316 // Bind the constructed object value to CXXConstructExpr.
317 if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
318 loc::MemRegionVal This =
319 svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
320 SVal ThisV = state->getSVal(This);
321 ThisV = state->getSVal(ThisV.castAs<Loc>());
322 state = state->BindExpr(CCE, callerCtx, ThisV);
323
324 ShouldRepeatCall = shouldRepeatCtorCall(state, CCE, callerCtx);
325 }
326
327 if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
328 // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
329 // while to reach the actual CXXNewExpr element from here, so keep the
330 // region for later use.
331 // Additionally cast the return value of the inlined operator new
332 // (which is of type 'void *') to the correct object type.
333 SVal AllocV = state->getSVal(CNE, callerCtx);
334 AllocV = svalBuilder.evalCast(
335 AllocV, CNE->getType(),
336 getContext().getPointerType(getContext().VoidTy));
337
338 state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
339 AllocV);
340 }
341 }
342
343 if (!ShouldRepeatCall) {
344 state = removeStateTraitsUsedForArrayEvaluation(
345 state, dyn_cast_or_null<CXXConstructExpr>(CE), callerCtx);
346 }
347
348 // Step 3: BindedRetNode -> CleanedNodes
349 // If we can find a statement and a block in the inlined function, run remove
350 // dead bindings before returning from the call. This is important to ensure
351 // that we report the issues such as leaks in the stack contexts in which
352 // they occurred.
353 ExplodedNodeSet CleanedNodes;
354 if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
355 static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
356 auto Loc = isa<ReturnStmt>(LastSt)
357 ? ProgramPoint{PostStmt(LastSt, calleeCtx, &retValBind)}
358 : ProgramPoint{EpsilonPoint(calleeCtx, /*Data1=*/nullptr,
359 /*Data2=*/nullptr, &retValBind)};
360 const CFGBlock *PrePurgeBlock =
361 isa<ReturnStmt>(LastSt) ? Blk : &CEBNode->getCFG().getExit();
362 bool isNew;
363 ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
364 BindedRetNode->addPredecessor(CEBNode, G);
365 if (!isNew)
366 return;
367
368 NodeBuilderContext Ctx(getCoreEngine(), PrePurgeBlock, BindedRetNode);
369 currBldrCtx = &Ctx;
370 // Here, we call the Symbol Reaper with 0 statement and callee location
371 // context, telling it to clean up everything in the callee's context
372 // (and its children). We use the callee's function body as a diagnostic
373 // statement, with which the program point will be associated.
374 removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
375 calleeCtx->getAnalysisDeclContext()->getBody(),
377 currBldrCtx = nullptr;
378 } else {
379 CleanedNodes.Add(CEBNode);
380 }
381
382 for (ExplodedNode *N : CleanedNodes) {
383 // Step 4: Generate the CallExit and leave the callee's context.
384 // CleanedNodes -> CEENode
385 CallExitEnd Loc(calleeCtx, callerCtx);
386 bool isNew;
387 ProgramStateRef CEEState = (N == CEBNode) ? state : N->getState();
388
389 ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
390 CEENode->addPredecessor(N, G);
391 if (!isNew)
392 return;
393
394 // Step 5: Perform the post-condition check of the CallExpr and enqueue the
395 // result onto the work list.
396 // CEENode -> Dst -> WorkList
397 NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
398 SaveAndRestore<const NodeBuilderContext *> NBCSave(currBldrCtx, &Ctx);
399 SaveAndRestore CBISave(currStmtIdx, calleeCtx->getIndex());
400
401 CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
402
403 ExplodedNodeSet DstPostCall;
404 if (llvm::isa_and_nonnull<CXXNewExpr>(CE)) {
405 ExplodedNodeSet DstPostPostCallCallback;
406 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
407 CEENode, *UpdatedCall, *this,
408 /*wasInlined=*/true);
409 for (ExplodedNode *I : DstPostPostCallCallback) {
411 cast<CXXAllocatorCall>(*UpdatedCall), DstPostCall, I, *this,
412 /*wasInlined=*/true);
413 }
414 } else {
415 getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
416 *UpdatedCall, *this,
417 /*wasInlined=*/true);
418 }
419 ExplodedNodeSet Dst;
420 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
421 getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
422 *this,
423 /*wasInlined=*/true);
424 } else if (CE &&
425 !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
426 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
427 getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
428 *this, /*wasInlined=*/true);
429 } else {
430 Dst.insert(DstPostCall);
431 }
432
433 // Enqueue the next element in the block.
434 for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
435 PSI != PSE; ++PSI) {
436 unsigned Idx = calleeCtx->getIndex() + (ShouldRepeatCall ? 0 : 1);
437
438 Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(), Idx);
439 }
440 }
441}
442
443bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
444 // When there are no branches in the function, it means that there's no
445 // exponential complexity introduced by inlining such function.
446 // Such functions also don't trigger various fundamental problems
447 // with our inlining mechanism, such as the problem of
448 // inlined defensive checks. Hence isLinear().
449 const CFG *Cfg = ADC->getCFG();
450 return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
451}
452
453bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
454 const CFG *Cfg = ADC->getCFG();
455 return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
456}
457
458bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
459 const CFG *Cfg = ADC->getCFG();
460 return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
461}
462
463void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
464 bool &IsRecursive, unsigned &StackDepth) {
465 IsRecursive = false;
466 StackDepth = 0;
467
468 while (LCtx) {
469 if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
470 const Decl *DI = SFC->getDecl();
471
472 // Mark recursive (and mutually recursive) functions and always count
473 // them when measuring the stack depth.
474 if (DI == D) {
475 IsRecursive = true;
476 ++StackDepth;
477 LCtx = LCtx->getParent();
478 continue;
479 }
480
481 // Do not count the small functions when determining the stack depth.
482 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
483 if (!isSmall(CalleeADC))
484 ++StackDepth;
485 }
486 LCtx = LCtx->getParent();
487 }
488}
489
// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a bool, specifying either we
// consider this region's information precise or not along the given path.
namespace {
  enum DynamicDispatchMode {
    // The receiver's dynamic type info was trusted and the call was inlined.
    // Starts at 1 so that the map's default-constructed 0 means "unset".
    DynamicDispatchModeInlined = 1,
    // The dynamic type info was not trusted; evaluated conservatively.
    DynamicDispatchModeConservative
  };
} // end anonymous namespace
502
// Per-path bifurcation state: maps a receiver region to one of the
// DynamicDispatchMode values (stored as unsigned).
REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                               const MemRegion *, unsigned)
// True once a foreign (cross-TU) call has been bifurcated on this path.
REGISTER_TRAIT_WITH_PROGRAMSTATE(CTUDispatchBifurcation, bool)
506
507void ExprEngine::ctuBifurcate(const CallEvent &Call, const Decl *D,
508 NodeBuilder &Bldr, ExplodedNode *Pred,
509 ProgramStateRef State) {
510 ProgramStateRef ConservativeEvalState = nullptr;
511 if (Call.isForeign() && !isSecondPhaseCTU()) {
512 const auto IK = AMgr.options.getCTUPhase1Inlining();
513 const bool DoInline = IK == CTUPhase1InliningKind::All ||
515 isSmall(AMgr.getAnalysisDeclContext(D)));
516 if (DoInline) {
517 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
518 return;
519 }
520 const bool BState = State->get<CTUDispatchBifurcation>();
521 if (!BState) { // This is the first time we see this foreign function.
522 // Enqueue it to be analyzed in the second (ctu) phase.
523 inlineCall(Engine.getCTUWorkList(), Call, D, Bldr, Pred, State);
524 // Conservatively evaluate in the first phase.
525 ConservativeEvalState = State->set<CTUDispatchBifurcation>(true);
526 conservativeEvalCall(Call, Bldr, Pred, ConservativeEvalState);
527 } else {
528 conservativeEvalCall(Call, Bldr, Pred, State);
529 }
530 return;
531 }
532 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
533}
534
535void ExprEngine::inlineCall(WorkList *WList, const CallEvent &Call,
536 const Decl *D, NodeBuilder &Bldr,
537 ExplodedNode *Pred, ProgramStateRef State) {
538 assert(D);
539
540 const LocationContext *CurLC = Pred->getLocationContext();
541 const StackFrameContext *CallerSFC = CurLC->getStackFrame();
542 const LocationContext *ParentOfCallee = CallerSFC;
543 if (Call.getKind() == CE_Block &&
544 !cast<BlockCall>(Call).isConversionFromLambda()) {
545 const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
546 assert(BR && "If we have the block definition we should have its region");
548 ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
549 cast<BlockDecl>(D),
550 BR);
551 }
552
553 // This may be NULL, but that's fine.
554 const Expr *CallE = Call.getOriginExpr();
555
556 // Construct a new stack frame for the callee.
558 const StackFrameContext *CalleeSFC =
559 CalleeADC->getStackFrame(ParentOfCallee, CallE, currBldrCtx->getBlock(),
560 currBldrCtx->blockCount(), currStmtIdx);
561
562 CallEnter Loc(CallE, CalleeSFC, CurLC);
563
564 // Construct a new state which contains the mapping from actual to
565 // formal arguments.
566 State = State->enterStackFrame(Call, CalleeSFC);
567
568 bool isNew;
569 if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
570 N->addPredecessor(Pred, G);
571 if (isNew)
572 WList->enqueue(N);
573 }
574
575 // If we decided to inline the call, the successor has been manually
576 // added onto the work list so remove it from the node builder.
577 Bldr.takeNodes(Pred);
578
579 NumInlinedCalls++;
580 Engine.FunctionSummaries->bumpNumTimesInlined(D);
581
582 // Do not mark as visited in the 2nd run (CTUWList), so the function will
583 // be visited as top-level, this way we won't loose reports in non-ctu
584 // mode. Considering the case when a function in a foreign TU calls back
585 // into the main TU.
586 // Note, during the 1st run, it doesn't matter if we mark the foreign
587 // functions as visited (or not) because they can never appear as a top level
588 // function in the main TU.
589 if (!isSecondPhaseCTU())
590 // Mark the decl as visited.
591 if (VisitedCallees)
592 VisitedCallees->insert(D);
593}
594
596 const Stmt *CallE) {
597 const void *ReplayState = State->get<ReplayWithoutInlining>();
598 if (!ReplayState)
599 return nullptr;
600
601 assert(ReplayState == CallE && "Backtracked to the wrong call.");
602 (void)CallE;
603
604 return State->remove<ReplayWithoutInlining>();
605}
606
608 ExplodedNodeSet &dst) {
609 // Perform the previsit of the CallExpr.
610 ExplodedNodeSet dstPreVisit;
611 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
612
613 // Get the call in its initial state. We use this as a template to perform
614 // all the checks.
616 CallEventRef<> CallTemplate = CEMgr.getSimpleCall(
617 CE, Pred->getState(), Pred->getLocationContext(), getCFGElementRef());
618
619 // Evaluate the function call. We try each of the checkers
620 // to see if the can evaluate the function call.
621 ExplodedNodeSet dstCallEvaluated;
622 for (ExplodedNode *N : dstPreVisit) {
623 evalCall(dstCallEvaluated, N, *CallTemplate);
624 }
625
626 // Finally, perform the post-condition check of the CallExpr and store
627 // the created nodes in 'Dst'.
628 // Note that if the call was inlined, dstCallEvaluated will be empty.
629 // The post-CallExpr check will occur in processCallExit.
630 getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
631 *this);
632}
633
634ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
635 const CallEvent &Call) {
636 const Expr *E = Call.getOriginExpr();
637 // FIXME: Constructors to placement arguments of operator new
638 // are not supported yet.
639 if (!E || isa<CXXNewExpr>(E))
640 return State;
641
642 const LocationContext *LC = Call.getLocationContext();
643 for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
644 unsigned I = Call.getASTArgumentIndex(CallI);
645 if (std::optional<SVal> V = getObjectUnderConstruction(State, {E, I}, LC)) {
646 SVal VV = *V;
647 (void)VV;
648 assert(cast<VarRegion>(VV.castAs<loc::MemRegionVal>().getRegion())
649 ->getStackFrame()->getParent()
650 ->getStackFrame() == LC->getStackFrame());
651 State = finishObjectConstruction(State, {E, I}, LC);
652 }
653 }
654
655 return State;
656}
657
658void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
659 ExplodedNode *Pred,
660 const CallEvent &Call) {
661 ProgramStateRef State = Pred->getState();
662 ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
663 if (CleanedState == State) {
664 Dst.insert(Pred);
665 return;
666 }
667
668 const Expr *E = Call.getOriginExpr();
669 const LocationContext *LC = Call.getLocationContext();
670 NodeBuilder B(Pred, Dst, *currBldrCtx);
671 static SimpleProgramPointTag Tag("ExprEngine",
672 "Finish argument construction");
673 PreStmt PP(E, LC, &Tag);
674 B.generateNode(PP, CleanedState, Pred);
675}
676
678 const CallEvent &Call) {
679 // WARNING: At this time, the state attached to 'Call' may be older than the
680 // state in 'Pred'. This is a minor optimization since CheckerManager will
681 // use an updated CallEvent instance when calling checkers, but if 'Call' is
682 // ever used directly in this function all callers should be updated to pass
683 // the most recent state. (It is probably not worth doing the work here since
684 // for some callers this will not be necessary.)
685
686 // Run any pre-call checks using the generic call interface.
687 ExplodedNodeSet dstPreVisit;
688 getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
689 Call, *this);
690
691 // Actually evaluate the function call. We try each of the checkers
692 // to see if the can evaluate the function call, and get a callback at
693 // defaultEvalCall if all of them fail.
694 ExplodedNodeSet dstCallEvaluated;
695 getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
696 Call, *this, EvalCallOptions());
697
698 // If there were other constructors called for object-type arguments
699 // of this call, clean them up.
700 ExplodedNodeSet dstArgumentCleanup;
701 for (ExplodedNode *I : dstCallEvaluated)
702 finishArgumentConstruction(dstArgumentCleanup, I, Call);
703
704 ExplodedNodeSet dstPostCall;
705 getCheckerManager().runCheckersForPostCall(dstPostCall, dstArgumentCleanup,
706 Call, *this);
707
708 // Escaping symbols conjured during invalidating the regions above.
709 // Note that, for inlined calls the nodes were put back into the worklist,
710 // so we can assume that every node belongs to a conservative call at this
711 // point.
712
713 // Run pointerEscape callback with the newly conjured symbols.
715 for (ExplodedNode *I : dstPostCall) {
716 NodeBuilder B(I, Dst, *currBldrCtx);
717 ProgramStateRef State = I->getState();
718 Escaped.clear();
719 {
720 unsigned Arg = -1;
721 for (const ParmVarDecl *PVD : Call.parameters()) {
722 ++Arg;
723 QualType ParamTy = PVD->getType();
724 if (ParamTy.isNull() ||
725 (!ParamTy->isPointerType() && !ParamTy->isReferenceType()))
726 continue;
727 QualType Pointee = ParamTy->getPointeeType();
728 if (Pointee.isConstQualified() || Pointee->isVoidType())
729 continue;
730 if (const MemRegion *MR = Call.getArgSVal(Arg).getAsRegion())
731 Escaped.emplace_back(loc::MemRegionVal(MR), State->getSVal(MR, Pointee));
732 }
733 }
734
735 State = processPointerEscapedOnBind(State, Escaped, I->getLocationContext(),
737
738 if (State == I->getState())
739 Dst.insert(I);
740 else
741 B.generateNode(I->getLocation(), State, I);
742 }
743}
744
746 const LocationContext *LCtx,
747 ProgramStateRef State) {
748 const Expr *E = Call.getOriginExpr();
749 if (!E)
750 return State;
751
752 // Some method families have known return values.
753 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
754 switch (Msg->getMethodFamily()) {
755 default:
756 break;
757 case OMF_autorelease:
758 case OMF_retain:
759 case OMF_self: {
760 // These methods return their receivers.
761 return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
762 }
763 }
764 } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
765 SVal ThisV = C->getCXXThisVal();
766 ThisV = State->getSVal(ThisV.castAs<Loc>());
767 return State->BindExpr(E, LCtx, ThisV);
768 }
769
770 SVal R;
771 QualType ResultTy = Call.getResultType();
772 unsigned Count = currBldrCtx->blockCount();
773 if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
774 // Conjure a temporary if the function returns an object by value.
775 SVal Target;
776 assert(RTC->getStmt() == Call.getOriginExpr());
777 EvalCallOptions CallOpts; // FIXME: We won't really need those.
778 std::tie(State, Target) = handleConstructionContext(
779 Call.getOriginExpr(), State, currBldrCtx, LCtx,
780 RTC->getConstructionContext(), CallOpts);
781 const MemRegion *TargetR = Target.getAsRegion();
782 assert(TargetR);
783 // Invalidate the region so that it didn't look uninitialized. If this is
784 // a field or element constructor, we do not want to invalidate
785 // the whole structure. Pointer escape is meaningless because
786 // the structure is a product of conservative evaluation
787 // and therefore contains nothing interesting at this point.
789 ITraits.setTrait(TargetR,
791 State = State->invalidateRegions(TargetR, E, Count, LCtx,
792 /* CausesPointerEscape=*/false, nullptr,
793 &Call, &ITraits);
794
795 R = State->getSVal(Target.castAs<Loc>(), E->getType());
796 } else {
797 // Conjure a symbol if the return value is unknown.
798
799 // See if we need to conjure a heap pointer instead of
800 // a regular unknown pointer.
801 const auto *CNE = dyn_cast<CXXNewExpr>(E);
802 if (CNE && CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
803 R = svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count);
804 const MemRegion *MR = R.getAsRegion()->StripCasts();
805
806 // Store the extent of the allocated object(s).
807 SVal ElementCount;
808 if (const Expr *SizeExpr = CNE->getArraySize().value_or(nullptr)) {
809 ElementCount = State->getSVal(SizeExpr, LCtx);
810 } else {
811 ElementCount = svalBuilder.makeIntVal(1, /*IsUnsigned=*/true);
812 }
813
814 SVal ElementSize = getElementExtent(CNE->getAllocatedType(), svalBuilder);
815
816 SVal Size =
817 svalBuilder.evalBinOp(State, BO_Mul, ElementCount, ElementSize,
818 svalBuilder.getArrayIndexType());
819
820 // FIXME: This line is to prevent a crash. For more details please check
821 // issue #56264.
822 if (Size.isUndef())
823 Size = UnknownVal();
824
825 State = setDynamicExtent(State, MR, Size.castAs<DefinedOrUnknownSVal>());
826 } else {
827 R = svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy, Count);
828 }
829 }
830 return State->BindExpr(E, LCtx, R);
831}
832
833// Conservatively evaluate call by invalidating regions and binding
834// a conjured return value.
835void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
836 ExplodedNode *Pred, ProgramStateRef State) {
837 State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
838 State = bindReturnValue(Call, Pred->getLocationContext(), State);
839
840 // And make the result node.
841 static SimpleProgramPointTag PT("ExprEngine", "Conservative eval call");
842 Bldr.generateNode(Call.getProgramPoint(false, &PT), State, Pred);
843}
844
845ExprEngine::CallInlinePolicy
846ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
847 AnalyzerOptions &Opts,
848 const EvalCallOptions &CallOpts) {
849 const LocationContext *CurLC = Pred->getLocationContext();
850 const StackFrameContext *CallerSFC = CurLC->getStackFrame();
851 switch (Call.getKind()) {
852 case CE_Function:
854 case CE_Block:
855 break;
856 case CE_CXXMember:
859 return CIP_DisallowedAlways;
860 break;
861 case CE_CXXConstructor: {
863 return CIP_DisallowedAlways;
864
865 const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);
866
867 const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
868
870 const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
871 : nullptr;
872
873 if (llvm::isa_and_nonnull<NewAllocatedObjectConstructionContext>(CC) &&
874 !Opts.MayInlineCXXAllocator)
875 return CIP_DisallowedOnce;
876
877 if (CallOpts.IsArrayCtorOrDtor) {
878 if (!shouldInlineArrayConstruction(Pred->getState(), CtorExpr, CurLC))
879 return CIP_DisallowedOnce;
880 }
881
882 // Inlining constructors requires including initializers in the CFG.
883 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
884 assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
885 (void)ADC;
886
887 // If the destructor is trivial, it's always safe to inline the constructor.
888 if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
889 break;
890
891 // For other types, only inline constructors if destructor inlining is
892 // also enabled.
894 return CIP_DisallowedAlways;
895
897 // If we don't handle temporary destructors, we shouldn't inline
898 // their constructors.
899 if (CallOpts.IsTemporaryCtorOrDtor &&
900 !Opts.ShouldIncludeTemporaryDtorsInCFG)
901 return CIP_DisallowedOnce;
902
903 // If we did not find the correct this-region, it would be pointless
904 // to inline the constructor. Instead we will simply invalidate
905 // the fake temporary target.
907 return CIP_DisallowedOnce;
908
909 // If the temporary is lifetime-extended by binding it to a reference-type
910 // field within an aggregate, automatic destructors don't work properly.
912 return CIP_DisallowedOnce;
913 }
914
915 break;
916 }
918 // This doesn't really increase the cost of inlining ever, because
919 // the stack frame of the inherited constructor is trivial.
920 return CIP_Allowed;
921 }
922 case CE_CXXDestructor: {
924 return CIP_DisallowedAlways;
925
926 // Inlining destructors requires building the CFG correctly.
927 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
928 assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
929 (void)ADC;
930
931 if (CallOpts.IsArrayCtorOrDtor) {
932 if (!shouldInlineArrayDestruction(getElementCountOfArrayBeingDestructed(
933 Call, Pred->getState(), svalBuilder))) {
934 return CIP_DisallowedOnce;
935 }
936 }
937
938 // Allow disabling temporary destructor inlining with a separate option.
939 if (CallOpts.IsTemporaryCtorOrDtor &&
940 !Opts.MayInlineCXXTemporaryDtors)
941 return CIP_DisallowedOnce;
942
943 // If we did not find the correct this-region, it would be pointless
944 // to inline the destructor. Instead we will simply invalidate
945 // the fake temporary target.
947 return CIP_DisallowedOnce;
948 break;
949 }
951 [[fallthrough]];
952 case CE_CXXAllocator:
953 if (Opts.MayInlineCXXAllocator)
954 break;
955 // Do not inline allocators until we model deallocators.
956 // This is unfortunate, but basically necessary for smart pointers and such.
957 return CIP_DisallowedAlways;
958 case CE_ObjCMessage:
959 if (!Opts.MayInlineObjCMethod)
960 return CIP_DisallowedAlways;
961 if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
963 return CIP_DisallowedAlways;
964 break;
965 }
966
967 return CIP_Allowed;
968}
969
970/// Returns true if the given C++ class contains a member with the given name.
971static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
972 StringRef Name) {
973 const IdentifierInfo &II = Ctx.Idents.get(Name);
974 return RD->hasMemberName(Ctx.DeclarationNames.getIdentifier(&II));
975}
976
977/// Returns true if the given C++ class is a container or iterator.
978///
979/// Our heuristic for this is whether it contains a method named 'begin()' or a
980/// nested type named 'iterator' or 'iterator_category'.
981static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
982 return hasMember(Ctx, RD, "begin") ||
983 hasMember(Ctx, RD, "iterator") ||
984 hasMember(Ctx, RD, "iterator_category");
985}
986
987/// Returns true if the given function refers to a method of a C++ container
988/// or iterator.
989///
990/// We generally do a poor job modeling most containers right now, and might
991/// prefer not to inline their methods.
992static bool isContainerMethod(const ASTContext &Ctx,
993 const FunctionDecl *FD) {
994 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
995 return isContainerClass(Ctx, MD->getParent());
996 return false;
997}
998
999/// Returns true if the given function is the destructor of a class named
1000/// "shared_ptr".
1001static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
1002 const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
1003 if (!Dtor)
1004 return false;
1005
1006 const CXXRecordDecl *RD = Dtor->getParent();
1007 if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
1008 if (II->isStr("shared_ptr"))
1009 return true;
1010
1011 return false;
1012}
1013
1014/// Returns true if the function in \p CalleeADC may be inlined in general.
1015///
1016/// This checks static properties of the function, such as its signature and
1017/// CFG, to determine whether the analyzer should ever consider inlining it,
1018/// in any context.
1019bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
1020 AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
1021 // FIXME: Do not inline variadic calls.
1022 if (CallEvent::isVariadic(CalleeADC->getDecl()))
1023 return false;
1024
1025 // Check certain C++-related inlining policies.
1026 ASTContext &Ctx = CalleeADC->getASTContext();
1027 if (Ctx.getLangOpts().CPlusPlus) {
1028 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
1029 // Conditionally control the inlining of template functions.
1030 if (!Opts.MayInlineTemplateFunctions)
1031 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
1032 return false;
1033
1034 // Conditionally control the inlining of C++ standard library functions.
1035 if (!Opts.MayInlineCXXStandardLibrary)
1036 if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
1038 return false;
1039
1040 // Conditionally control the inlining of methods on objects that look
1041 // like C++ containers.
1042 if (!Opts.MayInlineCXXContainerMethods)
1043 if (!AMgr.isInCodeFile(FD->getLocation()))
1044 if (isContainerMethod(Ctx, FD))
1045 return false;
1046
1047 // Conditionally control the inlining of the destructor of C++ shared_ptr.
1048 // We don't currently do a good job modeling shared_ptr because we can't
1049 // see the reference count, so treating as opaque is probably the best
1050 // idea.
1051 if (!Opts.MayInlineCXXSharedPtrDtor)
1052 if (isCXXSharedPtrDtor(FD))
1053 return false;
1054 }
1055 }
1056
1057 // It is possible that the CFG cannot be constructed.
1058 // Be safe, and check if the CalleeCFG is valid.
1059 const CFG *CalleeCFG = CalleeADC->getCFG();
1060 if (!CalleeCFG)
1061 return false;
1062
1063 // Do not inline large functions.
1064 if (isHuge(CalleeADC))
1065 return false;
1066
1067 // It is possible that the live variables analysis cannot be
1068 // run. If so, bail out.
1069 if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
1070 return false;
1071
1072 return true;
1073}
1074
1075bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
1076 const ExplodedNode *Pred,
1077 const EvalCallOptions &CallOpts) {
1078 if (!D)
1079 return false;
1080
1082 AnalyzerOptions &Opts = AMgr.options;
1084 AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
1085
1086 // The auto-synthesized bodies are essential to inline as they are
1087 // usually small and commonly used. Note: we should do this check early on to
1088 // ensure we always inline these calls.
1089 if (CalleeADC->isBodyAutosynthesized())
1090 return true;
1091
1092 if (!AMgr.shouldInlineCall())
1093 return false;
1094
1095 // Check if this function has been marked as non-inlinable.
1096 std::optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
1097 if (MayInline) {
1098 if (!*MayInline)
1099 return false;
1100
1101 } else {
1102 // We haven't actually checked the static properties of this function yet.
1103 // Do that now, and record our decision in the function summaries.
1104 if (mayInlineDecl(CalleeADC)) {
1105 Engine.FunctionSummaries->markMayInline(D);
1106 } else {
1107 Engine.FunctionSummaries->markShouldNotInline(D);
1108 return false;
1109 }
1110 }
1111
1112 // Check if we should inline a call based on its kind.
1113 // FIXME: this checks both static and dynamic properties of the call, which
1114 // means we're redoing a bit of work that could be cached in the function
1115 // summary.
1116 CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
1117 if (CIP != CIP_Allowed) {
1118 if (CIP == CIP_DisallowedAlways) {
1119 assert(!MayInline || *MayInline);
1120 Engine.FunctionSummaries->markShouldNotInline(D);
1121 }
1122 return false;
1123 }
1124
1125 // Do not inline if recursive or we've reached max stack frame count.
1126 bool IsRecursive = false;
1127 unsigned StackDepth = 0;
1128 examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
1129 if ((StackDepth >= Opts.InlineMaxStackDepth) &&
1130 (!isSmall(CalleeADC) || IsRecursive))
1131 return false;
1132
1133 // Do not inline large functions too many times.
1134 if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
1135 Opts.MaxTimesInlineLarge) &&
1136 isLarge(CalleeADC)) {
1137 NumReachedInlineCountMax++;
1138 return false;
1139 }
1140
1141 if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
1142 return false;
1143
1144 return true;
1145}
1146
1147bool ExprEngine::shouldInlineArrayConstruction(const ProgramStateRef State,
1148 const CXXConstructExpr *CE,
1149 const LocationContext *LCtx) {
1150 if (!CE)
1151 return false;
1152
1153 // FIXME: Handle other arrays types.
1154 if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType())) {
1155 unsigned ArrSize = getContext().getConstantArrayElementCount(CAT);
1156
1157 // This might seem conter-intuitive at first glance, but the functions are
1158 // closely related. Reasoning about destructors depends only on the type
1159 // of the expression that initialized the memory region, which is the
1160 // CXXConstructExpr. So to avoid code repetition, the work is delegated
1161 // to the function that reasons about destructor inlining. Also note that
1162 // if the constructors of the array elements are inlined, the destructors
1163 // can also be inlined and if the destructors can be inline, it's safe to
1164 // inline the constructors.
1165 return shouldInlineArrayDestruction(ArrSize);
1166 }
1167
1168 // Check if we're inside an ArrayInitLoopExpr, and it's sufficiently small.
1169 if (auto Size = getPendingInitLoop(State, CE, LCtx))
1170 return shouldInlineArrayDestruction(*Size);
1171
1172 return false;
1173}
1174
1175bool ExprEngine::shouldInlineArrayDestruction(uint64_t Size) {
1176
1177 uint64_t maxAllowedSize = AMgr.options.maxBlockVisitOnPath;
1178
1179 // Declaring a 0 element array is also possible.
1180 return Size <= maxAllowedSize && Size > 0;
1181}
1182
1183bool ExprEngine::shouldRepeatCtorCall(ProgramStateRef State,
1184 const CXXConstructExpr *E,
1185 const LocationContext *LCtx) {
1186
1187 if (!E)
1188 return false;
1189
1190 auto Ty = E->getType();
1191
1192 // FIXME: Handle non constant array types
1193 if (const auto *CAT = dyn_cast<ConstantArrayType>(Ty)) {
1195 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1196 }
1197
1198 if (auto Size = getPendingInitLoop(State, E, LCtx))
1199 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1200
1201 return false;
1202}
1203
1205 const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
1206 if (!ICall)
1207 return false;
1208
1209 const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
1210 if (!MD)
1211 return false;
1213 return false;
1214
1215 return MD->isTrivial();
1216}
1217
1219 const CallEvent &CallTemplate,
1220 const EvalCallOptions &CallOpts) {
1221 // Make sure we have the most recent state attached to the call.
1222 ProgramStateRef State = Pred->getState();
1223 CallEventRef<> Call = CallTemplate.cloneWithState(State);
1224
1225 // Special-case trivial assignment operators.
1227 performTrivialCopy(Bldr, Pred, *Call);
1228 return;
1229 }
1230
1231 // Try to inline the call.
1232 // The origin expression here is just used as a kind of checksum;
1233 // this should still be safe even for CallEvents that don't come from exprs.
1234 const Expr *E = Call->getOriginExpr();
1235
1236 ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
1237 if (InlinedFailedState) {
1238 // If we already tried once and failed, make sure we don't retry later.
1239 State = InlinedFailedState;
1240 } else {
1241 RuntimeDefinition RD = Call->getRuntimeDefinition();
1242 Call->setForeign(RD.isForeign());
1243 const Decl *D = RD.getDecl();
1244 if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
1245 if (RD.mayHaveOtherDefinitions()) {
1247
1248 // Explore with and without inlining the call.
1249 if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
1250 BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
1251 return;
1252 }
1253
1254 // Don't inline if we're not in any dynamic dispatch mode.
1255 if (Options.getIPAMode() != IPAK_DynamicDispatch) {
1256 conservativeEvalCall(*Call, Bldr, Pred, State);
1257 return;
1258 }
1259 }
1260 ctuBifurcate(*Call, D, Bldr, Pred, State);
1261 return;
1262 }
1263 }
1264
1265 // If we can't inline it, clean up the state traits used only if the function
1266 // is inlined.
1267 State = removeStateTraitsUsedForArrayEvaluation(
1268 State, dyn_cast_or_null<CXXConstructExpr>(E), Call->getLocationContext());
1269
1270 // Also handle the return value and invalidate the regions.
1271 conservativeEvalCall(*Call, Bldr, Pred, State);
1272}
1273
1274void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
1275 const CallEvent &Call, const Decl *D,
1276 NodeBuilder &Bldr, ExplodedNode *Pred) {
1277 assert(BifurReg);
1278 BifurReg = BifurReg->StripCasts();
1279
1280 // Check if we've performed the split already - note, we only want
1281 // to split the path once per memory region.
1282 ProgramStateRef State = Pred->getState();
1283 const unsigned *BState =
1284 State->get<DynamicDispatchBifurcationMap>(BifurReg);
1285 if (BState) {
1286 // If we are on "inline path", keep inlining if possible.
1287 if (*BState == DynamicDispatchModeInlined)
1288 ctuBifurcate(Call, D, Bldr, Pred, State);
1289 // If inline failed, or we are on the path where we assume we
1290 // don't have enough info about the receiver to inline, conjure the
1291 // return value and invalidate the regions.
1292 conservativeEvalCall(Call, Bldr, Pred, State);
1293 return;
1294 }
1295
1296 // If we got here, this is the first time we process a message to this
1297 // region, so split the path.
1298 ProgramStateRef IState =
1299 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1300 DynamicDispatchModeInlined);
1301 ctuBifurcate(Call, D, Bldr, Pred, IState);
1302
1303 ProgramStateRef NoIState =
1304 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1305 DynamicDispatchModeConservative);
1306 conservativeEvalCall(Call, Bldr, Pred, NoIState);
1307
1308 NumOfDynamicDispatchPathSplits++;
1309}
1310
1312 ExplodedNodeSet &Dst) {
1313 ExplodedNodeSet dstPreVisit;
1314 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1315
1316 StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1317
1318 if (RS->getRetValue()) {
1319 for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1320 ei = dstPreVisit.end(); it != ei; ++it) {
1321 B.generateNode(RS, *it, (*it)->getState());
1322 }
1323 }
1324}
#define V(N, I)
Definition: ASTContext.h:3443
DynTypedNode Node
const Decl * D
Expr * E
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD)
Returns true if the given C++ class is a container or iterator.
static ProgramStateRef getInlineFailedState(ProgramStateRef State, const Stmt *CallE)
static std::pair< const Stmt *, const CFGBlock * > getLastStmt(const ExplodedNode *Node)
static bool isTrivialObjectAssignment(const CallEvent &Call)
static bool isCXXSharedPtrDtor(const FunctionDecl *FD)
Returns true if the given function is the destructor of a class named "shared_ptr".
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD, StringRef Name)
Returns true if the given C++ class contains a member with the given name.
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call, const StackFrameContext *calleeCtx)
STATISTIC(NumOfDynamicDispatchPathSplits, "The # of times we split the path due to imprecise dynamic dispatch info")
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy, StoreManager &StoreMgr)
Adjusts a return value when the called function's return type does not match the caller's expression ...
static bool isContainerMethod(const ASTContext &Ctx, const FunctionDecl *FD)
Returns true if the given function refers to a method of a C++ container or iterator.
static unsigned getElementCountOfArrayBeingDestructed(const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB)
llvm::MachO::Target Target
Definition: MachO.h:51
#define REGISTER_MAP_WITH_PROGRAMSTATE(Name, Key, Value)
Declares an immutable map of type NameTy, suitable for placement into the ProgramState.
#define REGISTER_TRAIT_WITH_PROGRAMSTATE(Name, Type)
Declares a program state trait for type Type called Name, and introduce a type named NameTy.
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
Definition: ASTContext.h:188
SourceManager & getSourceManager()
Definition: ASTContext.h:741
DeclarationNameTable DeclarationNames
Definition: ASTContext.h:684
IdentifierTable & Idents
Definition: ASTContext.h:680
const LangOptions & getLangOpts() const
Definition: ASTContext.h:834
uint64_t getConstantArrayElementCount(const ConstantArrayType *CA) const
Return number of constant array elements.
AnalysisDeclContext * getContext(const Decl *D)
AnalysisDeclContext contains the context data for the function, method or block under analysis.
const BlockInvocationContext * getBlockInvocationContext(const LocationContext *ParentLC, const BlockDecl *BD, const void *Data)
Obtain a context of the block invocation using its parent context.
const Decl * getDecl() const
static bool isInStdNamespace(const Decl *D)
const StackFrameContext * getStackFrame(LocationContext const *ParentLC, const Stmt *S, const CFGBlock *Blk, unsigned BlockCount, unsigned Index)
Obtain a context of the call stack using its parent context.
ASTContext & getASTContext() const
CFG::BuildOptions & getCFGBuildOptions()
Stores options for the analyzer from the command line.
bool mayInlineCXXMemberFunction(CXXInlineableMemberKind K) const
Returns the option controlling which C++ member functions will be considered for inlining.
unsigned maxBlockVisitOnPath
The maximum number of times the analyzer visits a block.
IPAKind getIPAMode() const
Returns the inter-procedural analysis mode.
CTUPhase1InliningKind getCTUPhase1Inlining() const
AnalysisPurgeMode AnalysisPurgeOpt
unsigned InlineMaxStackDepth
The inlining stack depth limit.
Represents a single basic block in a source-level CFG.
Definition: CFG.h:604
bool empty() const
Definition: CFG.h:947
succ_iterator succ_begin()
Definition: CFG.h:984
unsigned succ_size() const
Definition: CFG.h:1002
Represents C++ constructor call.
Definition: CFG.h:156
std::optional< T > getAs() const
Convert to the specified CFGElement type, returning std::nullopt if this CFGElement is not of the des...
Definition: CFG.h:109
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
Definition: CFG.h:1214
unsigned size() const
Return the total number of CFGBlocks within the CFG This is simply a renaming of the getNumBlockIDs()...
Definition: CFG.h:1407
bool isLinear() const
Returns true if the CFG has no branches.
Definition: CFG.cpp:5249
CFGBlock & getExit()
Definition: CFG.h:1324
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Definition: CFG.h:1402
BasePaths - Represents the set of paths from a derived class to one of its (direct or indirect) bases...
Represents a call to a C++ constructor.
Definition: ExprCXX.h:1546
CXXConstructionKind getConstructionKind() const
Determine whether this constructor is actually constructing a base class (rather than a complete obje...
Definition: ExprCXX.h:1657
Represents a C++ destructor within a class.
Definition: DeclCXX.h:2817
Represents a static or instance method of a struct/union/class.
Definition: DeclCXX.h:2078
const CXXRecordDecl * getParent() const
Return the parent of this method declaration, which is the class in which this method is defined.
Definition: DeclCXX.h:2204
bool isMoveAssignmentOperator() const
Determine whether this is a move assignment operator.
Definition: DeclCXX.cpp:2582
bool isCopyAssignmentOperator() const
Determine whether this is a copy-assignment operator, regardless of whether it was declared implicitl...
Definition: DeclCXX.cpp:2560
Represents a C++ struct/union/class.
Definition: DeclCXX.h:258
bool hasTrivialDestructor() const
Determine whether this class has a trivial destructor (C++ [class.dtor]p3)
Definition: DeclCXX.h:1378
bool hasMemberName(DeclarationName N) const
Determine whether this class has a member with the given name, possibly in a non-dependent base class...
bool isDerivedFrom(const CXXRecordDecl *Base) const
Determine whether this class is derived from the class Base.
Represents a point when we begin processing an inlined call.
Definition: ProgramPoint.h:628
const CFGBlock * getEntry() const
Returns the entry block in the CFG for the entered function.
Definition: ProgramPoint.h:643
const StackFrameContext * getCalleeContext() const
Definition: ProgramPoint.h:638
Represents a point when we finish the call exit sequence (for inlined call).
Definition: ProgramPoint.h:686
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
Definition: Expr.h:2874
ConstructionContext's subclasses describe different ways of constructing an object in C++.
Decl - This represents one declaration (or definition), e.g.
Definition: DeclBase.h:86
virtual Decl * getCanonicalDecl()
Retrieves the "canonical" declaration of the given declaration.
Definition: DeclBase.h:967
DeclarationName getIdentifier(const IdentifierInfo *ID)
Create a declaration name that is a simple identifier.
IdentifierInfo * getAsIdentifierInfo() const
Retrieve the IdentifierInfo * stored in this declaration name, or null if this declaration name isn't...
This is a meta program point, which should be skipped by all the diagnostic reasoning etc.
Definition: ProgramPoint.h:730
This represents one expression.
Definition: Expr.h:110
QualType getType() const
Definition: Expr.h:142
Represents a function declaration or definition.
Definition: Decl.h:1935
bool isTrivial() const
Whether this function is "trivial" in some specialized C++ senses.
Definition: Decl.h:2305
One of these records is kept for each identifier that is lexed.
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
It wraps the AnalysisDeclContext to represent both the call stack with the help of StackFrameContext ...
const Decl * getDecl() const
LLVM_ATTRIBUTE_RETURNS_NONNULL AnalysisDeclContext * getAnalysisDeclContext() const
const LocationContext * getParent() const
It might return null.
const StackFrameContext * getStackFrame() const
DeclarationName getDeclName() const
Get the actual, stored name of the declaration, which may be a special name.
Definition: Decl.h:319
Represents a parameter to a function.
Definition: Decl.h:1725
const StackFrameContext * getStackFrame() const
Definition: ProgramPoint.h:179
std::optional< T > getAs() const
Convert to the specified ProgramPoint type, returning std::nullopt if this ProgramPoint is not of the...
Definition: ProgramPoint.h:147
A (possibly-)qualified type.
Definition: Type.h:929
bool isNull() const
Return true if this QualType doesn't point to a type yet.
Definition: Type.h:996
QualType getCanonicalType() const
Definition: Type.h:7983
bool isConstQualified() const
Determine whether this type is const-qualified.
Definition: Type.h:8004
ReturnStmt - This represents a return, optionally of an expression: return; return 4;.
Definition: Stmt.h:3046
Expr * getRetValue()
Definition: Stmt.h:3077
bool isInSystemHeader(SourceLocation Loc) const
Returns if a SourceLocation is in a system header.
It represents a stack frame of the call stack (based on CallEvent).
const Stmt * getCallSite() const
const CFGBlock * getCallSiteBlock() const
Stmt - This represents one statement.
Definition: Stmt.h:84
bool isVoidType() const
Definition: Type.h:8510
bool isPointerType() const
Definition: Type.h:8186
CanQualType getCanonicalTypeUnqualified() const
bool isReferenceType() const
Definition: Type.h:8204
const CXXRecordDecl * getPointeeCXXRecordDecl() const
If this is a pointer or reference to a RecordType, return the CXXRecordDecl that the type refers to.
Definition: Type.cpp:1901
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
Definition: Type.cpp:738
bool isObjCObjectPointerType() const
Definition: Type.h:8328
static bool isInCodeFile(SourceLocation SL, const SourceManager &SM)
AnalysisDeclContext * getAnalysisDeclContext(const Decl *D)
AnalysisDeclContextManager & getAnalysisDeclContextManager()
AnalyzerOptions & getAnalyzerOptions() override
BlockDataRegion - A region that represents a block instance.
Definition: MemRegion.h:678
Represents a call to a C++ constructor.
Definition: CallEvent.h:984
const CXXConstructorDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Definition: CallEvent.h:1015
const CXXConstructExpr * getOriginExpr() const override
Returns the expression whose value will be the result of this call.
Definition: CallEvent.h:1011
Represents a non-static C++ member function call, no matter how it is written.
Definition: CallEvent.h:677
const FunctionDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Definition: CallEvent.cpp:693
Manages the lifetime of CallEvent objects.
Definition: CallEvent.h:1361
CallEventRef getSimpleCall(const CallExpr *E, ProgramStateRef State, const LocationContext *LCtx, CFGBlock::ConstCFGElementRef ElemRef)
Definition: CallEvent.cpp:1413
CallEventRef getCaller(const StackFrameContext *CalleeCtx, ProgramStateRef State)
Gets an outside caller given a callee context.
Definition: CallEvent.cpp:1438
Represents an abstract call to a function or method along a particular path.
Definition: CallEvent.h:153
CallEventRef< T > cloneWithState(ProgramStateRef NewState) const
Returns a copy of this CallEvent, but using the given state.
Definition: CallEvent.h:1478
static QualType getDeclaredResultType(const Decl *D)
Returns the result type of a function or method declaration.
Definition: CallEvent.cpp:352
static bool isVariadic(const Decl *D)
Returns true if the given decl is known to be variadic.
Definition: CallEvent.cpp:381
void runCheckersForPreCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng)
Run checkers for pre-visiting obj-c messages.
void runCheckersForEvalCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &CE, ExprEngine &Eng, const EvalCallOptions &CallOpts)
Run checkers for evaluating a call.
void runCheckersForPostObjCMessage(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const ObjCMethodCall &msg, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
void runCheckersForPostStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting Stmts.
void runCheckersForNewAllocator(const CXXAllocatorCall &Call, ExplodedNodeSet &Dst, ExplodedNode *Pred, ExprEngine &Eng, bool wasInlined=false)
Run checkers between C++ operator new and constructor calls.
void runCheckersForPreStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng)
Run checkers for pre-visiting Stmts.
void runCheckersForPostCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
WorkList * getCTUWorkList() const
Definition: CoreEngine.h:165
WorkList * getWorkList() const
Definition: CoreEngine.h:164
void enqueue(ExplodedNodeSet &Set)
Enqueue the given set of nodes onto the work list.
Definition: CoreEngine.cpp:594
ExplodedNode * getNode(const ProgramPoint &L, ProgramStateRef State, bool IsSink=false, bool *IsNew=nullptr)
Retrieve the node associated with a (Location,State) pair, where the 'Location' is a ProgramPoint in ...
void insert(const ExplodedNodeSet &S)
void Add(ExplodedNode *N)
const ProgramStateRef & getState() const
void addPredecessor(ExplodedNode *V, ExplodedGraph &G)
addPredeccessor - Adds a predecessor to the current node, and in tandem add this node as a successor ...
const StackFrameContext * getStackFrame() const
const LocationContext * getLocationContext() const
ProgramStateManager & getStateManager()
Definition: ExprEngine.h:414
void processCallEnter(NodeBuilderContext &BC, CallEnter CE, ExplodedNode *Pred)
Generate the entry node of the callee.
void processBeginOfFunction(NodeBuilderContext &BC, ExplodedNode *Pred, ExplodedNodeSet &Dst, const BlockEdge &L)
Called by CoreEngine.
void removeDead(ExplodedNode *Node, ExplodedNodeSet &Out, const Stmt *ReferenceStmt, const LocationContext *LC, const Stmt *DiagnosticStmt=nullptr, ProgramPoint::Kind K=ProgramPoint::PreStmtPurgeDeadSymbolsKind)
Run the analyzer's garbage collection - remove dead symbols and bindings from the state.
std::pair< ProgramStateRef, SVal > handleConstructionContext(const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx, const LocationContext *LCtx, const ConstructionContext *CC, EvalCallOptions &CallOpts, unsigned Idx=0)
A convenient wrapper around computeObjectUnderConstruction and updateObjectsUnderConstruction.
Definition: ExprEngine.h:741
void VisitReturnStmt(const ReturnStmt *R, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitReturnStmt - Transfer function logic for return statements.
const CoreEngine & getCoreEngine() const
Definition: ExprEngine.h:437
void processCallExit(ExplodedNode *Pred)
Generate the sequence of nodes that simulate the call exit and the post visit for CallExpr.
static std::optional< SVal > getObjectUnderConstruction(ProgramStateRef State, const ConstructionContextItem &Item, const LocationContext *LC)
By looking at a certain item that may be potentially part of an object's ConstructionContext,...
Definition: ExprEngine.cpp:603
CFGElement getCurrentCFGElement()
Return the CFG element corresponding to the worklist element that is currently being processed by Exp...
Definition: ExprEngine.h:690
@ Inline_Minimal
Do minimal inlining of callees.
Definition: ExprEngine.h:134
ProgramStateRef processPointerEscapedOnBind(ProgramStateRef State, ArrayRef< std::pair< SVal, SVal > > LocAndVals, const LocationContext *LCtx, PointerEscapeKind Kind, const CallEvent *Call)
Call PointerEscape callback when a value escapes as a result of bind.
static std::optional< unsigned > getIndexOfElementToConstruct(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retreives which element is being constructed in a non-POD type array.
Definition: ExprEngine.cpp:513
void VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitCall - Transfer function for function calls.
ASTContext & getContext() const
getContext - Return the ASTContext associated with this analysis.
Definition: ExprEngine.h:196
StoreManager & getStoreManager()
Definition: ExprEngine.h:416
CFGBlock::ConstCFGElementRef getCFGElementRef() const
Definition: ExprEngine.h:229
void evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred, const CallEvent &Call)
Evaluate a call, running pre- and post-call checkers and allowing checkers to be responsible for hand...
static std::optional< unsigned > getPendingArrayDestruction(ProgramStateRef State, const LocationContext *LCtx)
Retrieves which element is being destructed in a non-POD type array.
Definition: ExprEngine.cpp:532
CheckerManager & getCheckerManager() const
Definition: ExprEngine.h:204
ProgramStateRef bindReturnValue(const CallEvent &Call, const LocationContext *LCtx, ProgramStateRef State)
Create a new state in which the call return value is bound to the call origin expression.
void removeDeadOnEndOfFunction(NodeBuilderContext &BC, ExplodedNode *Pred, ExplodedNodeSet &Dst)
Remove dead bindings/symbols before exiting a function.
void defaultEvalCall(NodeBuilder &B, ExplodedNode *Pred, const CallEvent &Call, const EvalCallOptions &CallOpts={})
Default implementation of call evaluation.
AnalysisManager & getAnalysisManager()
Definition: ExprEngine.h:198
static std::optional< unsigned > getPendingInitLoop(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves the size of the array in the pending ArrayInitLoopExpr.
Definition: ExprEngine.cpp:486
std::optional< bool > mayInline(const Decl *D)
void markMayInline(const Decl *D)
unsigned getNumTimesInlined(const Decl *D)
void markShouldNotInline(const Decl *D)
void bumpNumTimesInlined(const Decl *D)
MemRegion - The root abstract class for all memory regions.
Definition: MemRegion.h:97
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * StripCasts(bool StripBaseAndDerivedCasts=true) const
Definition: MemRegion.cpp:1412
const CFGBlock * getBlock() const
Return the CFGBlock associated with this builder.
Definition: CoreEngine.h:209
unsigned blockCount() const
Returns the number of times the current basic block has been visited on the exploded graph path.
Definition: CoreEngine.h:216
This is the simplest builder which generates nodes in the ExplodedGraph.
Definition: CoreEngine.h:232
ExplodedNode * generateNode(const ProgramPoint &PP, ProgramStateRef State, ExplodedNode *Pred)
Generates a node in the ExplodedGraph.
Definition: CoreEngine.h:285
void takeNodes(const ExplodedNodeSet &S)
Definition: CoreEngine.h:327
Represents any expression that calls an Objective-C method.
Definition: CallEvent.h:1248
While alive, includes the current analysis stack in a crash trace.
CallEventManager & getCallEventManager()
Definition: ProgramState.h:571
Information about invalidation for a particular region/symbol.
Definition: MemRegion.h:1629
void setTrait(SymbolRef Sym, InvalidationKinds IK)
Definition: MemRegion.cpp:1802
Defines the runtime definition of the called function.
Definition: CallEvent.h:110
const MemRegion * getDispatchRegion()
When other definitions are possible, returns the region whose runtime type determines the method defi...
Definition: CallEvent.h:141
bool mayHaveOtherDefinitions()
Check if the definition we have is precise.
Definition: CallEvent.h:137
nonloc::ConcreteInt makeIntVal(const IntegerLiteral *integer)
Definition: SValBuilder.h:288
QualType getArrayIndexType() const
Definition: SValBuilder.h:157
SVal evalCast(SVal V, QualType CastTy, QualType OriginalTy)
Cast a given SVal to another SVal using given QualType's.
DefinedOrUnknownSVal conjureSymbolVal(const void *symbolTag, const Expr *expr, const LocationContext *LCtx, unsigned count)
Create a new symbol with a unique 'name'.
loc::MemRegionVal getCXXThis(const CXXMethodDecl *D, const StackFrameContext *SFC)
Return a memory region for the 'this' object reference.
SVal evalBinOp(ProgramStateRef state, BinaryOperator::Opcode op, SVal lhs, SVal rhs, QualType type)
DefinedSVal getConjuredHeapSymbolVal(const Expr *E, const LocationContext *LCtx, unsigned Count)
Conjure a symbol representing heap allocated memory region.
SVal - This represents a symbolic expression, which can be either an L-value or an R-value.
Definition: SVals.h:56
QualType getType(const ASTContext &) const
Try to get a reasonable type for the given value.
Definition: SVals.cpp:181
const MemRegion * getAsRegion() const
Definition: SVals.cpp:120
T castAs() const
Convert to the specified SVal type, asserting that this SVal is of the desired type.
Definition: SVals.h:83
This builder class is useful for generating nodes that resulted from visiting a statement.
Definition: CoreEngine.h:376
ExplodedNode * generateNode(const Stmt *S, ExplodedNode *Pred, ProgramStateRef St, const ProgramPointTag *tag=nullptr, ProgramPoint::Kind K=ProgramPoint::PostStmtKind)
Definition: CoreEngine.h:405
SVal evalDerivedToBase(SVal Derived, const CastExpr *Cast)
Evaluates a chain of derived-to-base casts through the path specified in Cast.
Definition: Store.cpp:252
virtual void enqueue(const WorkListUnit &U)=0
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * getRegion() const
Get the underlying region.
Definition: SVals.h:493
@ PSK_EscapeOutParameters
Escape for a new symbol that was generated into a region that the analyzer cannot follow during a con...
DefinedOrUnknownSVal getDynamicElementCount(ProgramStateRef State, const MemRegion *MR, SValBuilder &SVB, QualType Ty)
ProgramStateRef setDynamicExtent(ProgramStateRef State, const MemRegion *MR, DefinedOrUnknownSVal Extent)
Set the dynamic extent Extent of the region MR.
@ CE_CXXMember
Definition: CallEvent.h:63
@ CE_ObjCMessage
Definition: CallEvent.h:77
@ CE_CXXInheritedConstructor
Definition: CallEvent.h:69
@ CE_CXXStaticOperator
Definition: CallEvent.h:62
@ CE_CXXDestructor
Definition: CallEvent.h:65
@ CE_CXXDeallocator
Definition: CallEvent.h:73
@ CE_CXXAllocator
Definition: CallEvent.h:72
@ CE_CXXConstructor
Definition: CallEvent.h:68
@ CE_CXXMemberOperator
Definition: CallEvent.h:64
DefinedOrUnknownSVal getElementExtent(QualType Ty, SValBuilder &SVB)
The JSON file list parser is used to communicate input to InstallAPI.
@ IPAK_DynamicDispatch
Enable inlining of dynamically dispatched methods.
@ IPAK_DynamicDispatchBifurcate
Enable inlining of dynamically dispatched methods, bifurcate paths when exact type info is unavailabl...
@ OMF_autorelease
@ CIMK_Destructors
Refers to destructors (implicit or explicit).
@ CIMK_MemberFunctions
Refers to regular member function and operator calls.
@ CIMK_Constructors
Refers to constructors (implicit or explicit).
unsigned long uint64_t
Hints for figuring out of a call should be inlined during evalCall().
Definition: ExprEngine.h:97
bool IsTemporaryLifetimeExtendedViaAggregate
This call is a constructor for a temporary that is lifetime-extended by binding it to a reference-typ...
Definition: ExprEngine.h:112
bool IsTemporaryCtorOrDtor
This call is a constructor or a destructor of a temporary value.
Definition: ExprEngine.h:107
bool IsArrayCtorOrDtor
This call is a constructor or a destructor for a single element within an array, a part of array cons...
Definition: ExprEngine.h:104
bool IsCtorOrDtorWithImproperlyModeledTargetRegion
This call is a constructor or a destructor for which we do not currently compute the this-region corr...
Definition: ExprEngine.h:100
Traits for storing the call processing policy inside GDM.
Definition: ExprEngine.h:1004