clang  3.7.0
ExprEngineCallAndReturn.cpp
Go to the documentation of this file.
1 //=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file defines ExprEngine's support for calls and returns.
11 //
12 //===----------------------------------------------------------------------===//
13 
17 #include "clang/AST/DeclCXX.h"
18 #include "clang/AST/ParentMap.h"
22 #include "llvm/ADT/SmallSet.h"
23 #include "llvm/ADT/Statistic.h"
24 #include "llvm/Support/SaveAndRestore.h"
25 
26 using namespace clang;
27 using namespace ento;
28 
29 #define DEBUG_TYPE "ExprEngine"
30 
31 STATISTIC(NumOfDynamicDispatchPathSplits,
32  "The # of times we split the path due to imprecise dynamic dispatch info");
33 
34 STATISTIC(NumInlinedCalls,
35  "The # of times we inlined a call");
36 
37 STATISTIC(NumReachedInlineCountMax,
38  "The # of times we reached inline count maximum");
39 
41  // Get the entry block in the CFG of the callee.
42  const StackFrameContext *calleeCtx = CE.getCalleeContext();
43  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
44 
45  const CFG *CalleeCFG = calleeCtx->getCFG();
46  const CFGBlock *Entry = &(CalleeCFG->getEntry());
47 
48  // Validate the CFG.
49  assert(Entry->empty());
50  assert(Entry->succ_size() == 1);
51 
52  // Get the solitary successor.
53  const CFGBlock *Succ = *(Entry->succ_begin());
54 
55  // Construct an edge representing the starting location in the callee.
56  BlockEdge Loc(Entry, Succ, calleeCtx);
57 
58  ProgramStateRef state = Pred->getState();
59 
60  // Construct a new node and add it to the worklist.
61  bool isNew;
62  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
63  Node->addPredecessor(Pred, G);
64  if (isNew)
65  Engine.getWorkList()->enqueue(Node);
66 }
67 
68 // Find the last statement on the path to the exploded node and the
69 // corresponding Block.
70 static std::pair<const Stmt*,
72  const Stmt *S = nullptr;
73  const CFGBlock *Blk = nullptr;
74  const StackFrameContext *SF =
76 
77  // Back up through the ExplodedGraph until we reach a statement node in this
78  // stack frame.
79  while (Node) {
80  const ProgramPoint &PP = Node->getLocation();
81 
82  if (PP.getLocationContext()->getCurrentStackFrame() == SF) {
83  if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
84  S = SP->getStmt();
85  break;
86  } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
87  S = CEE->getCalleeContext()->getCallSite();
88  if (S)
89  break;
90 
91  // If there is no statement, this is an implicitly-generated call.
92  // We'll walk backwards over it and then continue the loop to find
93  // an actual statement.
95  do {
96  Node = Node->getFirstPred();
97  CE = Node->getLocationAs<CallEnter>();
98  } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
99 
100  // Continue searching the graph.
101  } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
102  Blk = BE->getSrc();
103  }
104  } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
105  // If we reached the CallEnter for this function, it has no statements.
106  if (CE->getCalleeContext() == SF)
107  break;
108  }
109 
110  if (Node->pred_empty())
111  return std::make_pair(nullptr, nullptr);
112 
113  Node = *Node->pred_begin();
114  }
115 
116  return std::make_pair(S, Blk);
117 }
118 
119 /// Adjusts a return value when the called function's return type does not
120 /// match the caller's expression type. This can happen when a dynamic call
121 /// is devirtualized, and the overridding method has a covariant (more specific)
122 /// return type than the parent's method. For C++ objects, this means we need
123 /// to add base casts.
124 static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
125  StoreManager &StoreMgr) {
126  // For now, the only adjustments we handle apply only to locations.
127  if (!V.getAs<Loc>())
128  return V;
129 
130  // If the types already match, don't do any unnecessary work.
131  ExpectedTy = ExpectedTy.getCanonicalType();
132  ActualTy = ActualTy.getCanonicalType();
133  if (ExpectedTy == ActualTy)
134  return V;
135 
136  // No adjustment is needed between Objective-C pointer types.
137  if (ExpectedTy->isObjCObjectPointerType() &&
138  ActualTy->isObjCObjectPointerType())
139  return V;
140 
141  // C++ object pointers may need "derived-to-base" casts.
142  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
143  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
144  if (ExpectedClass && ActualClass) {
145  CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
146  /*DetectVirtual=*/false);
147  if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
148  !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
149  return StoreMgr.evalDerivedToBase(V, Paths.front());
150  }
151  }
152 
153  // Unfortunately, Objective-C does not enforce that overridden methods have
154  // covariant return types, so we can't assert that that never happens.
155  // Be safe and return UnknownVal().
156  return UnknownVal();
157 }
158 
160  ExplodedNode *Pred,
161  ExplodedNodeSet &Dst) {
162  // Find the last statement in the function and the corresponding basic block.
163  const Stmt *LastSt = nullptr;
164  const CFGBlock *Blk = nullptr;
165  std::tie(LastSt, Blk) = getLastStmt(Pred);
166  if (!Blk || !LastSt) {
167  Dst.Add(Pred);
168  return;
169  }
170 
171  // Here, we destroy the current location context. We use the current
172  // function's entire body as a diagnostic statement, with which the program
173  // point will be associated. However, we only want to use LastStmt as a
174  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
175  // is dead.
176  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
177  const LocationContext *LCtx = Pred->getLocationContext();
178  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
179  LCtx->getAnalysisDeclContext()->getBody(),
181 }
182 
184  const StackFrameContext *calleeCtx) {
185  const Decl *RuntimeCallee = calleeCtx->getDecl();
186  const Decl *StaticDecl = Call->getDecl();
187  assert(RuntimeCallee);
188  if (!StaticDecl)
189  return true;
190  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
191 }
192 
193 /// Returns true if the CXXConstructExpr \p E was intended to construct a
194 /// prvalue for the region in \p V.
195 ///
196 /// Note that we can't just test for rvalue vs. glvalue because
197 /// CXXConstructExprs embedded in DeclStmts and initializers are considered
198 /// rvalues by the AST, and the analyzer would like to treat them as lvalues.
199 static bool isTemporaryPRValue(const CXXConstructExpr *E, SVal V) {
200  if (E->isGLValue())
201  return false;
202 
203  const MemRegion *MR = V.getAsRegion();
204  if (!MR)
205  return false;
206 
207  return isa<CXXTempObjectRegion>(MR);
208 }
209 
210 /// The call exit is simulated with a sequence of nodes, which occur between
211 /// CallExitBegin and CallExitEnd. The following operations occur between the
212 /// two program points:
213 /// 1. CallExitBegin (triggers the start of call exit sequence)
214 /// 2. Bind the return value
215 /// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
216 /// 4. CallExitEnd (switch to the caller context)
217 /// 5. PostStmt<CallExpr>
219  // Step 1 CEBNode was generated before the call.
221  const StackFrameContext *calleeCtx =
223 
224  // The parent context might not be a stack frame, so make sure we
225  // look up the first enclosing stack frame.
226  const StackFrameContext *callerCtx =
227  calleeCtx->getParent()->getCurrentStackFrame();
228 
229  const Stmt *CE = calleeCtx->getCallSite();
230  ProgramStateRef state = CEBNode->getState();
231  // Find the last statement in the function and the corresponding basic block.
232  const Stmt *LastSt = nullptr;
233  const CFGBlock *Blk = nullptr;
234  std::tie(LastSt, Blk) = getLastStmt(CEBNode);
235 
236  // Generate a CallEvent /before/ cleaning the state, so that we can get the
237  // correct value for 'this' (if necessary).
239  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);
240 
241  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.
242 
243  // If the callee returns an expression, bind its value to CallExpr.
244  if (CE) {
245  if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
246  const LocationContext *LCtx = CEBNode->getLocationContext();
247  SVal V = state->getSVal(RS, LCtx);
248 
249  // Ensure that the return type matches the type of the returned Expr.
250  if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
251  QualType ReturnedTy =
252  CallEvent::getDeclaredResultType(calleeCtx->getDecl());
253  if (!ReturnedTy.isNull()) {
254  if (const Expr *Ex = dyn_cast<Expr>(CE)) {
255  V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
256  getStoreManager());
257  }
258  }
259  }
260 
261  state = state->BindExpr(CE, callerCtx, V);
262  }
263 
264  // Bind the constructed object value to CXXConstructExpr.
265  if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
266  loc::MemRegionVal This =
267  svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
268  SVal ThisV = state->getSVal(This);
269 
270  // If the constructed object is a temporary prvalue, get its bindings.
271  if (isTemporaryPRValue(CCE, ThisV))
272  ThisV = state->getSVal(ThisV.castAs<Loc>());
273 
274  state = state->BindExpr(CCE, callerCtx, ThisV);
275  }
276  }
277 
278  // Step 3: BindedRetNode -> CleanedNodes
279  // If we can find a statement and a block in the inlined function, run remove
280  // dead bindings before returning from the call. This is important to ensure
281  // that we report the issues such as leaks in the stack contexts in which
282  // they occurred.
283  ExplodedNodeSet CleanedNodes;
284  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
285  static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
286  PostStmt Loc(LastSt, calleeCtx, &retValBind);
287  bool isNew;
288  ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
289  BindedRetNode->addPredecessor(CEBNode, G);
290  if (!isNew)
291  return;
292 
293  NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
294  currBldrCtx = &Ctx;
295  // Here, we call the Symbol Reaper with 0 statement and callee location
296  // context, telling it to clean up everything in the callee's context
297  // (and its children). We use the callee's function body as a diagnostic
298  // statement, with which the program point will be associated.
299  removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
300  calleeCtx->getAnalysisDeclContext()->getBody(),
302  currBldrCtx = nullptr;
303  } else {
304  CleanedNodes.Add(CEBNode);
305  }
306 
307  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
308  E = CleanedNodes.end(); I != E; ++I) {
309 
310  // Step 4: Generate the CallExit and leave the callee's context.
311  // CleanedNodes -> CEENode
312  CallExitEnd Loc(calleeCtx, callerCtx);
313  bool isNew;
314  ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();
315  ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
316  CEENode->addPredecessor(*I, G);
317  if (!isNew)
318  return;
319 
320  // Step 5: Perform the post-condition check of the CallExpr and enqueue the
321  // result onto the work list.
322  // CEENode -> Dst -> WorkList
323  NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
324  SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
325  &Ctx);
326  SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());
327 
328  CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
329 
330  ExplodedNodeSet DstPostCall;
331  getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
332  *UpdatedCall, *this,
333  /*WasInlined=*/true);
334 
335  ExplodedNodeSet Dst;
336  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
337  getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
338  *this,
339  /*WasInlined=*/true);
340  } else if (CE) {
341  getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
342  *this, /*WasInlined=*/true);
343  } else {
344  Dst.insert(DstPostCall);
345  }
346 
347  // Enqueue the next element in the block.
348  for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
349  PSI != PSE; ++PSI) {
350  Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
351  calleeCtx->getIndex()+1);
352  }
353  }
354 }
355 
356 void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
357  bool &IsRecursive, unsigned &StackDepth) {
358  IsRecursive = false;
359  StackDepth = 0;
360 
361  while (LCtx) {
362  if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
363  const Decl *DI = SFC->getDecl();
364 
365  // Mark recursive (and mutually recursive) functions and always count
366  // them when measuring the stack depth.
367  if (DI == D) {
368  IsRecursive = true;
369  ++StackDepth;
370  LCtx = LCtx->getParent();
371  continue;
372  }
373 
374  // Do not count the small functions when determining the stack depth.
375  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
376  const CFG *CalleeCFG = CalleeADC->getCFG();
377  if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
378  ++StackDepth;
379  }
380  LCtx = LCtx->getParent();
381  }
382 
383 }
384 
385 static bool IsInStdNamespace(const FunctionDecl *FD) {
386  const DeclContext *DC = FD->getEnclosingNamespaceContext();
387  const NamespaceDecl *ND = dyn_cast<NamespaceDecl>(DC);
388  if (!ND)
389  return false;
390 
391  while (const DeclContext *Parent = ND->getParent()) {
392  if (!isa<NamespaceDecl>(Parent))
393  break;
394  ND = cast<NamespaceDecl>(Parent);
395  }
396 
397  return ND->isStdNamespace();
398 }
399 
400 // The GDM component containing the dynamic dispatch bifurcation info. When
401 // the exact type of the receiver is not known, we want to explore both paths -
402 // one on which we do inline it and the other one on which we don't. This is
403 // done to ensure we do not drop coverage.
404 // This is the map from the receiver region to a bool, specifying either we
405 // consider this region's information precise or not along the given path.
406 namespace {
408  DynamicDispatchModeInlined = 1,
409  DynamicDispatchModeConservative
410  };
411 }
412 REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
414  unsigned))
415 
416 bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
417  NodeBuilder &Bldr, ExplodedNode *Pred,
419  assert(D);
420 
421  const LocationContext *CurLC = Pred->getLocationContext();
422  const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
423  const LocationContext *ParentOfCallee = CallerSFC;
424  if (Call.getKind() == CE_Block) {
425  const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
426  assert(BR && "If we have the block definition we should have its region");
427  AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
428  ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
429  cast<BlockDecl>(D),
430  BR);
431  }
432 
433  // This may be NULL, but that's fine.
434  const Expr *CallE = Call.getOriginExpr();
435 
436  // Construct a new stack frame for the callee.
437  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
438  const StackFrameContext *CalleeSFC =
439  CalleeADC->getStackFrame(ParentOfCallee, CallE,
440  currBldrCtx->getBlock(),
441  currStmtIdx);
442 
443 
444  CallEnter Loc(CallE, CalleeSFC, CurLC);
445 
446  // Construct a new state which contains the mapping from actual to
447  // formal arguments.
448  State = State->enterStackFrame(Call, CalleeSFC);
449 
450  bool isNew;
451  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
452  N->addPredecessor(Pred, G);
453  if (isNew)
454  Engine.getWorkList()->enqueue(N);
455  }
456 
457  // If we decided to inline the call, the successor has been manually
458  // added onto the work list so remove it from the node builder.
459  Bldr.takeNodes(Pred);
460 
461  NumInlinedCalls++;
462 
463  // Mark the decl as visited.
464  if (VisitedCallees)
465  VisitedCallees->insert(D);
466 
467  return true;
468 }
469 
471  const Stmt *CallE) {
472  const void *ReplayState = State->get<ReplayWithoutInlining>();
473  if (!ReplayState)
474  return nullptr;
475 
476  assert(ReplayState == CallE && "Backtracked to the wrong call.");
477  (void)CallE;
478 
479  return State->remove<ReplayWithoutInlining>();
480 }
481 
483  ExplodedNodeSet &dst) {
484  // Perform the previsit of the CallExpr.
485  ExplodedNodeSet dstPreVisit;
486  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
487 
488  // Get the call in its initial state. We use this as a template to perform
489  // all the checks.
491  CallEventRef<> CallTemplate
492  = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());
493 
494  // Evaluate the function call. We try each of the checkers
 495  // to see if they can evaluate the function call.
496  ExplodedNodeSet dstCallEvaluated;
497  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
498  I != E; ++I) {
499  evalCall(dstCallEvaluated, *I, *CallTemplate);
500  }
501 
502  // Finally, perform the post-condition check of the CallExpr and store
503  // the created nodes in 'Dst'.
504  // Note that if the call was inlined, dstCallEvaluated will be empty.
505  // The post-CallExpr check will occur in processCallExit.
506  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
507  *this);
508 }
509 
511  const CallEvent &Call) {
512  // WARNING: At this time, the state attached to 'Call' may be older than the
513  // state in 'Pred'. This is a minor optimization since CheckerManager will
514  // use an updated CallEvent instance when calling checkers, but if 'Call' is
515  // ever used directly in this function all callers should be updated to pass
516  // the most recent state. (It is probably not worth doing the work here since
517  // for some callers this will not be necessary.)
518 
519  // Run any pre-call checks using the generic call interface.
520  ExplodedNodeSet dstPreVisit;
521  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, Call, *this);
522 
523  // Actually evaluate the function call. We try each of the checkers
 524  // to see if they can evaluate the function call, and get a callback at
525  // defaultEvalCall if all of them fail.
526  ExplodedNodeSet dstCallEvaluated;
527  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
528  Call, *this);
529 
530  // Finally, run any post-call checks.
531  getCheckerManager().runCheckersForPostCall(Dst, dstCallEvaluated,
532  Call, *this);
533 }
534 
536  const LocationContext *LCtx,
538  const Expr *E = Call.getOriginExpr();
539  if (!E)
540  return State;
541 
542  // Some method families have known return values.
543  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
544  switch (Msg->getMethodFamily()) {
545  default:
546  break;
547  case OMF_autorelease:
548  case OMF_retain:
549  case OMF_self: {
550  // These methods return their receivers.
551  return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
552  }
553  }
554  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
555  SVal ThisV = C->getCXXThisVal();
556 
557  // If the constructed object is a temporary prvalue, get its bindings.
558  if (isTemporaryPRValue(cast<CXXConstructExpr>(E), ThisV))
559  ThisV = State->getSVal(ThisV.castAs<Loc>());
560 
561  return State->BindExpr(E, LCtx, ThisV);
562  }
563 
564  // Conjure a symbol if the return value is unknown.
565  QualType ResultTy = Call.getResultType();
566  SValBuilder &SVB = getSValBuilder();
567  unsigned Count = currBldrCtx->blockCount();
568  SVal R = SVB.conjureSymbolVal(nullptr, E, LCtx, ResultTy, Count);
569  return State->BindExpr(E, LCtx, R);
570 }
571 
572 // Conservatively evaluate call by invalidating regions and binding
573 // a conjured return value.
574 void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
575  ExplodedNode *Pred,
577  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
578  State = bindReturnValue(Call, Pred->getLocationContext(), State);
579 
580  // And make the result node.
581  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
582 }
583 
588 };
589 
591  const ExplodedNode *Pred,
592  AnalyzerOptions &Opts) {
593  const LocationContext *CurLC = Pred->getLocationContext();
594  const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
595  switch (Call.getKind()) {
596  case CE_Function:
597  case CE_Block:
598  break;
599  case CE_CXXMember:
602  return CIP_DisallowedAlways;
603  break;
604  case CE_CXXConstructor: {
606  return CIP_DisallowedAlways;
607 
608  const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);
609 
610  // FIXME: We don't handle constructors or destructors for arrays properly.
611  // Even once we do, we still need to be careful about implicitly-generated
612  // initializers for array fields in default move/copy constructors.
613  const MemRegion *Target = Ctor.getCXXThisVal().getAsRegion();
614  if (Target && isa<ElementRegion>(Target))
615  return CIP_DisallowedOnce;
616 
617  // FIXME: This is a hack. We don't use the correct region for a new
618  // expression, so if we inline the constructor its result will just be
619  // thrown away. This short-term hack is tracked in <rdar://problem/12180598>
620  // and the longer-term possible fix is discussed in PR12014.
621  const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
622  if (const Stmt *Parent = CurLC->getParentMap().getParent(CtorExpr))
623  if (isa<CXXNewExpr>(Parent))
624  return CIP_DisallowedOnce;
625 
626  // Inlining constructors requires including initializers in the CFG.
627  const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
628  assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
629  (void)ADC;
630 
631  // If the destructor is trivial, it's always safe to inline the constructor.
632  if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
633  break;
634 
635  // For other types, only inline constructors if destructor inlining is
636  // also enabled.
638  return CIP_DisallowedAlways;
639 
640  // FIXME: This is a hack. We don't handle temporary destructors
641  // right now, so we shouldn't inline their constructors.
643  if (!Target || !isa<DeclRegion>(Target))
644  return CIP_DisallowedOnce;
645 
646  break;
647  }
648  case CE_CXXDestructor: {
650  return CIP_DisallowedAlways;
651 
652  // Inlining destructors requires building the CFG correctly.
653  const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
654  assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
655  (void)ADC;
656 
657  const CXXDestructorCall &Dtor = cast<CXXDestructorCall>(Call);
658 
659  // FIXME: We don't handle constructors or destructors for arrays properly.
660  const MemRegion *Target = Dtor.getCXXThisVal().getAsRegion();
661  if (Target && isa<ElementRegion>(Target))
662  return CIP_DisallowedOnce;
663 
664  break;
665  }
666  case CE_CXXAllocator:
667  if (Opts.mayInlineCXXAllocator())
668  break;
669  // Do not inline allocators until we model deallocators.
670  // This is unfortunate, but basically necessary for smart pointers and such.
671  return CIP_DisallowedAlways;
672  case CE_ObjCMessage:
673  if (!Opts.mayInlineObjCMethod())
674  return CIP_DisallowedAlways;
675  if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
677  return CIP_DisallowedAlways;
678  break;
679  }
680 
681  return CIP_Allowed;
682 }
683 
684 /// Returns true if the given C++ class contains a member with the given name.
685 static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
686  StringRef Name) {
687  const IdentifierInfo &II = Ctx.Idents.get(Name);
688  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
689  if (!RD->lookup(DeclName).empty())
690  return true;
691 
692  CXXBasePaths Paths(false, false, false);
694  DeclName.getAsOpaquePtr(),
695  Paths))
696  return true;
697 
698  return false;
699 }
700 
701 /// Returns true if the given C++ class is a container or iterator.
702 ///
703 /// Our heuristic for this is whether it contains a method named 'begin()' or a
704 /// nested type named 'iterator' or 'iterator_category'.
705 static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
706  return hasMember(Ctx, RD, "begin") ||
707  hasMember(Ctx, RD, "iterator") ||
708  hasMember(Ctx, RD, "iterator_category");
709 }
710 
711 /// Returns true if the given function refers to a method of a C++ container
712 /// or iterator.
713 ///
714 /// We generally do a poor job modeling most containers right now, and might
715 /// prefer not to inline their methods.
716 static bool isContainerMethod(const ASTContext &Ctx,
717  const FunctionDecl *FD) {
718  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
719  return isContainerClass(Ctx, MD->getParent());
720  return false;
721 }
722 
723 /// Returns true if the given function is the destructor of a class named
724 /// "shared_ptr".
725 static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
726  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
727  if (!Dtor)
728  return false;
729 
730  const CXXRecordDecl *RD = Dtor->getParent();
731  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
732  if (II->isStr("shared_ptr"))
733  return true;
734 
735  return false;
736 }
737 
738 /// Returns true if the function in \p CalleeADC may be inlined in general.
739 ///
740 /// This checks static properties of the function, such as its signature and
741 /// CFG, to determine whether the analyzer should ever consider inlining it,
742 /// in any context.
743 static bool mayInlineDecl(AnalysisDeclContext *CalleeADC,
744  AnalyzerOptions &Opts) {
745  // FIXME: Do not inline variadic calls.
746  if (CallEvent::isVariadic(CalleeADC->getDecl()))
747  return false;
748 
749  // Check certain C++-related inlining policies.
750  ASTContext &Ctx = CalleeADC->getASTContext();
751  if (Ctx.getLangOpts().CPlusPlus) {
752  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
753  // Conditionally control the inlining of template functions.
754  if (!Opts.mayInlineTemplateFunctions())
755  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
756  return false;
757 
758  // Conditionally control the inlining of C++ standard library functions.
759  if (!Opts.mayInlineCXXStandardLibrary())
760  if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
761  if (IsInStdNamespace(FD))
762  return false;
763 
764  // Conditionally control the inlining of methods on objects that look
765  // like C++ containers.
766  if (!Opts.mayInlineCXXContainerMethods())
767  if (!Ctx.getSourceManager().isInMainFile(FD->getLocation()))
768  if (isContainerMethod(Ctx, FD))
769  return false;
770 
771  // Conditionally control the inlining of the destructor of C++ shared_ptr.
772  // We don't currently do a good job modeling shared_ptr because we can't
773  // see the reference count, so treating as opaque is probably the best
774  // idea.
775  if (!Opts.mayInlineCXXSharedPtrDtor())
776  if (isCXXSharedPtrDtor(FD))
777  return false;
778 
779  }
780  }
781 
782  // It is possible that the CFG cannot be constructed.
783  // Be safe, and check if the CalleeCFG is valid.
784  const CFG *CalleeCFG = CalleeADC->getCFG();
785  if (!CalleeCFG)
786  return false;
787 
788  // Do not inline large functions.
789  if (CalleeCFG->getNumBlockIDs() > Opts.getMaxInlinableSize())
790  return false;
791 
792  // It is possible that the live variables analysis cannot be
793  // run. If so, bail out.
794  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
795  return false;
796 
797  return true;
798 }
799 
800 bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
801  const ExplodedNode *Pred) {
802  if (!D)
803  return false;
804 
806  AnalyzerOptions &Opts = AMgr.options;
808  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
809 
810  // Temporary object destructor processing is currently broken, so we never
811  // inline them.
812  // FIXME: Remove this once temp destructors are working.
813  if (isa<CXXDestructorCall>(Call)) {
814  if ((*currBldrCtx->getBlock())[currStmtIdx].getAs<CFGTemporaryDtor>())
815  return false;
816  }
817 
818  // The auto-synthesized bodies are essential to inline as they are
819  // usually small and commonly used. Note: we should do this check early on to
820  // ensure we always inline these calls.
821  if (CalleeADC->isBodyAutosynthesized())
822  return true;
823 
824  if (!AMgr.shouldInlineCall())
825  return false;
826 
827  // Check if this function has been marked as non-inlinable.
828  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
829  if (MayInline.hasValue()) {
830  if (!MayInline.getValue())
831  return false;
832 
833  } else {
834  // We haven't actually checked the static properties of this function yet.
835  // Do that now, and record our decision in the function summaries.
836  if (mayInlineDecl(CalleeADC, Opts)) {
837  Engine.FunctionSummaries->markMayInline(D);
838  } else {
839  Engine.FunctionSummaries->markShouldNotInline(D);
840  return false;
841  }
842  }
843 
844  // Check if we should inline a call based on its kind.
845  // FIXME: this checks both static and dynamic properties of the call, which
846  // means we're redoing a bit of work that could be cached in the function
847  // summary.
848  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts);
849  if (CIP != CIP_Allowed) {
850  if (CIP == CIP_DisallowedAlways) {
851  assert(!MayInline.hasValue() || MayInline.getValue());
852  Engine.FunctionSummaries->markShouldNotInline(D);
853  }
854  return false;
855  }
856 
857  const CFG *CalleeCFG = CalleeADC->getCFG();
858 
859  // Do not inline if recursive or we've reached max stack frame count.
860  bool IsRecursive = false;
861  unsigned StackDepth = 0;
862  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
863  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
864  ((CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize())
865  || IsRecursive))
866  return false;
867 
868  // Do not inline large functions too many times.
869  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
870  Opts.getMaxTimesInlineLarge()) &&
871  CalleeCFG->getNumBlockIDs() > 13) {
872  NumReachedInlineCountMax++;
873  return false;
874  }
875 
876  if (HowToInline == Inline_Minimal &&
877  (CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize()
878  || IsRecursive))
879  return false;
880 
881  Engine.FunctionSummaries->bumpNumTimesInlined(D);
882 
883  return true;
884 }
885 
886 static bool isTrivialObjectAssignment(const CallEvent &Call) {
887  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
888  if (!ICall)
889  return false;
890 
891  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
892  if (!MD)
893  return false;
895  return false;
896 
897  return MD->isTrivial();
898 }
899 
901  const CallEvent &CallTemplate) {
902  // Make sure we have the most recent state attached to the call.
903  ProgramStateRef State = Pred->getState();
904  CallEventRef<> Call = CallTemplate.cloneWithState(State);
905 
906  // Special-case trivial assignment operators.
907  if (isTrivialObjectAssignment(*Call)) {
908  performTrivialCopy(Bldr, Pred, *Call);
909  return;
910  }
911 
912  // Try to inline the call.
913  // The origin expression here is just used as a kind of checksum;
914  // this should still be safe even for CallEvents that don't come from exprs.
915  const Expr *E = Call->getOriginExpr();
916 
917  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
918  if (InlinedFailedState) {
919  // If we already tried once and failed, make sure we don't retry later.
920  State = InlinedFailedState;
921  } else {
922  RuntimeDefinition RD = Call->getRuntimeDefinition();
923  const Decl *D = RD.getDecl();
924  if (shouldInlineCall(*Call, D, Pred)) {
925  if (RD.mayHaveOtherDefinitions()) {
927 
928  // Explore with and without inlining the call.
929  if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
930  BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
931  return;
932  }
933 
934  // Don't inline if we're not in any dynamic dispatch mode.
935  if (Options.getIPAMode() != IPAK_DynamicDispatch) {
936  conservativeEvalCall(*Call, Bldr, Pred, State);
937  return;
938  }
939  }
940 
941  // We are not bifurcating and we do have a Decl, so just inline.
942  if (inlineCall(*Call, D, Bldr, Pred, State))
943  return;
944  }
945  }
946 
947  // If we can't inline it, handle the return value and invalidate the regions.
948  conservativeEvalCall(*Call, Bldr, Pred, State);
949 }
950 
951 void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
952  const CallEvent &Call, const Decl *D,
953  NodeBuilder &Bldr, ExplodedNode *Pred) {
954  assert(BifurReg);
955  BifurReg = BifurReg->StripCasts();
956 
957  // Check if we've performed the split already - note, we only want
958  // to split the path once per memory region.
959  ProgramStateRef State = Pred->getState();
960  const unsigned *BState =
961  State->get<DynamicDispatchBifurcationMap>(BifurReg);
962  if (BState) {
963  // If we are on "inline path", keep inlining if possible.
964  if (*BState == DynamicDispatchModeInlined)
965  if (inlineCall(Call, D, Bldr, Pred, State))
966  return;
967  // If inline failed, or we are on the path where we assume we
968  // don't have enough info about the receiver to inline, conjure the
969  // return value and invalidate the regions.
970  conservativeEvalCall(Call, Bldr, Pred, State);
971  return;
972  }
973 
974  // If we got here, this is the first time we process a message to this
975  // region, so split the path.
976  ProgramStateRef IState =
977  State->set<DynamicDispatchBifurcationMap>(BifurReg,
978  DynamicDispatchModeInlined);
979  inlineCall(Call, D, Bldr, Pred, IState);
980 
981  ProgramStateRef NoIState =
982  State->set<DynamicDispatchBifurcationMap>(BifurReg,
983  DynamicDispatchModeConservative);
984  conservativeEvalCall(Call, Bldr, Pred, NoIState);
985 
986  NumOfDynamicDispatchPathSplits++;
987  return;
988 }
989 
990 
992  ExplodedNodeSet &Dst) {
993 
994  ExplodedNodeSet dstPreVisit;
995  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
996 
997  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
998 
999  if (RS->getRetValue()) {
1000  for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1001  ei = dstPreVisit.end(); it != ei; ++it) {
1002  B.generateNode(RS, *it, (*it)->getState());
1003  }
1004  }
1005 }
AnalysisDeclContextManager & getAnalysisDeclContextManager()
bool isDerivedFrom(const CXXRecordDecl *Base) const
Determine whether this class is derived from the class Base.
unsigned InlineMaxStackDepth
The inlining stack depth limit.
ASTContext & getASTContext() const
SVal evalDerivedToBase(SVal Derived, const CastExpr *Cast)
Definition: Store.cpp:235
MemRegion - The root abstract class for all memory regions.
Definition: MemRegion.h:77
const CXXConstructorDecl * getDecl() const override
Definition: CallEvent.h:714
bool hasTrivialDestructor() const
Determine whether this class has a trivial destructor (C++ [class.dtor]p3)
Definition: DeclCXX.h:1263
succ_iterator succ_begin()
Definition: CFG.h:542
void VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitCall - Transfer function for function calls.
void processCallExit(ExplodedNode *Pred) override
const StackFrameContext * getCalleeContext() const
Definition: ProgramPoint.h:571
This builder class is useful for generating nodes that resulted from visiting a statement. The main difference from its parent NodeBuilder is that it creates a statement specific ProgramPoint.
Definition: CoreEngine.h:345
CFGBlock & getEntry()
Definition: CFG.h:863
ProgramPoint getLocation() const
getLocation - Returns the edge associated with the given node.
static bool mayInlineDecl(AnalysisDeclContext *CalleeADC, AnalyzerOptions &Opts)
Manages the lifetime of CallEvent objects.
Definition: CallEvent.h:897
Optional< bool > mayInline(const Decl *D)
static bool isContainerMethod(const ASTContext &Ctx, const FunctionDecl *FD)
IdentifierInfo * getAsIdentifierInfo() const
bool isStdNamespace() const
Definition: DeclBase.cpp:835
void markShouldNotInline(const Decl *D)
NamespaceDecl - Represent a C++ namespace.
Definition: Decl.h:400
Represents a call to a C++ constructor.
Definition: ExprCXX.h:1075
virtual const CXXConstructExpr * getOriginExpr() const
Definition: CallEvent.h:710
static bool IsInStdNamespace(const FunctionDecl *FD)
CallEventRef getSimpleCall(const CallExpr *E, ProgramStateRef State, const LocationContext *LCtx)
Definition: CallEvent.cpp:901
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call, const StackFrameContext *calleeCtx)
static std::pair< const Stmt *, const CFGBlock * > getLastStmt(const ExplodedNode *Node)
bool isCopyAssignmentOperator() const
Determine whether this is a copy-assignment operator, regardless of whether it was declared implicitly or explicitly.
Definition: DeclCXX.cpp:1526
CallEventRef< T > cloneWithState(ProgramStateRef NewState) const
Returns a copy of this CallEvent, but using the given state.
loc::MemRegionVal getCXXThis(const CXXMethodDecl *D, const StackFrameContext *SFC)
Return a memory region for the 'this' object reference.
unsigned succ_size() const
Definition: CFG.h:552
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD, StringRef Name)
Returns true if the given C++ class contains a member with the given name.
void removeDead(ExplodedNode *Node, ExplodedNodeSet &Out, const Stmt *ReferenceStmt, const LocationContext *LC, const Stmt *DiagnosticStmt=nullptr, ProgramPoint::Kind K=ProgramPoint::PreStmtPurgeDeadSymbolsKind)
Run the analyzer's garbage collection - remove dead symbols and bindings from the state...
Definition: ExprEngine.cpp:341
bool isMoveAssignmentOperator() const
Determine whether this is a move assignment operator.
Definition: DeclCXX.cpp:1547
bool isBodyAutosynthesized() const
Checks if the body of the Decl is generated by the BodyFarm.
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic analysis of a file.
Definition: ASTContext.h:89
LineState State
AnalysisDeclContext * getAnalysisDeclContext() const
const Expr * getOriginExpr() const
Returns the expression whose value will be the result of this call. May be null.
Definition: CallEvent.h:197
const CXXRecordDecl * getPointeeCXXRecordDecl() const
Definition: Type.cpp:1490
void addPredecessor(ExplodedNode *V, ExplodedGraph &G)
void runCheckersForPostObjCMessage(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const ObjCMethodCall &msg, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
Optional< T > getLocationAs() const LLVM_LVALUE_FUNCTION
IdentifierTable & Idents
Definition: ASTContext.h:439
Represents any expression that calls an Objective-C method.
Definition: CallEvent.h:791
DeclContext * getEnclosingNamespaceContext()
Retrieve the nearest enclosing namespace context.
Definition: DeclBase.cpp:1474
virtual Kind getKind() const =0
Returns the kind of call this is.
const LangOptions & getLangOpts() const
Definition: ASTContext.h:533
unsigned getNumTimesInlined(const Decl *D)
void runCheckersForPreCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng)
Run checkers for pre-visiting call events.
const CXXRecordDecl * getParent() const
Definition: DeclCXX.h:1817
void VisitReturnStmt(const ReturnStmt *R, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitReturnStmt - Transfer function logic for return statements.
ProgramPoint getProgramPoint(bool IsPreVisit=false, const ProgramPointTag *Tag=nullptr) const
Returns an appropriate ProgramPoint for this call.
Definition: CallEvent.cpp:178
const Decl * getDecl() const
ExplodedNode * getFirstPred()
void runCheckersForPostCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting call events.
static bool isCXXSharedPtrDtor(const FunctionDecl *FD)
static bool isVariadic(const Decl *D)
Returns true if the given decl is known to be variadic.
Definition: CallEvent.cpp:272
virtual Decl * getCanonicalDecl()
Retrieves the "canonical" declaration of the given declaration.
Definition: DeclBase.h:733
AnalysisDeclContext * getAnalysisDeclContext(const Decl *D)
const LocationContext * getLocationContext() const
void removeDeadOnEndOfFunction(NodeBuilderContext &BC, ExplodedNode *Pred, ExplodedNodeSet &Dst)
Remove dead bindings/symbols before exiting a function.
static bool isTrivialObjectAssignment(const CallEvent &Call)
ProgramStateRef bindReturnValue(const CallEvent &Call, const LocationContext *LCtx, ProgramStateRef State)
Create a new state in which the call return value is binded to the call origin expression.
CanQualType getCanonicalTypeUnqualified() const
Represents a non-static C++ member function call, no matter how it is written.
Definition: CallEvent.h:522
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy, StoreManager &StoreMgr)
DeclarationNameTable DeclarationNames
Definition: ASTContext.h:442
Stmt * getBody() const
Get the body of the Declaration.
unsigned blockCount() const
Returns the number of times the current basic block has been visited on the exploded graph path...
Definition: CoreEngine.h:191
const MemRegion * StripCasts(bool StripBaseCasts=true) const
Definition: MemRegion.cpp:1089
CheckerManager & getCheckerManager() const
Definition: ExprEngine.h:127
AnalysisDeclContext * getContext(const Decl *D)
void runCheckersForPostStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting Stmts.
const ProgramStateRef & getState() const
IPAKind getIPAMode()
Returns the inter-procedural analysis mode.
CallEventRef getCaller(const StackFrameContext *CalleeCtx, ProgramStateRef State)
Definition: CallEvent.cpp:923
Represents an implicit call to a C++ destructor.
Definition: CallEvent.h:639
Represents a C++ destructor within a class.
Definition: DeclCXX.h:2358
ParentMap & getParentMap() const
This is the simplest builder which generates nodes in the ExplodedGraph.
Definition: CoreEngine.h:207
Optional< T > getAs() const
Convert to the specified SVal type, returning None if this SVal is not of the desired type...
Definition: SVals.h:86
void Add(ExplodedNode *N)
#define bool
Definition: stdbool.h:31
Refers to regular member function and operator calls.
ConstructionKind getConstructionKind() const
Determine whether this constructor is actually constructing a base class (rather than a complete object).
Definition: ExprCXX.h:1172
bool mayInlineObjCMethod()
Returns true if ObjectiveC inlining is enabled, false otherwise.
bool isInSystemHeader(SourceLocation Loc) const
Returns if a SourceLocation is in a system header.
void runCheckersForEvalCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &CE, ExprEngine &Eng)
Run checkers for evaluating a call.
REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap, CLANG_ENTO_PROGRAMSTATE_MAP(const MemRegion *, unsigned)) bool ExprEngine
DeclContext * getParent()
getParent - Returns the containing DeclContext.
Definition: DeclBase.h:1174
void bumpNumTimesInlined(const Decl *D)
static bool isTemporaryPRValue(const CXXConstructExpr *E, SVal V)
DeclarationName getDeclName() const
Definition: Decl.h:189
ExplodedNode * getNode(const ProgramPoint &L, ProgramStateRef State, bool IsSink=false, bool *IsNew=nullptr)
Retrieve the node associated with a (Location,State) pair, where the 'Location' is a ProgramPoint in the CFG.
const StackFrameContext * getStackFrame(LocationContext const *Parent, const Stmt *S, const CFGBlock *Blk, unsigned Idx)
bool isGLValue() const
Definition: Expr.h:253
Enable inlining of dynamically dispatched methods.
bool isInMainFile(SourceLocation Loc) const
Returns whether the PresumedLoc for a given SourceLocation is in the main file.
const MatchFinder::MatchFinderOptions & Options
Defines the runtime definition of the called function.
Definition: CallEvent.h:77
Stmt * getParent(Stmt *) const
Definition: ParentMap.cpp:120
DefinedOrUnknownSVal conjureSymbolVal(const void *symbolTag, const Expr *expr, const LocationContext *LCtx, unsigned count)
Create a new symbol with a unique 'name'.
const FunctionDecl * getDecl() const override
Definition: CallEvent.cpp:393
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
const StackFrameContext * getCurrentStackFrame() const
AnalysisManager & getAnalysisManager() override
Definition: ExprEngine.h:125
CallEventManager & getCallEventManager()
Definition: ProgramState.h:507
void evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred, const CallEvent &Call)
Represents a static or instance method of a struct/union/class.
Definition: DeclCXX.h:1717
DeclarationName getIdentifier(const IdentifierInfo *ID)
#define CLANG_ENTO_PROGRAMSTATE_MAP(Key, Value)
const Decl * getDecl() const
Do minimal inlining of callees.
Definition: ExprEngine.h:55
Refers to destructors (implicit or explicit).
lookup_result lookup(DeclarationName Name) const
Definition: DeclBase.cpp:1339
void runCheckersForPreStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng)
Run checkers for pre-visiting Stmts.
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD)
const MemRegion * getDispatchRegion()
Definition: CallEvent.h:101
void insert(const ExplodedNodeSet &S)
ast_type_traits::DynTypedNode Node
ProgramStateRef invalidateRegions(unsigned BlockCount, ProgramStateRef Orig=nullptr) const
Returns a new state with all argument regions invalidated.
Definition: CallEvent.cpp:138
static ProgramStateRef getInlineFailedState(ProgramStateRef State, const Stmt *CallE)
const LocationContext * getParent() const
CFG::BuildOptions & getCFGBuildOptions()
Return the build options used to construct the CFG.
SValBuilder & getSValBuilder()
Definition: ExprEngine.h:131
StoreManager & getStoreManager()
Definition: ExprEngine.h:300
const LocationContext * getLocationContext() const
Definition: ProgramPoint.h:155
bool lookupInBases(BaseMatchesCallback *BaseMatches, void *UserData, CXXBasePaths &Paths) const
Look for entities within the base classes of this C++ class, transitively searching all base class su...
void markMayInline(const Decl *D)
bool isAmbiguous(CanQualType BaseType)
Determine whether the path from the most-derived type to the given base type is ambiguous (i.e., it refers to multiple subobjects of the same base type).
const MemRegion * getAsRegion() const
Definition: SVals.cpp:135
const Expr * getRetValue() const
Definition: Stmt.cpp:1013
bool mayHaveOtherDefinitions()
Check if the definition we have is precise. If not, it is possible that the call dispatches to another definition at run time.
Definition: CallEvent.h:97
Represents an abstract call to a function or method along a particular path.
Definition: CallEvent.h:113
Optional< T > getAs() const
Convert to the specified ProgramPoint type, returning None if this ProgramPoint is not of the desired...
Definition: ProgramPoint.h:127
ProgramStateManager & getStateManager() override
Definition: ExprEngine.h:298
const CFGBlock * getBlock() const
Return the CFGBlock associated with this builder.
Definition: CoreEngine.h:187
QualType getResultType() const
Returns the result type, adjusted for references.
Definition: CallEvent.cpp:27
void * getAsOpaquePtr() const
QualType getCanonicalType() const
Definition: Type.h:5055
WorkList * getWorkList() const
Definition: CoreEngine.h:148
static QualType getDeclaredResultType(const Decl *D)
Returns the result type of a function or method declaration.
Definition: CallEvent.cpp:243
CXXBasePath & front()
bool isTrivial() const
Definition: Decl.h:1800
void processCallEnter(CallEnter CE, ExplodedNode *Pred) override
Generate the entry node of the callee.
STATISTIC(NumOfDynamicDispatchPathSplits,"The # of times we split the path due to imprecise dynamic dispatch info")
SourceManager & getSourceManager()
Definition: ASTContext.h:494
void defaultEvalCall(NodeBuilder &B, ExplodedNode *Pred, const CallEvent &Call)
Default implementation of call evaluation.
ExplodedNode * generateNode(const ProgramPoint &PP, ProgramStateRef State, ExplodedNode *Pred)
Generates a node in the ExplodedGraph.
Definition: CoreEngine.h:260
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate.h) and friends (in DeclFriend.h).
Represents a C++ struct/union/class.
Definition: DeclCXX.h:285
bool mayInlineCXXMemberFunction(CXXInlineableMemberKind K)
bool isObjCObjectPointerType() const
Definition: Type.h:5304
CallEventRef< T > cloneWithState(ProgramStateRef State) const
Definition: CallEvent.h:58
pred_iterator pred_begin()
SVal getCXXThisVal() const
Returns the value of the implicit 'this' object.
Definition: CallEvent.cpp:567
ExplodedNode * generateNode(const Stmt *S, ExplodedNode *Pred, ProgramStateRef St, const ProgramPointTag *tag=nullptr, ProgramPoint::Kind K=ProgramPoint::PostStmtKind)
Definition: CoreEngine.h:375
static CallInlinePolicy mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred, AnalyzerOptions &Opts)
virtual void enqueue(const WorkListUnit &U)=0
const BlockInvocationContext * getBlockInvocationContext(const LocationContext *parent, const BlockDecl *BD, const void *ContextData)
AnalysisPurgeMode AnalysisPurgeOpt
bool empty() const
Definition: CFG.h:517
bool isNull() const
isNull - Return true if this QualType doesn't point to a type yet.
Definition: Type.h:633
unsigned getNumBlockIDs() const
Definition: CFG.h:932
T castAs() const
Convert to the specified SVal type, asserting that this SVal is of the desired type.
Definition: SVals.h:75
Represents a call to a C++ constructor.
Definition: CallEvent.h:687
const CoreEngine & getCoreEngine() const
Definition: ExprEngine.h:320
static bool FindOrdinaryMember(const CXXBaseSpecifier *Specifier, CXXBasePath &Path, void *Name)
Base-class lookup callback that determines whether there exists a member with the given name...