[analyzer] Inline C++ operator new when c++-allocator-inlining is turned on.
author    Jordan Rose <jordan_rose@apple.com>  Tue, 11 Feb 2014 02:21:06 +0000
committer Jordan Rose <jordan_rose@apple.com>  Tue, 11 Feb 2014 02:21:06 +0000
This will let us stage in the modeling of operator new. The -analyzer-config
option 'c++-allocator-inlining' is currently off by default.

Patch by Karthik Bhat!

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@201122 91177308-0d34-0410-b5e6-96231b3b80d8
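
For context, a minimal sketch of how the new option is exercised from a lit
test. It mirrors the RUN line and the placement-new check added to
test/Analysis/inline.cpp below; the standalone file and the testPlacementNew
helper are illustrative, not part of this commit.

// RUN: %clang_cc1 -analyze -analyzer-checker=core,debug.ExprInspection \
// RUN:   -analyzer-config c++-allocator-inlining=true -verify %s

void clang_analyzer_checkInlined(bool);
typedef __typeof__(sizeof(int)) size_t;

// The standard placement new; with c++-allocator-inlining=true the analyzer
// steps into this body instead of treating the allocation call opaquely.
inline void *operator new(size_t, void *p) throw() {
  clang_analyzer_checkInlined(true); // expected-warning{{TRUE}}
  return p;
}

void testPlacementNew(char *buf) {
  int *x = new (buf) int(3); // the allocator call itself is now inlined
  (void)x;
}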

include/clang/StaticAnalyzer/Core/AnalyzerOptions.h
include/clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h
lib/StaticAnalyzer/Core/AnalyzerOptions.cpp
lib/StaticAnalyzer/Core/CoreEngine.cpp
lib/StaticAnalyzer/Core/ExprEngine.cpp
lib/StaticAnalyzer/Core/ExprEngineCXX.cpp
lib/StaticAnalyzer/Core/ExprEngineCallAndReturn.cpp
test/Analysis/inline.cpp

index 01e89dee3a1a3568a1943a04d46def6269d9d447..42b6ca5586aae975c3f27940e392cf4e0da02ae9 100644 (file)
@@ -199,6 +199,9 @@ private:
   /// \sa mayInlineTemplateFunctions
   Optional<bool> InlineTemplateFunctions;
 
+  /// \sa mayInlineCXXAllocator
+  Optional<bool> InlineCXXAllocator;
+
   /// \sa mayInlineCXXContainerCtorsAndDtors
   Optional<bool> InlineCXXContainerCtorsAndDtors;
 
@@ -291,6 +294,12 @@ public:
   /// accepts the values "true" and "false".
   bool mayInlineTemplateFunctions();
 
+  /// Returns whether or not allocator calls may be considered for inlining.
+  ///
+  /// This is controlled by the 'c++-allocator-inlining' config option, which
+  /// accepts the values "true" and "false".
+  bool mayInlineCXXAllocator();
+
   /// Returns whether or not constructors and destructors of C++ container
   /// objects may be considered for inlining.
   ///
index 2699f7d0d767c8457d32fa707684a6f84a8a06a8..4ef0a8ba350c0c937418c03c2ccd994025fe35de 100644 (file)
@@ -421,6 +421,10 @@ public:
                           const Stmt *S, bool IsBaseDtor,
                           ExplodedNode *Pred, ExplodedNodeSet &Dst);
 
+  void VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
+                                ExplodedNode *Pred,
+                                ExplodedNodeSet &Dst);
+
   void VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                        ExplodedNodeSet &Dst);
 
index 9dcf58babd278c778e51c7933b2887babf224434..1bea96e04589c87c4bd42aabf731224c50079c81 100644 (file)
@@ -134,6 +134,12 @@ bool AnalyzerOptions::mayInlineTemplateFunctions() {
                           /*Default=*/true);
 }
 
+bool AnalyzerOptions::mayInlineCXXAllocator() {
+  return getBooleanOption(InlineCXXAllocator,
+                          "c++-allocator-inlining",
+                          /*Default=*/false);
+}
+
 bool AnalyzerOptions::mayInlineCXXContainerCtorsAndDtors() {
   return getBooleanOption(InlineCXXContainerCtorsAndDtors,
                           "c++-container-inlining",
index b09b2c2ddfabda8c976856850d5e9919d409846c..fa2e78053008fa43eaf0205e9d7314ad51aa7f35 100644 (file)
@@ -532,6 +532,11 @@ void CoreEngine::enqueueStmtNode(ExplodedNode *N,
     return;
   }
 
+  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
+    WList->enqueue(N, Block, Idx+1);
+    return;
+  }
+
   // At this point, we know we're processing a normal statement.
   CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
   PostStmt Loc(CS.getStmt(), N->getLocationContext());
index 095e09795da31a3dabdc4e96ee79c09ebe7d2bbb..ba5025b481c2352550a972a1fb82ae265b53ba46 100644 (file)
@@ -553,12 +553,20 @@ void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
 
 void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                      ExplodedNode *Pred) {
-  //TODO: Implement VisitCXXNewAllocatorCall
   ExplodedNodeSet Dst;
-  NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
-  const LocationContext *LCtx = Pred->getLocationContext();
-  PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
-  Bldr.generateNode(PP, Pred->getState(), Pred);
+  AnalysisManager &AMgr = getAnalysisManager();
+  AnalyzerOptions &Opts = AMgr.options;
+  // TODO: We're not evaluating allocators for all cases just yet as
+  // we're not handling the return value correctly, which causes false
+  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
+  if (Opts.mayInlineCXXAllocator())
+    VisitCXXNewAllocatorCall(NE, Pred, Dst);
+  else {
+    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
+    const LocationContext *LCtx = Pred->getLocationContext();
+    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
+    Bldr.generateNode(PP, Pred->getState(), Pred);
+  }
   Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
 }
 
index eba4f94d80e66803508bcb9575f723d6756edcb0..fbbc73cc73bfc89a164c76eb4e16827342f84b32 100644 (file)
@@ -329,6 +329,32 @@ void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                              *Call, *this);
 }
 
+void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
+                                          ExplodedNode *Pred,
+                                          ExplodedNodeSet &Dst) {
+  ProgramStateRef State = Pred->getState();
+  const LocationContext *LCtx = Pred->getLocationContext();
+  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
+                                CNE->getStartLoc(),
+                                "Error evaluating New Allocator Call");
+  CallEventManager &CEMgr = getStateManager().getCallEventManager();
+  CallEventRef<CXXAllocatorCall> Call =
+    CEMgr.getCXXAllocatorCall(CNE, State, LCtx);
+
+  ExplodedNodeSet DstPreCall;
+  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
+                                            *Call, *this);
+
+  ExplodedNodeSet DstInvalidated;
+  StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
+  for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
+       I != E; ++I)
+    defaultEvalCall(Bldr, *I, *Call);
+  getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
+                                             *Call, *this);
+}
+
+
 void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst) {
   // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
index 863dddaa6f599948d6489ec7b8ee3c7af3a97ce9..e5b46cafab89fc079b1fb2021f32bc541c75c814 100644 (file)
@@ -664,6 +664,8 @@ static CallInlinePolicy mayInlineCallKind(const CallEvent &Call,
     break;
   }
   case CE_CXXAllocator:
+    if (Opts.mayInlineCXXAllocator())
+      break;
     // Do not inline allocators until we model deallocators.
     // This is unfortunate, but basically necessary for smart pointers and such.
     return CIP_DisallowedAlways;
index ca126ddf7f389d76b306fc241f746542c0132ef7..183df16e124d18ffaafea3fe248bb4a0a71f4082 100644 (file)
@@ -1,4 +1,4 @@
-// RUN: %clang_cc1 -analyze -analyzer-checker=core,unix.Malloc,debug.ExprInspection -analyzer-config ipa=inlining -verify %s
+// RUN: %clang_cc1 -analyze -analyzer-checker=core,unix.Malloc,debug.ExprInspection -analyzer-config ipa=inlining -analyzer-config c++-allocator-inlining=true -verify %s
 
 void clang_analyzer_eval(bool);
 void clang_analyzer_checkInlined(bool);
@@ -9,6 +9,7 @@ extern "C" void *malloc(size_t);
 // This is the standard placement new.
 inline void* operator new(size_t, void* __p) throw()
 {
+  clang_analyzer_checkInlined(true); // expected-warning{{TRUE}}
   return __p;
 }