1 | //=== MallocChecker.cpp - A malloc/free checker -------------------*- C++ -*--// |
2 | // |
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | // See https://llvm.org/LICENSE.txt for license information. |
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | // |
7 | //===----------------------------------------------------------------------===// |
8 | // |
9 | // This file defines a variety of memory management related checkers, such as |
10 | // leak, double free, and use-after-free. |
11 | // |
12 | // The following checkers are defined here: |
13 | // |
14 | // * MallocChecker |
15 | // Despite its name, it models all sorts of memory allocations and |
16 | // de- or reallocation, including but not limited to malloc, free, |
//      realloc, new, delete. It also reports on a variety of memory misuse
18 | // errors. |
19 | // Many other checkers interact very closely with this checker, in fact, |
20 | // most are merely options to this one. Other checkers may register |
21 | // MallocChecker, but do not enable MallocChecker's reports (more details |
22 | // to follow around its field, ChecksEnabled). |
//      It also has a boolean "Optimistic" checker option, which, if set to
//      true, causes the checker to model user-defined memory management
//      functions annotated with the ownership_takes, ownership_holds and
//      ownership_returns attributes.
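//      As an illustrative sketch (the attribute spellings are real, the
//      function names below are invented), such annotated functions look
//      like:
//
//        void *my_alloc(size_t) __attribute((ownership_returns(malloc)));
//        void my_free(void *) __attribute((ownership_takes(malloc, 1)));
//        void my_hold(void *) __attribute((ownership_holds(malloc, 1)));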
27 | // |
28 | // * NewDeleteChecker |
29 | // Enables the modeling of new, new[], delete, delete[] in MallocChecker, |
30 | // and checks for related double-free and use-after-free errors. |
31 | // |
32 | // * NewDeleteLeaksChecker |
33 | // Checks for leaks related to new, new[], delete, delete[]. |
34 | // Depends on NewDeleteChecker. |
35 | // |
36 | // * MismatchedDeallocatorChecker |
//      Enables checking whether memory is deallocated with the corresponding
//      allocation function in MallocChecker, e.g. that malloc() allocated
//      regions are only freed by free(), new by delete, and new[] by delete[].
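//      A typical mismatch looks like this (illustrative snippet, not taken
//      from the checker itself):
//
//        int *p = new int[10];
//        free(p);   // mismatch: allocated with new[], freed with free()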
40 | // |
41 | // InnerPointerChecker interacts very closely with MallocChecker, but unlike |
// the above checkers, it has its own file, hence the many InnerPointerChecker
43 | // related headers and non-static functions. |
44 | // |
45 | //===----------------------------------------------------------------------===// |
46 | |
47 | #include "AllocationState.h" |
48 | #include "InterCheckerAPI.h" |
49 | #include "NoOwnershipChangeVisitor.h" |
50 | #include "clang/AST/Attr.h" |
51 | #include "clang/AST/DeclCXX.h" |
52 | #include "clang/AST/DeclTemplate.h" |
53 | #include "clang/AST/Expr.h" |
54 | #include "clang/AST/ExprCXX.h" |
55 | #include "clang/AST/ParentMap.h" |
56 | #include "clang/ASTMatchers/ASTMatchFinder.h" |
57 | #include "clang/ASTMatchers/ASTMatchers.h" |
58 | #include "clang/Analysis/ProgramPoint.h" |
59 | #include "clang/Basic/LLVM.h" |
60 | #include "clang/Basic/SourceManager.h" |
61 | #include "clang/Basic/TargetInfo.h" |
62 | #include "clang/Lex/Lexer.h" |
63 | #include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h" |
64 | #include "clang/StaticAnalyzer/Checkers/Taint.h" |
65 | #include "clang/StaticAnalyzer/Core/BugReporter/BugType.h" |
66 | #include "clang/StaticAnalyzer/Core/BugReporter/CommonBugCategories.h" |
67 | #include "clang/StaticAnalyzer/Core/Checker.h" |
68 | #include "clang/StaticAnalyzer/Core/CheckerManager.h" |
69 | #include "clang/StaticAnalyzer/Core/PathSensitive/CallDescription.h" |
70 | #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" |
71 | #include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h" |
72 | #include "clang/StaticAnalyzer/Core/PathSensitive/CheckerHelpers.h" |
73 | #include "clang/StaticAnalyzer/Core/PathSensitive/DynamicExtent.h" |
74 | #include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h" |
75 | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h" |
76 | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h" |
77 | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState_Fwd.h" |
78 | #include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h" |
79 | #include "clang/StaticAnalyzer/Core/PathSensitive/SymbolManager.h" |
80 | #include "llvm/ADT/STLExtras.h" |
81 | #include "llvm/ADT/StringExtras.h" |
82 | #include "llvm/Support/Casting.h" |
83 | #include "llvm/Support/Compiler.h" |
84 | #include "llvm/Support/ErrorHandling.h" |
85 | #include "llvm/Support/raw_ostream.h" |
86 | #include <functional> |
87 | #include <optional> |
88 | #include <utility> |
89 | |
90 | using namespace clang; |
91 | using namespace ento; |
92 | using namespace std::placeholders; |
93 | |
94 | //===----------------------------------------------------------------------===// |
95 | // The types of allocation we're modeling. This is used to check whether a |
// dynamically allocated object is deallocated with the correct function, e.g.
// that operator delete is not used on an object created by malloc(), and that
// alloca regions are never deallocated manually.
99 | //===----------------------------------------------------------------------===// |
100 | |
101 | namespace { |
102 | |
103 | // Used to check correspondence between allocators and deallocators. |
104 | enum AllocationFamilyKind { |
105 | AF_None, |
106 | AF_Malloc, |
107 | AF_CXXNew, |
108 | AF_CXXNewArray, |
109 | AF_IfNameIndex, |
110 | AF_Alloca, |
111 | AF_InnerBuffer, |
112 | AF_Custom, |
113 | }; |
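
// A rough, illustrative mapping from families to the calls that produce them
// (not exhaustive; see the CallDescriptionMaps and the ownership attribute
// handling below): AF_Malloc covers malloc/calloc/realloc and similar C
// allocators, AF_CXXNew and AF_CXXNewArray cover operator new and operator
// new[], AF_IfNameIndex covers if_nameindex(), AF_Alloca covers alloca-like
// allocations, AF_InnerBuffer marks buffers owned by another object (used
// together with InnerPointerChecker), and AF_Custom carries the class name
// given in an ownership_* attribute.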
114 | |
115 | struct AllocationFamily { |
116 | AllocationFamilyKind Kind; |
117 | std::optional<StringRef> CustomName; |
118 | |
119 | explicit AllocationFamily(AllocationFamilyKind AKind, |
120 | std::optional<StringRef> Name = std::nullopt) |
121 | : Kind(AKind), CustomName(Name) { |
122 | assert((Kind != AF_Custom || CustomName.has_value()) && |
           "Custom family must also specify the name");
124 | |
    // Preserve previous behavior when "malloc" class means AF_Malloc
    if (Kind == AF_Custom && CustomName.value() == "malloc") {
127 | Kind = AF_Malloc; |
128 | CustomName = std::nullopt; |
129 | } |
130 | } |
131 | |
132 | bool operator==(const AllocationFamily &Other) const { |
    return std::tie(Kind, CustomName) ==
           std::tie(Other.Kind, Other.CustomName);
134 | } |
135 | |
136 | bool operator!=(const AllocationFamily &Other) const { |
137 | return !(*this == Other); |
138 | } |
139 | |
140 | void Profile(llvm::FoldingSetNodeID &ID) const { |
    ID.AddInteger(Kind);
142 | |
143 | if (Kind == AF_Custom) |
      ID.AddString(CustomName.value());
145 | } |
146 | }; |
147 | |
148 | } // end of anonymous namespace |
149 | |
150 | /// Print names of allocators and deallocators. |
151 | /// |
152 | /// \returns true on success. |
153 | static bool printMemFnName(raw_ostream &os, CheckerContext &C, const Expr *E); |
154 | |
155 | /// Print expected name of an allocator based on the deallocator's family |
156 | /// derived from the DeallocExpr. |
157 | static void printExpectedAllocName(raw_ostream &os, AllocationFamily Family); |
158 | |
159 | /// Print expected name of a deallocator based on the allocator's |
160 | /// family. |
161 | static void printExpectedDeallocName(raw_ostream &os, AllocationFamily Family); |
162 | |
163 | //===----------------------------------------------------------------------===// |
164 | // The state of a symbol, in terms of memory management. |
165 | //===----------------------------------------------------------------------===// |
166 | |
167 | namespace { |
168 | |
169 | class RefState { |
170 | enum Kind { |
171 | // Reference to allocated memory. |
172 | Allocated, |
173 | // Reference to zero-allocated memory. |
174 | AllocatedOfSizeZero, |
175 | // Reference to released/freed memory. |
176 | Released, |
177 | // The responsibility for freeing resources has transferred from |
178 | // this reference. A relinquished symbol should not be freed. |
179 | Relinquished, |
180 | // We are no longer guaranteed to have observed all manipulations |
181 | // of this pointer/memory. For example, it could have been |
182 | // passed as a parameter to an opaque function. |
183 | Escaped |
184 | }; |
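
  // Typical transitions, informally: a symbol starts out Allocated (or
  // AllocatedOfSizeZero) and later becomes Released (freed), Relinquished
  // (ownership handed off), or Escaped (passed to code we cannot see).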
185 | |
186 | const Stmt *S; |
187 | |
188 | Kind K; |
189 | AllocationFamily Family; |
190 | |
191 | RefState(Kind k, const Stmt *s, AllocationFamily family) |
192 | : S(s), K(k), Family(family) { |
193 | assert(family.Kind != AF_None); |
194 | } |
195 | |
196 | public: |
197 | bool isAllocated() const { return K == Allocated; } |
198 | bool isAllocatedOfSizeZero() const { return K == AllocatedOfSizeZero; } |
199 | bool isReleased() const { return K == Released; } |
200 | bool isRelinquished() const { return K == Relinquished; } |
201 | bool isEscaped() const { return K == Escaped; } |
202 | AllocationFamily getAllocationFamily() const { return Family; } |
203 | const Stmt *getStmt() const { return S; } |
204 | |
205 | bool operator==(const RefState &X) const { |
206 | return K == X.K && S == X.S && Family == X.Family; |
207 | } |
208 | |
209 | static RefState getAllocated(AllocationFamily family, const Stmt *s) { |
210 | return RefState(Allocated, s, family); |
211 | } |
212 | static RefState getAllocatedOfSizeZero(const RefState *RS) { |
213 | return RefState(AllocatedOfSizeZero, RS->getStmt(), |
214 | RS->getAllocationFamily()); |
215 | } |
216 | static RefState getReleased(AllocationFamily family, const Stmt *s) { |
217 | return RefState(Released, s, family); |
218 | } |
219 | static RefState getRelinquished(AllocationFamily family, const Stmt *s) { |
220 | return RefState(Relinquished, s, family); |
221 | } |
222 | static RefState getEscaped(const RefState *RS) { |
223 | return RefState(Escaped, RS->getStmt(), RS->getAllocationFamily()); |
224 | } |
225 | |
226 | void Profile(llvm::FoldingSetNodeID &ID) const { |
    ID.AddInteger(K);
    ID.AddPointer(S);
229 | Family.Profile(ID); |
230 | } |
231 | |
232 | LLVM_DUMP_METHOD void dump(raw_ostream &OS) const { |
233 | switch (K) { |
234 | #define CASE(ID) case ID: OS << #ID; break; |
235 | CASE(Allocated) |
236 | CASE(AllocatedOfSizeZero) |
237 | CASE(Released) |
238 | CASE(Relinquished) |
239 | CASE(Escaped) |
240 | } |
241 | } |
242 | |
  LLVM_DUMP_METHOD void dump() const { dump(llvm::errs()); }
244 | }; |
245 | |
246 | } // end of anonymous namespace |
247 | |
248 | REGISTER_MAP_WITH_PROGRAMSTATE(RegionState, SymbolRef, RefState) |
249 | |
250 | /// Check if the memory associated with this symbol was released. |
251 | static bool isReleased(SymbolRef Sym, CheckerContext &C); |
252 | |
253 | /// Update the RefState to reflect the new memory allocation. |
254 | /// The optional \p RetVal parameter specifies the newly allocated pointer |
255 | /// value; if unspecified, the value of expression \p E is used. |
256 | static ProgramStateRef |
257 | MallocUpdateRefState(CheckerContext &C, const Expr *E, ProgramStateRef State, |
258 | AllocationFamily Family, |
259 | std::optional<SVal> RetVal = std::nullopt); |
260 | |
261 | //===----------------------------------------------------------------------===// |
262 | // The modeling of memory reallocation. |
263 | // |
264 | // The terminology 'toPtr' and 'fromPtr' will be used: |
265 | // toPtr = realloc(fromPtr, 20); |
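//
// A pattern that this modeling must get right (illustrative):
//
//   toPtr = realloc(fromPtr, 20);
//   if (!toPtr) {
//     // Reallocation failed: 'fromPtr' is still allocated and still owned
//     // here, so it must eventually be freed (reallocf, by contrast, frees
//     // it on failure).
//     free(fromPtr);
//   }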
266 | //===----------------------------------------------------------------------===// |
267 | |
268 | REGISTER_SET_WITH_PROGRAMSTATE(ReallocSizeZeroSymbols, SymbolRef) |
269 | |
270 | namespace { |
271 | |
272 | /// The state of 'fromPtr' after reallocation is known to have failed. |
273 | enum OwnershipAfterReallocKind { |
274 | // The symbol needs to be freed (e.g.: realloc) |
275 | OAR_ToBeFreedAfterFailure, |
276 | // The symbol has been freed (e.g.: reallocf) |
277 | OAR_FreeOnFailure, |
  // The symbol doesn't have to be freed (e.g.: we aren't sure if, how and
  // where 'fromPtr' was allocated:
280 | // void Haha(int *ptr) { |
281 | // ptr = realloc(ptr, 67); |
282 | // // ... |
283 | // } |
284 | // ). |
285 | OAR_DoNotTrackAfterFailure |
286 | }; |
287 | |
288 | /// Stores information about the 'fromPtr' symbol after reallocation. |
289 | /// |
290 | /// This is important because realloc may fail, and that needs special modeling. |
291 | /// Whether reallocation failed or not will not be known until later, so we'll |
292 | /// store whether upon failure 'fromPtr' will be freed, or needs to be freed |
293 | /// later, etc. |
294 | struct ReallocPair { |
295 | |
296 | // The 'fromPtr'. |
297 | SymbolRef ReallocatedSym; |
298 | OwnershipAfterReallocKind Kind; |
299 | |
300 | ReallocPair(SymbolRef S, OwnershipAfterReallocKind K) |
301 | : ReallocatedSym(S), Kind(K) {} |
302 | void Profile(llvm::FoldingSetNodeID &ID) const { |
    ID.AddInteger(Kind);
    ID.AddPointer(ReallocatedSym);
305 | } |
306 | bool operator==(const ReallocPair &X) const { |
307 | return ReallocatedSym == X.ReallocatedSym && |
308 | Kind == X.Kind; |
309 | } |
310 | }; |
311 | |
312 | } // end of anonymous namespace |
313 | |
314 | REGISTER_MAP_WITH_PROGRAMSTATE(ReallocPairs, SymbolRef, ReallocPair) |
315 | |
316 | static bool isStandardNew(const FunctionDecl *FD); |
317 | static bool isStandardNew(const CallEvent &Call) { |
  if (!Call.getDecl() || !isa<FunctionDecl>(Call.getDecl()))
    return false;
  return isStandardNew(cast<FunctionDecl>(Call.getDecl()));
321 | } |
322 | |
323 | static bool isStandardDelete(const FunctionDecl *FD); |
324 | static bool isStandardDelete(const CallEvent &Call) { |
  if (!Call.getDecl() || !isa<FunctionDecl>(Call.getDecl()))
    return false;
  return isStandardDelete(cast<FunctionDecl>(Call.getDecl()));
328 | } |
329 | |
330 | /// Tells if the callee is one of the builtin new/delete operators, including |
331 | /// placement operators and other standard overloads. |
332 | template <typename T> static bool isStandardNewDelete(const T &FD) { |
333 | return isStandardDelete(FD) || isStandardNew(FD); |
334 | } |
335 | |
336 | //===----------------------------------------------------------------------===// |
337 | // Definition of the MallocChecker class. |
338 | //===----------------------------------------------------------------------===// |
339 | |
340 | namespace { |
341 | |
342 | class MallocChecker |
343 | : public Checker<check::DeadSymbols, check::PointerEscape, |
344 | check::ConstPointerEscape, check::PreStmt<ReturnStmt>, |
345 | check::EndFunction, check::PreCall, check::PostCall, |
346 | eval::Call, check::NewAllocator, |
347 | check::PostStmt<BlockExpr>, check::PostObjCMessage, |
348 | check::Location, eval::Assume> { |
349 | public: |
350 | /// In pessimistic mode, the checker assumes that it does not know which |
351 | /// functions might free the memory. |
352 | /// In optimistic mode, the checker assumes that all user-defined functions |
353 | /// which might free a pointer are annotated. |
354 | bool ShouldIncludeOwnershipAnnotatedFunctions = false; |
355 | |
356 | bool ShouldRegisterNoOwnershipChangeVisitor = false; |
357 | |
358 | /// Many checkers are essentially built into this one, so enabling them will |
359 | /// make MallocChecker perform additional modeling and reporting. |
360 | enum CheckKind { |
    /// When a subchecker is enabled but MallocChecker isn't, model memory
    /// management but do not emit the warnings that only MallocChecker itself
    /// would report.
364 | CK_MallocChecker, |
365 | CK_NewDeleteChecker, |
366 | CK_NewDeleteLeaksChecker, |
367 | CK_MismatchedDeallocatorChecker, |
368 | CK_InnerPointerChecker, |
369 | CK_TaintedAllocChecker, |
370 | CK_NumCheckKinds |
371 | }; |
372 | |
373 | using LeakInfo = std::pair<const ExplodedNode *, const MemRegion *>; |
374 | |
375 | bool ChecksEnabled[CK_NumCheckKinds] = {false}; |
376 | CheckerNameRef CheckNames[CK_NumCheckKinds]; |
377 | |
378 | void checkPreCall(const CallEvent &Call, CheckerContext &C) const; |
379 | void checkPostCall(const CallEvent &Call, CheckerContext &C) const; |
380 | bool evalCall(const CallEvent &Call, CheckerContext &C) const; |
381 | void checkNewAllocator(const CXXAllocatorCall &Call, CheckerContext &C) const; |
382 | void checkPostObjCMessage(const ObjCMethodCall &Call, CheckerContext &C) const; |
383 | void checkPostStmt(const BlockExpr *BE, CheckerContext &C) const; |
384 | void checkDeadSymbols(SymbolReaper &SymReaper, CheckerContext &C) const; |
385 | void checkPreStmt(const ReturnStmt *S, CheckerContext &C) const; |
386 | void checkEndFunction(const ReturnStmt *S, CheckerContext &C) const; |
387 | ProgramStateRef evalAssume(ProgramStateRef state, SVal Cond, |
388 | bool Assumption) const; |
389 | void checkLocation(SVal l, bool isLoad, const Stmt *S, |
390 | CheckerContext &C) const; |
391 | |
392 | ProgramStateRef checkPointerEscape(ProgramStateRef State, |
393 | const InvalidatedSymbols &Escaped, |
394 | const CallEvent *Call, |
395 | PointerEscapeKind Kind) const; |
396 | ProgramStateRef checkConstPointerEscape(ProgramStateRef State, |
397 | const InvalidatedSymbols &Escaped, |
398 | const CallEvent *Call, |
399 | PointerEscapeKind Kind) const; |
400 | |
401 | void printState(raw_ostream &Out, ProgramStateRef State, |
402 | const char *NL, const char *Sep) const override; |
403 | |
404 | private: |
405 | mutable std::unique_ptr<BugType> BT_DoubleFree[CK_NumCheckKinds]; |
406 | mutable std::unique_ptr<BugType> BT_DoubleDelete; |
407 | mutable std::unique_ptr<BugType> BT_Leak[CK_NumCheckKinds]; |
408 | mutable std::unique_ptr<BugType> BT_UseFree[CK_NumCheckKinds]; |
409 | mutable std::unique_ptr<BugType> BT_BadFree[CK_NumCheckKinds]; |
410 | mutable std::unique_ptr<BugType> BT_FreeAlloca[CK_NumCheckKinds]; |
411 | mutable std::unique_ptr<BugType> BT_MismatchedDealloc; |
412 | mutable std::unique_ptr<BugType> BT_OffsetFree[CK_NumCheckKinds]; |
413 | mutable std::unique_ptr<BugType> BT_UseZerroAllocated[CK_NumCheckKinds]; |
414 | mutable std::unique_ptr<BugType> BT_TaintedAlloc; |
415 | |
416 | #define CHECK_FN(NAME) \ |
417 | void NAME(ProgramStateRef State, const CallEvent &Call, CheckerContext &C) \ |
418 | const; |
419 | |
420 | CHECK_FN(checkFree) |
421 | CHECK_FN(checkIfNameIndex) |
422 | CHECK_FN(checkBasicAlloc) |
423 | CHECK_FN(checkKernelMalloc) |
424 | CHECK_FN(checkCalloc) |
425 | CHECK_FN(checkAlloca) |
426 | CHECK_FN(checkStrdup) |
427 | CHECK_FN(checkIfFreeNameIndex) |
428 | CHECK_FN(checkCXXNewOrCXXDelete) |
429 | CHECK_FN(checkGMalloc0) |
430 | CHECK_FN(checkGMemdup) |
431 | CHECK_FN(checkGMallocN) |
432 | CHECK_FN(checkGMallocN0) |
433 | CHECK_FN(preGetdelim) |
434 | CHECK_FN(checkGetdelim) |
435 | CHECK_FN(checkReallocN) |
436 | CHECK_FN(checkOwnershipAttr) |
437 | |
438 | void checkRealloc(ProgramStateRef State, const CallEvent &Call, |
439 | CheckerContext &C, bool ShouldFreeOnFail) const; |
440 | |
441 | using CheckFn = |
442 | std::function<void(const MallocChecker *, ProgramStateRef State, |
443 | const CallEvent &Call, CheckerContext &C)>; |
444 | |
445 | const CallDescriptionMap<CheckFn> PreFnMap{ |
446 | // NOTE: the following CallDescription also matches the C++ standard |
447 | // library function std::getline(); the callback will filter it out. |
      {{CDM::CLibrary, {"getline"}, 3}, &MallocChecker::preGetdelim},
      {{CDM::CLibrary, {"getdelim"}, 4}, &MallocChecker::preGetdelim},
450 | }; |
451 | |
452 | const CallDescriptionMap<CheckFn> PostFnMap{ |
453 | // NOTE: the following CallDescription also matches the C++ standard |
454 | // library function std::getline(); the callback will filter it out. |
      {{CDM::CLibrary, {"getline"}, 3}, &MallocChecker::checkGetdelim},
      {{CDM::CLibrary, {"getdelim"}, 4}, &MallocChecker::checkGetdelim},
457 | }; |
458 | |
459 | const CallDescriptionMap<CheckFn> FreeingMemFnMap{ |
      {{CDM::CLibrary, {"free"}, 1}, &MallocChecker::checkFree},
      {{CDM::CLibrary, {"if_freenameindex"}, 1},
       &MallocChecker::checkIfFreeNameIndex},
      {{CDM::CLibrary, {"kfree"}, 1}, &MallocChecker::checkFree},
      {{CDM::CLibrary, {"g_free"}, 1}, &MallocChecker::checkFree},
465 | }; |
466 | |
467 | bool isFreeingCall(const CallEvent &Call) const; |
468 | static bool isFreeingOwnershipAttrCall(const FunctionDecl *Func); |
469 | static bool isFreeingOwnershipAttrCall(const CallEvent &Call); |
470 | static bool isAllocatingOwnershipAttrCall(const FunctionDecl *Func); |
471 | static bool isAllocatingOwnershipAttrCall(const CallEvent &Call); |
472 | |
473 | friend class NoMemOwnershipChangeVisitor; |
474 | |
475 | CallDescriptionMap<CheckFn> AllocaMemFnMap{ |
      {{CDM::CLibrary, {"alloca"}, 1}, &MallocChecker::checkAlloca},
      {{CDM::CLibrary, {"_alloca"}, 1}, &MallocChecker::checkAlloca},
478 | // The line for "alloca" also covers "__builtin_alloca", but the |
479 | // _with_align variant must be listed separately because it takes an |
480 | // extra argument: |
      {{CDM::CLibrary, {"__builtin_alloca_with_align"}, 2},
482 | &MallocChecker::checkAlloca}, |
483 | }; |
484 | |
485 | CallDescriptionMap<CheckFn> AllocatingMemFnMap{ |
      {{CDM::CLibrary, {"malloc"}, 1}, &MallocChecker::checkBasicAlloc},
      {{CDM::CLibrary, {"malloc"}, 3}, &MallocChecker::checkKernelMalloc},
      {{CDM::CLibrary, {"calloc"}, 2}, &MallocChecker::checkCalloc},
      {{CDM::CLibrary, {"valloc"}, 1}, &MallocChecker::checkBasicAlloc},
      {{CDM::CLibrary, {"strndup"}, 2}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"strdup"}, 1}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"_strdup"}, 1}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"kmalloc"}, 2}, &MallocChecker::checkKernelMalloc},
      {{CDM::CLibrary, {"if_nameindex"}, 1}, &MallocChecker::checkIfNameIndex},
      {{CDM::CLibrary, {"wcsdup"}, 1}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"_wcsdup"}, 1}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"g_malloc"}, 1}, &MallocChecker::checkBasicAlloc},
      {{CDM::CLibrary, {"g_malloc0"}, 1}, &MallocChecker::checkGMalloc0},
      {{CDM::CLibrary, {"g_try_malloc"}, 1}, &MallocChecker::checkBasicAlloc},
      {{CDM::CLibrary, {"g_try_malloc0"}, 1}, &MallocChecker::checkGMalloc0},
      {{CDM::CLibrary, {"g_memdup"}, 2}, &MallocChecker::checkGMemdup},
      {{CDM::CLibrary, {"g_malloc_n"}, 2}, &MallocChecker::checkGMallocN},
      {{CDM::CLibrary, {"g_malloc0_n"}, 2}, &MallocChecker::checkGMallocN0},
      {{CDM::CLibrary, {"g_try_malloc_n"}, 2}, &MallocChecker::checkGMallocN},
      {{CDM::CLibrary, {"g_try_malloc0_n"}, 2}, &MallocChecker::checkGMallocN0},
506 | }; |
507 | |
508 | CallDescriptionMap<CheckFn> ReallocatingMemFnMap{ |
      {{CDM::CLibrary, {"realloc"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, _4, false)},
      {{CDM::CLibrary, {"reallocf"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, _4, true)},
      {{CDM::CLibrary, {"g_realloc"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, _4, false)},
      {{CDM::CLibrary, {"g_try_realloc"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, _4, false)},
      {{CDM::CLibrary, {"g_realloc_n"}, 3}, &MallocChecker::checkReallocN},
      {{CDM::CLibrary, {"g_try_realloc_n"}, 3}, &MallocChecker::checkReallocN},
519 | }; |
520 | |
521 | bool isMemCall(const CallEvent &Call) const; |
522 | bool hasOwnershipReturns(const CallEvent &Call) const; |
523 | bool hasOwnershipTakesHolds(const CallEvent &Call) const; |
524 | void reportTaintBug(StringRef Msg, ProgramStateRef State, CheckerContext &C, |
525 | llvm::ArrayRef<SymbolRef> TaintedSyms, |
526 | AllocationFamily Family) const; |
527 | |
528 | void checkTaintedness(CheckerContext &C, const CallEvent &Call, |
529 | const SVal SizeSVal, ProgramStateRef State, |
530 | AllocationFamily Family) const; |
531 | |
  // TODO: Remove mutable by moving the initialization to the registry
  // function.
533 | mutable std::optional<uint64_t> KernelZeroFlagVal; |
534 | |
535 | using KernelZeroSizePtrValueTy = std::optional<int>; |
536 | /// Store the value of macro called `ZERO_SIZE_PTR`. |
537 | /// The value is initialized at first use, before first use the outer |
538 | /// Optional is empty, afterwards it contains another Optional that indicates |
539 | /// if the macro value could be determined, and if yes the value itself. |
540 | mutable std::optional<KernelZeroSizePtrValueTy> KernelZeroSizePtrValue; |
541 | |
542 | /// Process C++ operator new()'s allocation, which is the part of C++ |
543 | /// new-expression that goes before the constructor. |
544 | [[nodiscard]] ProgramStateRef |
545 | processNewAllocation(const CXXAllocatorCall &Call, CheckerContext &C, |
546 | AllocationFamily Family) const; |
547 | |
548 | /// Perform a zero-allocation check. |
549 | /// |
550 | /// \param [in] Call The expression that allocates memory. |
551 | /// \param [in] IndexOfSizeArg Index of the argument that specifies the size |
552 | /// of the memory that needs to be allocated. E.g. for malloc, this would be |
553 | /// 0. |
554 | /// \param [in] RetVal Specifies the newly allocated pointer value; |
555 | /// if unspecified, the value of expression \p E is used. |
556 | [[nodiscard]] static ProgramStateRef |
557 | ProcessZeroAllocCheck(CheckerContext &C, const CallEvent &Call, |
558 | const unsigned IndexOfSizeArg, ProgramStateRef State, |
559 | std::optional<SVal> RetVal = std::nullopt); |
560 | |
561 | /// Model functions with the ownership_returns attribute. |
562 | /// |
  /// A user-defined function may have the ownership_returns attribute, which
  /// annotates that the function returns an object that was allocated on the
  /// heap, and passes ownership of it to the caller.
566 | /// |
567 | /// void __attribute((ownership_returns(malloc, 1))) *my_malloc(size_t); |
568 | /// |
569 | /// It has two parameters: |
570 | /// - first: name of the resource (e.g. 'malloc') |
571 | /// - (OPTIONAL) second: size of the allocated region |
572 | /// |
573 | /// \param [in] Call The expression that allocates memory. |
574 | /// \param [in] Att The ownership_returns attribute. |
575 | /// \param [in] State The \c ProgramState right before allocation. |
576 | /// \returns The ProgramState right after allocation. |
577 | [[nodiscard]] ProgramStateRef |
578 | MallocMemReturnsAttr(CheckerContext &C, const CallEvent &Call, |
579 | const OwnershipAttr *Att, ProgramStateRef State) const; |
580 | /// Models memory allocation. |
581 | /// |
582 | /// \param [in] C Checker context. |
583 | /// \param [in] Call The expression that allocates memory. |
584 | /// \param [in] State The \c ProgramState right before allocation. |
  /// \param [in] isAlloca Whether the allocation function is alloca-like.
  /// \returns The ProgramState with the return value bound.
587 | [[nodiscard]] ProgramStateRef MallocBindRetVal(CheckerContext &C, |
588 | const CallEvent &Call, |
589 | ProgramStateRef State, |
590 | bool isAlloca) const; |
591 | |
592 | /// Models memory allocation. |
593 | /// |
594 | /// \param [in] Call The expression that allocates memory. |
595 | /// \param [in] SizeEx Size of the memory that needs to be allocated. |
  /// \param [in] Init The value the allocated memory needs to be initialized
597 | /// with. For example, \c calloc initializes the allocated memory to 0, |
598 | /// malloc leaves it undefined. |
599 | /// \param [in] State The \c ProgramState right before allocation. |
600 | /// \returns The ProgramState right after allocation. |
601 | [[nodiscard]] ProgramStateRef |
602 | MallocMemAux(CheckerContext &C, const CallEvent &Call, const Expr *SizeEx, |
603 | SVal Init, ProgramStateRef State, AllocationFamily Family) const; |
604 | |
605 | /// Models memory allocation. |
606 | /// |
607 | /// \param [in] Call The expression that allocates memory. |
608 | /// \param [in] Size Size of the memory that needs to be allocated. |
  /// \param [in] Init The value the allocated memory needs to be initialized
610 | /// with. For example, \c calloc initializes the allocated memory to 0, |
611 | /// malloc leaves it undefined. |
612 | /// \param [in] State The \c ProgramState right before allocation. |
613 | /// \returns The ProgramState right after allocation. |
614 | [[nodiscard]] ProgramStateRef MallocMemAux(CheckerContext &C, |
615 | const CallEvent &Call, SVal Size, |
616 | SVal Init, ProgramStateRef State, |
617 | AllocationFamily Family) const; |
618 | |
  // Check if this malloc() call uses special flags. At present that means
  // M_ZERO or __GFP_ZERO (in which case, treat it like calloc).
621 | [[nodiscard]] std::optional<ProgramStateRef> |
622 | performKernelMalloc(const CallEvent &Call, CheckerContext &C, |
623 | const ProgramStateRef &State) const; |
624 | |
625 | /// Model functions with the ownership_takes and ownership_holds attributes. |
626 | /// |
  /// A user-defined function may have the ownership_takes and/or
  /// ownership_holds attributes, which annotate that the function frees the
  /// memory passed as a parameter.
630 | /// |
631 | /// void __attribute((ownership_takes(malloc, 1))) my_free(void *); |
632 | /// void __attribute((ownership_holds(malloc, 1))) my_hold(void *); |
633 | /// |
634 | /// They have two parameters: |
635 | /// - first: name of the resource (e.g. 'malloc') |
636 | /// - second: index of the parameter the attribute applies to |
637 | /// |
638 | /// \param [in] Call The expression that frees memory. |
639 | /// \param [in] Att The ownership_takes or ownership_holds attribute. |
  /// \param [in] State The \c ProgramState right before deallocation.
641 | /// \returns The ProgramState right after deallocation. |
642 | [[nodiscard]] ProgramStateRef FreeMemAttr(CheckerContext &C, |
643 | const CallEvent &Call, |
644 | const OwnershipAttr *Att, |
645 | ProgramStateRef State) const; |
646 | |
647 | /// Models memory deallocation. |
648 | /// |
649 | /// \param [in] Call The expression that frees memory. |
  /// \param [in] State The \c ProgramState right before deallocation.
651 | /// \param [in] Num Index of the argument that needs to be freed. This is |
652 | /// normally 0, but for custom free functions it may be different. |
653 | /// \param [in] Hold Whether the parameter at \p Index has the ownership_holds |
654 | /// attribute. |
655 | /// \param [out] IsKnownToBeAllocated Whether the memory to be freed is known |
656 | /// to have been allocated, or in other words, the symbol to be freed was |
657 | /// registered as allocated by this checker. In the following case, \c ptr |
658 | /// isn't known to be allocated. |
659 | /// void Haha(int *ptr) { |
660 | /// ptr = realloc(ptr, 67); |
661 | /// // ... |
662 | /// } |
663 | /// \param [in] ReturnsNullOnFailure Whether the memory deallocation function |
664 | /// we're modeling returns with Null on failure. |
665 | /// \returns The ProgramState right after deallocation. |
666 | [[nodiscard]] ProgramStateRef |
667 | FreeMemAux(CheckerContext &C, const CallEvent &Call, ProgramStateRef State, |
668 | unsigned Num, bool Hold, bool &IsKnownToBeAllocated, |
669 | AllocationFamily Family, bool ReturnsNullOnFailure = false) const; |
670 | |
671 | /// Models memory deallocation. |
672 | /// |
  /// \param [in] ArgExpr The variable whose pointee needs to be freed.
674 | /// \param [in] Call The expression that frees the memory. |
  /// \param [in] State The \c ProgramState right before deallocation.
677 | /// \param [in] Hold Whether the parameter at \p Index has the ownership_holds |
678 | /// attribute. |
679 | /// \param [out] IsKnownToBeAllocated Whether the memory to be freed is known |
680 | /// to have been allocated, or in other words, the symbol to be freed was |
681 | /// registered as allocated by this checker. In the following case, \c ptr |
682 | /// isn't known to be allocated. |
683 | /// void Haha(int *ptr) { |
684 | /// ptr = realloc(ptr, 67); |
685 | /// // ... |
686 | /// } |
687 | /// \param [in] ReturnsNullOnFailure Whether the memory deallocation function |
688 | /// we're modeling returns with Null on failure. |
689 | /// \param [in] ArgValOpt Optional value to use for the argument instead of |
690 | /// the one obtained from ArgExpr. |
691 | /// \returns The ProgramState right after deallocation. |
692 | [[nodiscard]] ProgramStateRef |
693 | FreeMemAux(CheckerContext &C, const Expr *ArgExpr, const CallEvent &Call, |
694 | ProgramStateRef State, bool Hold, bool &IsKnownToBeAllocated, |
695 | AllocationFamily Family, bool ReturnsNullOnFailure = false, |
696 | std::optional<SVal> ArgValOpt = {}) const; |
697 | |
698 | // TODO: Needs some refactoring, as all other deallocation modeling |
699 | // functions are suffering from out parameters and messy code due to how |
700 | // realloc is handled. |
701 | // |
702 | /// Models memory reallocation. |
703 | /// |
  /// \param [in] Call The expression that reallocates memory.
  /// \param [in] ShouldFreeOnFail Whether the supplied memory should be freed
  /// if reallocation fails.
707 | /// \param [in] State The \c ProgramState right before reallocation. |
708 | /// \param [in] SuffixWithN Whether the reallocation function we're modeling |
709 | /// has an '_n' suffix, such as g_realloc_n. |
710 | /// \returns The ProgramState right after reallocation. |
711 | [[nodiscard]] ProgramStateRef |
712 | ReallocMemAux(CheckerContext &C, const CallEvent &Call, bool ShouldFreeOnFail, |
713 | ProgramStateRef State, AllocationFamily Family, |
714 | bool SuffixWithN = false) const; |
715 | |
716 | /// Evaluates the buffer size that needs to be allocated. |
717 | /// |
718 | /// \param [in] Blocks The amount of blocks that needs to be allocated. |
719 | /// \param [in] BlockBytes The size of a block. |
720 | /// \returns The symbolic value of \p Blocks * \p BlockBytes. |
721 | [[nodiscard]] static SVal evalMulForBufferSize(CheckerContext &C, |
722 | const Expr *Blocks, |
723 | const Expr *BlockBytes); |
724 | |
725 | /// Models zero initialized array allocation. |
726 | /// |
  /// \param [in] Call The expression that allocates memory.
  /// \param [in] State The \c ProgramState right before allocation.
729 | /// \returns The ProgramState right after allocation. |
730 | [[nodiscard]] ProgramStateRef CallocMem(CheckerContext &C, |
731 | const CallEvent &Call, |
732 | ProgramStateRef State) const; |
733 | |
734 | /// See if deallocation happens in a suspicious context. If so, escape the |
735 | /// pointers that otherwise would have been deallocated and return true. |
736 | bool suppressDeallocationsInSuspiciousContexts(const CallEvent &Call, |
737 | CheckerContext &C) const; |
738 | |
739 | /// If in \p S \p Sym is used, check whether \p Sym was already freed. |
740 | bool checkUseAfterFree(SymbolRef Sym, CheckerContext &C, const Stmt *S) const; |
741 | |
742 | /// If in \p S \p Sym is used, check whether \p Sym was allocated as a zero |
743 | /// sized memory region. |
744 | void checkUseZeroAllocated(SymbolRef Sym, CheckerContext &C, |
745 | const Stmt *S) const; |
746 | |
747 | /// If in \p S \p Sym is being freed, check whether \p Sym was already freed. |
748 | bool checkDoubleDelete(SymbolRef Sym, CheckerContext &C) const; |
749 | |
750 | /// Check if the function is known to free memory, or if it is |
751 | /// "interesting" and should be modeled explicitly. |
752 | /// |
753 | /// \param [out] EscapingSymbol A function might not free memory in general, |
754 | /// but could be known to free a particular symbol. In this case, false is |
755 | /// returned and the single escaping symbol is returned through the out |
756 | /// parameter. |
757 | /// |
758 | /// We assume that pointers do not escape through calls to system functions |
759 | /// not handled by this checker. |
760 | bool mayFreeAnyEscapedMemoryOrIsModeledExplicitly(const CallEvent *Call, |
761 | ProgramStateRef State, |
762 | SymbolRef &EscapingSymbol) const; |
763 | |
764 | /// Implementation of the checkPointerEscape callbacks. |
765 | [[nodiscard]] ProgramStateRef |
766 | checkPointerEscapeAux(ProgramStateRef State, |
767 | const InvalidatedSymbols &Escaped, |
768 | const CallEvent *Call, PointerEscapeKind Kind, |
769 | bool IsConstPointerEscape) const; |
770 | |
771 | // Implementation of the checkPreStmt and checkEndFunction callbacks. |
772 | void checkEscapeOnReturn(const ReturnStmt *S, CheckerContext &C) const; |
773 | |
774 | ///@{ |
775 | /// Tells if a given family/call/symbol is tracked by the current checker. |
776 | /// Sets CheckKind to the kind of the checker responsible for this |
777 | /// family/call/symbol. |
778 | std::optional<CheckKind> getCheckIfTracked(AllocationFamily Family, |
779 | bool IsALeakCheck = false) const; |
780 | |
781 | std::optional<CheckKind> getCheckIfTracked(CheckerContext &C, SymbolRef Sym, |
782 | bool IsALeakCheck = false) const; |
783 | ///@} |
784 | static bool SummarizeValue(raw_ostream &os, SVal V); |
785 | static bool SummarizeRegion(ProgramStateRef State, raw_ostream &os, |
786 | const MemRegion *MR); |
787 | |
788 | void HandleNonHeapDealloc(CheckerContext &C, SVal ArgVal, SourceRange Range, |
789 | const Expr *DeallocExpr, |
790 | AllocationFamily Family) const; |
791 | |
792 | void HandleFreeAlloca(CheckerContext &C, SVal ArgVal, |
793 | SourceRange Range) const; |
794 | |
795 | void HandleMismatchedDealloc(CheckerContext &C, SourceRange Range, |
796 | const Expr *DeallocExpr, const RefState *RS, |
797 | SymbolRef Sym, bool OwnershipTransferred) const; |
798 | |
799 | void HandleOffsetFree(CheckerContext &C, SVal ArgVal, SourceRange Range, |
800 | const Expr *DeallocExpr, AllocationFamily Family, |
801 | const Expr *AllocExpr = nullptr) const; |
802 | |
803 | void HandleUseAfterFree(CheckerContext &C, SourceRange Range, |
804 | SymbolRef Sym) const; |
805 | |
806 | void HandleDoubleFree(CheckerContext &C, SourceRange Range, bool Released, |
807 | SymbolRef Sym, SymbolRef PrevSym) const; |
808 | |
809 | void HandleDoubleDelete(CheckerContext &C, SymbolRef Sym) const; |
810 | |
811 | void HandleUseZeroAlloc(CheckerContext &C, SourceRange Range, |
812 | SymbolRef Sym) const; |
813 | |
814 | void HandleFunctionPtrFree(CheckerContext &C, SVal ArgVal, SourceRange Range, |
815 | const Expr *FreeExpr, |
816 | AllocationFamily Family) const; |
817 | |
818 | /// Find the location of the allocation for Sym on the path leading to the |
819 | /// exploded node N. |
820 | static LeakInfo getAllocationSite(const ExplodedNode *N, SymbolRef Sym, |
821 | CheckerContext &C); |
822 | |
823 | void HandleLeak(SymbolRef Sym, ExplodedNode *N, CheckerContext &C) const; |
824 | |
825 | /// Test if value in ArgVal equals to value in macro `ZERO_SIZE_PTR`. |
826 | bool isArgZERO_SIZE_PTR(ProgramStateRef State, CheckerContext &C, |
827 | SVal ArgVal) const; |
828 | }; |
829 | } // end anonymous namespace |
830 | |
831 | //===----------------------------------------------------------------------===// |
832 | // Definition of NoOwnershipChangeVisitor. |
833 | //===----------------------------------------------------------------------===// |
834 | |
835 | namespace { |
836 | class NoMemOwnershipChangeVisitor final : public NoOwnershipChangeVisitor { |
837 | protected: |
838 | /// Syntactically checks whether the callee is a deallocating function. Since |
839 | /// we have no path-sensitive information on this call (we would need a |
  /// CallEvent instead of a CallExpr for that), it's possible that a
  /// deallocation function was called indirectly through a function pointer,
  /// but we are not able to tell, so this is a best-effort analysis.
843 | /// See namespace `memory_passed_to_fn_call_free_through_fn_ptr` in |
844 | /// clang/test/Analysis/NewDeleteLeaks.cpp. |
845 | bool isFreeingCallAsWritten(const CallExpr &Call) const { |
846 | const auto *MallocChk = static_cast<const MallocChecker *>(&Checker); |
847 | if (MallocChk->FreeingMemFnMap.lookupAsWritten(Call) || |
848 | MallocChk->ReallocatingMemFnMap.lookupAsWritten(Call)) |
849 | return true; |
850 | |
851 | if (const auto *Func = |
            llvm::dyn_cast_or_null<FunctionDecl>(Call.getCalleeDecl()))
853 | return MallocChecker::isFreeingOwnershipAttrCall(Func); |
854 | |
855 | return false; |
856 | } |
857 | |
858 | bool hasResourceStateChanged(ProgramStateRef CallEnterState, |
859 | ProgramStateRef CallExitEndState) final { |
    return CallEnterState->get<RegionState>(Sym) !=
           CallExitEndState->get<RegionState>(Sym);
862 | } |
863 | |
864 | /// Heuristically guess whether the callee intended to free memory. This is |
865 | /// done syntactically, because we are trying to argue about alternative |
866 | /// paths of execution, and as a consequence we don't have path-sensitive |
867 | /// information. |
868 | bool doesFnIntendToHandleOwnership(const Decl *Callee, |
869 | ASTContext &ACtx) final { |
    const FunctionDecl *FD = dyn_cast<FunctionDecl>(Callee);
871 | |
872 | // Given that the stack frame was entered, the body should always be |
873 | // theoretically obtainable. In case of body farms, the synthesized body |
874 | // is not attached to declaration, thus triggering the '!FD->hasBody()' |
875 | // branch. That said, would a synthesized body ever intend to handle |
876 | // ownership? As of today they don't. And if they did, how would we |
877 | // put notes inside it, given that it doesn't match any source locations? |
878 | if (!FD || !FD->hasBody()) |
879 | return false; |
880 | using namespace clang::ast_matchers; |
881 | |
    auto Matches = match(findAll(stmt(anyOf(cxxDeleteExpr().bind("delete"),
                                            callExpr().bind("call")))),
                         *FD->getBody(), ACtx);
885 | for (BoundNodes Match : Matches) { |
      if (Match.getNodeAs<CXXDeleteExpr>("delete"))
887 | return true; |
888 | |
      if (const auto *Call = Match.getNodeAs<CallExpr>("call"))
        if (isFreeingCallAsWritten(*Call))
891 | return true; |
892 | } |
893 | // TODO: Ownership might change with an attempt to store the allocated |
894 | // memory, not only through deallocation. Check for attempted stores as |
895 | // well. |
896 | return false; |
897 | } |
898 | |
899 | PathDiagnosticPieceRef emitNote(const ExplodedNode *N) final { |
    PathDiagnosticLocation L = PathDiagnosticLocation::create(
        N->getLocation(),
        N->getState()->getStateManager().getContext().getSourceManager());
    return std::make_shared<PathDiagnosticEventPiece>(
        L, "Returning without deallocating memory or storing the pointer for "
           "later deallocation");
906 | } |
907 | |
908 | public: |
909 | NoMemOwnershipChangeVisitor(SymbolRef Sym, const MallocChecker *Checker) |
910 | : NoOwnershipChangeVisitor(Sym, Checker) {} |
911 | |
912 | void Profile(llvm::FoldingSetNodeID &ID) const override { |
913 | static int Tag = 0; |
    ID.AddPointer(&Tag);
    ID.AddPointer(Sym);
916 | } |
917 | }; |
918 | |
919 | } // end anonymous namespace |
920 | |
921 | //===----------------------------------------------------------------------===// |
922 | // Definition of MallocBugVisitor. |
923 | //===----------------------------------------------------------------------===// |
924 | |
925 | namespace { |
926 | /// The bug visitor which allows us to print extra diagnostics along the |
927 | /// BugReport path. For example, showing the allocation site of the leaked |
928 | /// region. |
929 | class MallocBugVisitor final : public BugReporterVisitor { |
930 | protected: |
931 | enum NotificationMode { Normal, ReallocationFailed }; |
932 | |
933 | // The allocated region symbol tracked by the main analysis. |
934 | SymbolRef Sym; |
935 | |
936 | // The mode we are in, i.e. what kind of diagnostics will be emitted. |
937 | NotificationMode Mode; |
938 | |
939 | // A symbol from when the primary region should have been reallocated. |
940 | SymbolRef FailedReallocSymbol; |
941 | |
942 | // A release function stack frame in which memory was released. Used for |
943 | // miscellaneous false positive suppression. |
944 | const StackFrameContext *ReleaseFunctionLC; |
945 | |
946 | bool IsLeak; |
947 | |
948 | public: |
949 | MallocBugVisitor(SymbolRef S, bool isLeak = false) |
950 | : Sym(S), Mode(Normal), FailedReallocSymbol(nullptr), |
951 | ReleaseFunctionLC(nullptr), IsLeak(isLeak) {} |
952 | |
953 | static void *getTag() { |
954 | static int Tag = 0; |
955 | return &Tag; |
956 | } |
957 | |
958 | void Profile(llvm::FoldingSetNodeID &ID) const override { |
    ID.AddPointer(getTag());
    ID.AddPointer(Sym);
961 | } |
962 | |
963 | /// Did not track -> allocated. Other state (released) -> allocated. |
964 | static inline bool isAllocated(const RefState *RSCurr, const RefState *RSPrev, |
965 | const Stmt *Stmt) { |
    return (isa_and_nonnull<CallExpr, CXXNewExpr>(Stmt) &&
967 | (RSCurr && |
968 | (RSCurr->isAllocated() || RSCurr->isAllocatedOfSizeZero())) && |
969 | (!RSPrev || |
970 | !(RSPrev->isAllocated() || RSPrev->isAllocatedOfSizeZero()))); |
971 | } |
972 | |
973 | /// Did not track -> released. Other state (allocated) -> released. |
974 | /// The statement associated with the release might be missing. |
975 | static inline bool isReleased(const RefState *RSCurr, const RefState *RSPrev, |
976 | const Stmt *Stmt) { |
977 | bool IsReleased = |
978 | (RSCurr && RSCurr->isReleased()) && (!RSPrev || !RSPrev->isReleased()); |
979 | assert(!IsReleased || (isa_and_nonnull<CallExpr, CXXDeleteExpr>(Stmt)) || |
980 | (!Stmt && RSCurr->getAllocationFamily().Kind == AF_InnerBuffer)); |
981 | return IsReleased; |
982 | } |
983 | |
984 | /// Did not track -> relinquished. Other state (allocated) -> relinquished. |
985 | static inline bool isRelinquished(const RefState *RSCurr, |
986 | const RefState *RSPrev, const Stmt *Stmt) { |
987 | return ( |
        isa_and_nonnull<CallExpr, ObjCMessageExpr, ObjCPropertyRefExpr>(Stmt) &&
989 | (RSCurr && RSCurr->isRelinquished()) && |
990 | (!RSPrev || !RSPrev->isRelinquished())); |
991 | } |
992 | |
993 | /// If the expression is not a call, and the state change is |
994 | /// released -> allocated, it must be the realloc return value |
995 | /// check. If we have to handle more cases here, it might be cleaner just |
996 | /// to track this extra bit in the state itself. |
997 | static inline bool hasReallocFailed(const RefState *RSCurr, |
998 | const RefState *RSPrev, |
999 | const Stmt *Stmt) { |
    return ((!isa_and_nonnull<CallExpr>(Stmt)) &&
1001 | (RSCurr && |
1002 | (RSCurr->isAllocated() || RSCurr->isAllocatedOfSizeZero())) && |
1003 | (RSPrev && |
1004 | !(RSPrev->isAllocated() || RSPrev->isAllocatedOfSizeZero()))); |
1005 | } |
1006 | |
1007 | PathDiagnosticPieceRef VisitNode(const ExplodedNode *N, |
1008 | BugReporterContext &BRC, |
1009 | PathSensitiveBugReport &BR) override; |
1010 | |
1011 | PathDiagnosticPieceRef getEndPath(BugReporterContext &BRC, |
1012 | const ExplodedNode *EndPathNode, |
1013 | PathSensitiveBugReport &BR) override { |
1014 | if (!IsLeak) |
1015 | return nullptr; |
1016 | |
1017 | PathDiagnosticLocation L = BR.getLocation(); |
1018 | // Do not add the statement itself as a range in case of leak. |
    return std::make_shared<PathDiagnosticEventPiece>(L, BR.getDescription(),
                                                      false);
1021 | } |
1022 | |
1023 | private: |
1024 | class StackHintGeneratorForReallocationFailed |
1025 | : public StackHintGeneratorForSymbol { |
1026 | public: |
1027 | StackHintGeneratorForReallocationFailed(SymbolRef S, StringRef M) |
1028 | : StackHintGeneratorForSymbol(S, M) {} |
1029 | |
1030 | std::string getMessageForArg(const Expr *ArgE, unsigned ArgIndex) override { |
1031 | // Printed parameters start at 1, not 0. |
1032 | ++ArgIndex; |
1033 | |
1034 | SmallString<200> buf; |
1035 | llvm::raw_svector_ostream os(buf); |
1036 | |
      os << "Reallocation of " << ArgIndex << llvm::getOrdinalSuffix(ArgIndex)
         << " parameter failed";
1039 | |
1040 | return std::string(os.str()); |
1041 | } |
1042 | |
1043 | std::string getMessageForReturn(const CallExpr *CallExpr) override { |
      return "Reallocation of returned value failed";
1045 | } |
1046 | }; |
1047 | }; |
1048 | } // end anonymous namespace |
1049 | |
1050 | // A map from the freed symbol to the symbol representing the return value of |
1051 | // the free function. |
1052 | REGISTER_MAP_WITH_PROGRAMSTATE(FreeReturnValue, SymbolRef, SymbolRef) |
1053 | |
1054 | namespace { |
1055 | class StopTrackingCallback final : public SymbolVisitor { |
1056 | ProgramStateRef state; |
1057 | |
1058 | public: |
1059 | StopTrackingCallback(ProgramStateRef st) : state(std::move(st)) {} |
1060 | ProgramStateRef getState() const { return state; } |
1061 | |
1062 | bool VisitSymbol(SymbolRef sym) override { |
    state = state->remove<RegionState>(sym);
1064 | return true; |
1065 | } |
1066 | }; |
1067 | } // end anonymous namespace |
1068 | |
1069 | static bool isStandardNew(const FunctionDecl *FD) { |
1070 | if (!FD) |
1071 | return false; |
1072 | |
1073 | OverloadedOperatorKind Kind = FD->getOverloadedOperator(); |
1074 | if (Kind != OO_New && Kind != OO_Array_New) |
1075 | return false; |
1076 | |
1077 | // This is standard if and only if it's not defined in a user file. |
1078 | SourceLocation L = FD->getLocation(); |
  // If the header for operator new is not included, it's still defined
1080 | // in an invalid source location. Check to make sure we don't crash. |
1081 | return !L.isValid() || |
1082 | FD->getASTContext().getSourceManager().isInSystemHeader(L); |
1083 | } |
1084 | |
1085 | static bool isStandardDelete(const FunctionDecl *FD) { |
1086 | if (!FD) |
1087 | return false; |
1088 | |
1089 | OverloadedOperatorKind Kind = FD->getOverloadedOperator(); |
1090 | if (Kind != OO_Delete && Kind != OO_Array_Delete) |
1091 | return false; |
1092 | |
1093 | bool HasBody = FD->hasBody(); // Prefer using the definition. |
1094 | |
1095 | // This is standard if and only if it's not defined in a user file. |
1096 | SourceLocation L = FD->getLocation(); |
1097 | |
1098 | // If the header for operator delete is not included, it's still defined |
1099 | // in an invalid source location. Check to make sure we don't crash. |
1100 | const auto &SM = FD->getASTContext().getSourceManager(); |
1101 | return L.isInvalid() || (!HasBody && SM.isInSystemHeader(L)); |
1102 | } |
1103 | |
1104 | //===----------------------------------------------------------------------===// |
1105 | // Methods of MallocChecker and MallocBugVisitor. |
1106 | //===----------------------------------------------------------------------===// |
1107 | |
1108 | bool MallocChecker::isFreeingOwnershipAttrCall(const CallEvent &Call) { |
  const auto *Func = dyn_cast_or_null<FunctionDecl>(Call.getDecl());
1110 | |
1111 | return Func && isFreeingOwnershipAttrCall(Func); |
1112 | } |
1113 | |
1114 | bool MallocChecker::isFreeingOwnershipAttrCall(const FunctionDecl *Func) { |
1115 | if (Func->hasAttrs()) { |
1116 | for (const auto *I : Func->specific_attrs<OwnershipAttr>()) { |
1117 | OwnershipAttr::OwnershipKind OwnKind = I->getOwnKind(); |
1118 | if (OwnKind == OwnershipAttr::Takes || OwnKind == OwnershipAttr::Holds) |
1119 | return true; |
1120 | } |
1121 | } |
1122 | return false; |
1123 | } |
1124 | |
1125 | bool MallocChecker::isFreeingCall(const CallEvent &Call) const { |
1126 | if (FreeingMemFnMap.lookup(Call) || ReallocatingMemFnMap.lookup(Call)) |
1127 | return true; |
1128 | |
1129 | return isFreeingOwnershipAttrCall(Call); |
1130 | } |
1131 | |
1132 | bool MallocChecker::isAllocatingOwnershipAttrCall(const CallEvent &Call) { |
  const auto *Func = dyn_cast_or_null<FunctionDecl>(Call.getDecl());
1134 | |
1135 | return Func && isAllocatingOwnershipAttrCall(Func); |
1136 | } |
1137 | |
1138 | bool MallocChecker::isAllocatingOwnershipAttrCall(const FunctionDecl *Func) { |
1139 | for (const auto *I : Func->specific_attrs<OwnershipAttr>()) { |
1140 | if (I->getOwnKind() == OwnershipAttr::Returns) |
1141 | return true; |
1142 | } |
1143 | |
1144 | return false; |
1145 | } |
1146 | |
1147 | bool MallocChecker::isMemCall(const CallEvent &Call) const { |
1148 | if (FreeingMemFnMap.lookup(Call) || AllocatingMemFnMap.lookup(Call) || |
1149 | AllocaMemFnMap.lookup(Call) || ReallocatingMemFnMap.lookup(Call)) |
1150 | return true; |
1151 | |
1152 | if (!ShouldIncludeOwnershipAnnotatedFunctions) |
1153 | return false; |
1154 | |
  const auto *Func = dyn_cast<FunctionDecl>(Call.getDecl());
1156 | return Func && Func->hasAttr<OwnershipAttr>(); |
1157 | } |
1158 | |
1159 | std::optional<ProgramStateRef> |
1160 | MallocChecker::performKernelMalloc(const CallEvent &Call, CheckerContext &C, |
1161 | const ProgramStateRef &State) const { |
1162 | // 3-argument malloc(), as commonly used in {Free,Net,Open}BSD Kernels: |
1163 | // |
1164 | // void *malloc(unsigned long size, struct malloc_type *mtp, int flags); |
1165 | // |
1166 | // One of the possible flags is M_ZERO, which means 'give me back an |
1167 | // allocation which is already zeroed', like calloc. |
1168 | |
1169 | // 2-argument kmalloc(), as used in the Linux kernel: |
1170 | // |
1171 | // void *kmalloc(size_t size, gfp_t flags); |
1172 | // |
1173 | // Has the similar flag value __GFP_ZERO. |
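
  // As an illustrative sketch (the flag macros are the real kernel names, the
  // surrounding calls are made up), both of the following requests should be
  // modeled like calloc because of the zeroing flag:
  //
  //   buf1 = malloc(len, M_TEMP, M_WAITOK | M_ZERO);   // BSD kernel malloc
  //   buf2 = kmalloc(len, GFP_KERNEL | __GFP_ZERO);    // Linux kmalloc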
1174 | |
1175 | // This logic is largely cloned from O_CREAT in UnixAPIChecker, maybe some |
1176 | // code could be shared. |
1177 | |
1178 | ASTContext &Ctx = C.getASTContext(); |
1179 | llvm::Triple::OSType OS = Ctx.getTargetInfo().getTriple().getOS(); |
1180 | |
1181 | if (!KernelZeroFlagVal) { |
1182 | switch (OS) { |
1183 | case llvm::Triple::FreeBSD: |
1184 | KernelZeroFlagVal = 0x0100; |
1185 | break; |
1186 | case llvm::Triple::NetBSD: |
1187 | KernelZeroFlagVal = 0x0002; |
1188 | break; |
1189 | case llvm::Triple::OpenBSD: |
1190 | KernelZeroFlagVal = 0x0008; |
1191 | break; |
1192 | case llvm::Triple::Linux: |
1193 | // __GFP_ZERO |
1194 | KernelZeroFlagVal = 0x8000; |
1195 | break; |
1196 | default: |
1197 | // FIXME: We need a more general way of getting the M_ZERO value. |
1198 | // See also: O_CREAT in UnixAPIChecker.cpp. |
1199 | |
1200 | // Fall back to normal malloc behavior on platforms where we don't |
1201 | // know M_ZERO. |
1202 | return std::nullopt; |
1203 | } |
1204 | } |
1205 | |
  // We treat the last argument as the flags argument, and callers fall back
  // to normal malloc on a std::nullopt return. This works for the FreeBSD
  // kernel malloc as well as Linux kmalloc.
1209 | if (Call.getNumArgs() < 2) |
1210 | return std::nullopt; |
1211 | |
  const Expr *FlagsEx = Call.getArgExpr(Call.getNumArgs() - 1);
  const SVal V = C.getSVal(FlagsEx);
  if (!isa<NonLoc>(V)) {
1215 | // The case where 'V' can be a location can only be due to a bad header, |
1216 | // so in this case bail out. |
1217 | return std::nullopt; |
1218 | } |
1219 | |
1220 | NonLoc Flags = V.castAs<NonLoc>(); |
  NonLoc ZeroFlag = C.getSValBuilder()
                        .makeIntVal(*KernelZeroFlagVal, FlagsEx->getType())
                        .castAs<NonLoc>();
  SVal MaskedFlagsUC = C.getSValBuilder().evalBinOpNN(
      State, BO_And, Flags, ZeroFlag, FlagsEx->getType());
1227 | if (MaskedFlagsUC.isUnknownOrUndef()) |
1228 | return std::nullopt; |
1229 | DefinedSVal MaskedFlags = MaskedFlagsUC.castAs<DefinedSVal>(); |
1230 | |
1231 | // Check if maskedFlags is non-zero. |
1232 | ProgramStateRef TrueState, FalseState; |
  std::tie(TrueState, FalseState) = State->assume(MaskedFlags);
1234 | |
1235 | // If M_ZERO is set, treat this like calloc (initialized). |
1236 | if (TrueState && !FalseState) { |
    SVal ZeroVal = C.getSValBuilder().makeZeroVal(Ctx.CharTy);
    return MallocMemAux(C, Call, Call.getArgExpr(0), ZeroVal, TrueState,
                        AllocationFamily(AF_Malloc));
1240 | } |
1241 | |
1242 | return std::nullopt; |
1243 | } |
1244 | |
1245 | SVal MallocChecker::evalMulForBufferSize(CheckerContext &C, const Expr *Blocks, |
1246 | const Expr *BlockBytes) { |
1247 | SValBuilder &SB = C.getSValBuilder(); |
1248 | SVal BlocksVal = C.getSVal(Blocks); |
1249 | SVal BlockBytesVal = C.getSVal(BlockBytes); |
1250 | ProgramStateRef State = C.getState(); |
1251 | SVal TotalSize = SB.evalBinOp(state: State, op: BO_Mul, lhs: BlocksVal, rhs: BlockBytesVal, |
1252 | type: SB.getContext().getSizeType()); |
1253 | return TotalSize; |
1254 | } |
1255 | |
1256 | void MallocChecker::checkBasicAlloc(ProgramStateRef State, |
1257 | const CallEvent &Call, |
1258 | CheckerContext &C) const { |
1259 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 0), Init: UndefinedVal(), State, |
1260 | Family: AllocationFamily(AF_Malloc)); |
1261 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1262 | C.addTransition(State); |
1263 | } |
1264 | |
1265 | void MallocChecker::checkKernelMalloc(ProgramStateRef State, |
1266 | const CallEvent &Call, |
1267 | CheckerContext &C) const { |
1268 | std::optional<ProgramStateRef> MaybeState = |
1269 | performKernelMalloc(Call, C, State); |
1270 | if (MaybeState) |
1271 | State = *MaybeState; |
1272 | else |
1273 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 0), Init: UndefinedVal(), State, |
1274 | Family: AllocationFamily(AF_Malloc)); |
1275 | C.addTransition(State); |
1276 | } |
1277 | |
1278 | static bool isStandardRealloc(const CallEvent &Call) { |
1279 | const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: Call.getDecl()); |
1280 | assert(FD); |
1281 | ASTContext &AC = FD->getASTContext(); |
1282 | |
1283 | return FD->getDeclaredReturnType().getDesugaredType(Context: AC) == AC.VoidPtrTy && |
1284 | FD->getParamDecl(i: 0)->getType().getDesugaredType(AC) == AC.VoidPtrTy && |
1285 | FD->getParamDecl(i: 1)->getType().getDesugaredType(AC) == |
1286 | AC.getSizeType(); |
1287 | } |
1288 | |
1289 | static bool isGRealloc(const CallEvent &Call) { |
1290 | const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: Call.getDecl()); |
1291 | assert(FD); |
1292 | ASTContext &AC = FD->getASTContext(); |
1293 | |
1294 | return FD->getDeclaredReturnType().getDesugaredType(Context: AC) == AC.VoidPtrTy && |
1295 | FD->getParamDecl(i: 0)->getType().getDesugaredType(AC) == AC.VoidPtrTy && |
1296 | FD->getParamDecl(i: 1)->getType().getDesugaredType(AC) == |
1297 | AC.UnsignedLongTy; |
1298 | } |
1299 | |
1300 | void MallocChecker::checkRealloc(ProgramStateRef State, const CallEvent &Call, |
1301 | CheckerContext &C, |
1302 | bool ShouldFreeOnFail) const { |
1303 | // Ignore calls to functions whose type does not match the expected type of |
1304 | // either the standard realloc or g_realloc from GLib. |
1305 | // FIXME: Should we perform this kind of checking consistently for each |
1306 | // function? If yes, then perhaps extend the `CallDescription` interface to |
1307 | // handle this. |
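  // The two accepted prototypes are:
  //
  //   void *realloc(void *ptr, size_t size);            // C standard library
  //   gpointer g_realloc(gpointer mem, gsize n_bytes);   // GLib
  //
  // as encoded in isStandardRealloc() and isGRealloc() above.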
1308 | if (!isStandardRealloc(Call) && !isGRealloc(Call)) |
1309 | return; |
1310 | |
1311 | State = ReallocMemAux(C, Call, ShouldFreeOnFail, State, |
1312 | Family: AllocationFamily(AF_Malloc)); |
1313 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1314 | C.addTransition(State); |
1315 | } |
1316 | |
1317 | void MallocChecker::checkCalloc(ProgramStateRef State, const CallEvent &Call, |
1318 | CheckerContext &C) const { |
1319 | State = CallocMem(C, Call, State); |
1320 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1321 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1322 | C.addTransition(State); |
1323 | } |
1324 | |
1325 | void MallocChecker::checkFree(ProgramStateRef State, const CallEvent &Call, |
1326 | CheckerContext &C) const { |
1327 | bool IsKnownToBeAllocatedMemory = false; |
1328 | if (suppressDeallocationsInSuspiciousContexts(Call, C)) |
1329 | return; |
1330 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1331 | Family: AllocationFamily(AF_Malloc)); |
1332 | C.addTransition(State); |
1333 | } |
1334 | |
1335 | void MallocChecker::checkAlloca(ProgramStateRef State, const CallEvent &Call, |
1336 | CheckerContext &C) const { |
1337 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 0), Init: UndefinedVal(), State, |
1338 | Family: AllocationFamily(AF_Alloca)); |
1339 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1340 | C.addTransition(State); |
1341 | } |
1342 | |
1343 | void MallocChecker::checkStrdup(ProgramStateRef State, const CallEvent &Call, |
1344 | CheckerContext &C) const { |
1345 | const auto *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1346 | if (!CE) |
1347 | return; |
1348 | State = MallocMemAux(C, Call, Size: UnknownVal(), Init: UnknownVal(), State, |
1349 | Family: AllocationFamily(AF_Malloc)); |
1350 | |
1351 | C.addTransition(State); |
1352 | } |
1353 | |
1354 | void MallocChecker::checkIfNameIndex(ProgramStateRef State, |
1355 | const CallEvent &Call, |
1356 | CheckerContext &C) const { |
  // Should we model this differently? We could allocate a fixed number of
  // elements, with the last one zeroed.
1359 | State = MallocMemAux(C, Call, Size: UnknownVal(), Init: UnknownVal(), State, |
1360 | Family: AllocationFamily(AF_IfNameIndex)); |
1361 | |
1362 | C.addTransition(State); |
1363 | } |
1364 | |
1365 | void MallocChecker::checkIfFreeNameIndex(ProgramStateRef State, |
1366 | const CallEvent &Call, |
1367 | CheckerContext &C) const { |
1368 | bool IsKnownToBeAllocatedMemory = false; |
1369 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1370 | Family: AllocationFamily(AF_IfNameIndex)); |
1371 | C.addTransition(State); |
1372 | } |
1373 | |
1374 | void MallocChecker::checkCXXNewOrCXXDelete(ProgramStateRef State, |
1375 | const CallEvent &Call, |
1376 | CheckerContext &C) const { |
1377 | bool IsKnownToBeAllocatedMemory = false; |
1378 | const auto *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1379 | if (!CE) |
1380 | return; |
1381 | |
1382 | assert(isStandardNewDelete(Call)); |
1383 | |
1384 | // Process direct calls to operator new/new[]/delete/delete[] functions |
1385 | // as distinct from new/new[]/delete/delete[] expressions that are |
1386 | // processed by the checkPostStmt callbacks for CXXNewExpr and |
1387 | // CXXDeleteExpr. |
1388 | const FunctionDecl *FD = C.getCalleeDecl(CE); |
1389 | switch (FD->getOverloadedOperator()) { |
1390 | case OO_New: |
1391 | State = MallocMemAux(C, Call, SizeEx: CE->getArg(Arg: 0), Init: UndefinedVal(), State, |
1392 | Family: AllocationFamily(AF_CXXNew)); |
1393 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1394 | break; |
1395 | case OO_Array_New: |
1396 | State = MallocMemAux(C, Call, SizeEx: CE->getArg(Arg: 0), Init: UndefinedVal(), State, |
1397 | Family: AllocationFamily(AF_CXXNewArray)); |
1398 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1399 | break; |
1400 | case OO_Delete: |
1401 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1402 | Family: AllocationFamily(AF_CXXNew)); |
1403 | break; |
1404 | case OO_Array_Delete: |
1405 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1406 | Family: AllocationFamily(AF_CXXNewArray)); |
1407 | break; |
1408 | default: |
1409 | assert(false && "not a new/delete operator" ); |
1410 | return; |
1411 | } |
1412 | |
1413 | C.addTransition(State); |
1414 | } |
1415 | |
1416 | void MallocChecker::checkGMalloc0(ProgramStateRef State, const CallEvent &Call, |
1417 | CheckerContext &C) const { |
1418 | SValBuilder &svalBuilder = C.getSValBuilder(); |
1419 | SVal zeroVal = svalBuilder.makeZeroVal(type: svalBuilder.getContext().CharTy); |
1420 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 0), Init: zeroVal, State, |
1421 | Family: AllocationFamily(AF_Malloc)); |
1422 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1423 | C.addTransition(State); |
1424 | } |
1425 | |
1426 | void MallocChecker::checkGMemdup(ProgramStateRef State, const CallEvent &Call, |
1427 | CheckerContext &C) const { |
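  // g_memdup(mem, byte_size) returns a copy of 'mem'; the allocation size is
  // the second argument, hence index 1 below.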
1428 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 1), Init: UnknownVal(), State, |
1429 | Family: AllocationFamily(AF_Malloc)); |
1430 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1431 | C.addTransition(State); |
1432 | } |
1433 | |
1434 | void MallocChecker::checkGMallocN(ProgramStateRef State, const CallEvent &Call, |
1435 | CheckerContext &C) const { |
1436 | SVal Init = UndefinedVal(); |
1437 | SVal TotalSize = evalMulForBufferSize(C, Blocks: Call.getArgExpr(Index: 0), BlockBytes: Call.getArgExpr(Index: 1)); |
1438 | State = MallocMemAux(C, Call, Size: TotalSize, Init, State, |
1439 | Family: AllocationFamily(AF_Malloc)); |
1440 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1441 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1442 | C.addTransition(State); |
1443 | } |
1444 | |
1445 | void MallocChecker::checkGMallocN0(ProgramStateRef State, const CallEvent &Call, |
1446 | CheckerContext &C) const { |
1447 | SValBuilder &SB = C.getSValBuilder(); |
1448 | SVal Init = SB.makeZeroVal(type: SB.getContext().CharTy); |
1449 | SVal TotalSize = evalMulForBufferSize(C, Blocks: Call.getArgExpr(Index: 0), BlockBytes: Call.getArgExpr(Index: 1)); |
1450 | State = MallocMemAux(C, Call, Size: TotalSize, Init, State, |
1451 | Family: AllocationFamily(AF_Malloc)); |
1452 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1453 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1454 | C.addTransition(State); |
1455 | } |
1456 | |
1457 | static bool isFromStdNamespace(const CallEvent &Call) { |
1458 | const Decl *FD = Call.getDecl(); |
1459 | assert(FD && "a CallDescription cannot match a call without a Decl" ); |
1460 | return FD->isInStdNamespace(); |
1461 | } |
1462 | |
1463 | void MallocChecker::preGetdelim(ProgramStateRef State, const CallEvent &Call, |
1464 | CheckerContext &C) const { |
1465 | // Discard calls to the C++ standard library function std::getline(), which |
1466 | // is completely unrelated to the POSIX getline() that we're checking. |
1467 | if (isFromStdNamespace(Call)) |
1468 | return; |
1469 | |
1470 | const auto LinePtr = getPointeeVal(PtrSVal: Call.getArgSVal(Index: 0), State); |
1471 | if (!LinePtr) |
1472 | return; |
1473 | |
1474 | // FreeMemAux takes IsKnownToBeAllocated as an output parameter, and it will |
1475 | // be true after the call if the symbol was registered by this checker. |
1476 | // We do not need this value here, as FreeMemAux will take care |
1477 | // of reporting any violation of the preconditions. |
1478 | bool IsKnownToBeAllocated = false; |
1479 | State = FreeMemAux(C, ArgExpr: Call.getArgExpr(Index: 0), Call, State, Hold: false, |
1480 | IsKnownToBeAllocated, Family: AllocationFamily(AF_Malloc), ReturnsNullOnFailure: false, |
1481 | ArgValOpt: LinePtr); |
1482 | if (State) |
1483 | C.addTransition(State); |
1484 | } |
1485 | |
1486 | void MallocChecker::checkGetdelim(ProgramStateRef State, const CallEvent &Call, |
1487 | CheckerContext &C) const { |
1488 | // Discard calls to the C++ standard library function std::getline(), which |
1489 | // is completely unrelated to the POSIX getline() that we're checking. |
1490 | if (isFromStdNamespace(Call)) |
1491 | return; |
1492 | |
1493 | // Handle the post-conditions of getline and getdelim: |
1494 | // Register the new conjured value as an allocated buffer. |
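  //
  // A typical caller (hypothetical code) looks like:
  //
  //   char *line = NULL;
  //   size_t cap = 0;
  //   ssize_t len = getline(&line, &cap, stdin); // may (re)allocate 'line'
  //
  // so the buffer written through the first argument is registered as
  // allocated, with the value written through the second argument as its
  // extent.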
1495 | const CallExpr *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1496 | if (!CE) |
1497 | return; |
1498 | |
1499 | const auto LinePtr = |
1500 | getPointeeVal(PtrSVal: Call.getArgSVal(Index: 0), State)->getAs<DefinedSVal>(); |
1501 | const auto Size = |
1502 | getPointeeVal(PtrSVal: Call.getArgSVal(Index: 1), State)->getAs<DefinedSVal>(); |
1503 | if (!LinePtr || !Size || !LinePtr->getAsRegion()) |
1504 | return; |
1505 | |
1506 | State = setDynamicExtent(State, MR: LinePtr->getAsRegion(), Extent: *Size); |
1507 | C.addTransition(State: MallocUpdateRefState(C, CE, State, |
1508 | AllocationFamily(AF_Malloc), *LinePtr)); |
1509 | } |
1510 | |
1511 | void MallocChecker::checkReallocN(ProgramStateRef State, const CallEvent &Call, |
1512 | CheckerContext &C) const { |
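  // Models the g_realloc_n() style of reallocation, e.g. (hypothetically)
  //
  //   buf = g_realloc_n(buf, n_blocks, n_block_bytes);
  //
  // where the new size is n_blocks * n_block_bytes (hence SuffixWithN below
  // and the zero-size checks on both size arguments).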
1513 | State = ReallocMemAux(C, Call, /*ShouldFreeOnFail=*/false, State, |
1514 | Family: AllocationFamily(AF_Malloc), |
1515 | /*SuffixWithN=*/true); |
1516 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1517 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 2, State); |
1518 | C.addTransition(State); |
1519 | } |
1520 | |
1521 | void MallocChecker::checkOwnershipAttr(ProgramStateRef State, |
1522 | const CallEvent &Call, |
1523 | CheckerContext &C) const { |
1524 | const auto *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1525 | if (!CE) |
1526 | return; |
1527 | const FunctionDecl *FD = C.getCalleeDecl(CE); |
1528 | if (!FD) |
1529 | return; |
1530 | if (ShouldIncludeOwnershipAnnotatedFunctions || |
1531 | ChecksEnabled[CK_MismatchedDeallocatorChecker]) { |
1532 | // Check all the attributes, if there are any. |
1533 | // There can be multiple of these attributes. |
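    // When the condition above holds, user annotations such as the following
    // (hypothetical) declarations are modeled here:
    //
    //   void *my_alloc(size_t n) __attribute__((ownership_returns(malloc)));
    //   void my_free(void *p) __attribute__((ownership_takes(malloc, 1)));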
1534 | if (FD->hasAttrs()) |
1535 | for (const auto *I : FD->specific_attrs<OwnershipAttr>()) { |
1536 | switch (I->getOwnKind()) { |
1537 | case OwnershipAttr::Returns: |
1538 | State = MallocMemReturnsAttr(C, Call, I, State); |
1539 | break; |
1540 | case OwnershipAttr::Takes: |
1541 | case OwnershipAttr::Holds: |
1542 | State = FreeMemAttr(C, Call, I, State); |
1543 | break; |
1544 | } |
1545 | } |
1546 | } |
1547 | C.addTransition(State); |
1548 | } |
1549 | |
1550 | bool MallocChecker::evalCall(const CallEvent &Call, CheckerContext &C) const { |
1551 | if (!Call.getOriginExpr()) |
1552 | return false; |
1553 | |
1554 | ProgramStateRef State = C.getState(); |
1555 | |
1556 | if (const CheckFn *Callback = FreeingMemFnMap.lookup(Call)) { |
1557 | (*Callback)(this, State, Call, C); |
1558 | return true; |
1559 | } |
1560 | |
1561 | if (const CheckFn *Callback = AllocatingMemFnMap.lookup(Call)) { |
1562 | State = MallocBindRetVal(C, Call, State, isAlloca: false); |
1563 | (*Callback)(this, State, Call, C); |
1564 | return true; |
1565 | } |
1566 | |
1567 | if (const CheckFn *Callback = ReallocatingMemFnMap.lookup(Call)) { |
1568 | State = MallocBindRetVal(C, Call, State, isAlloca: false); |
1569 | (*Callback)(this, State, Call, C); |
1570 | return true; |
1571 | } |
1572 | |
1573 | if (isStandardNew(Call)) { |
1574 | State = MallocBindRetVal(C, Call, State, isAlloca: false); |
1575 | checkCXXNewOrCXXDelete(State, Call, C); |
1576 | return true; |
1577 | } |
1578 | |
1579 | if (isStandardDelete(Call)) { |
1580 | checkCXXNewOrCXXDelete(State, Call, C); |
1581 | return true; |
1582 | } |
1583 | |
1584 | if (const CheckFn *Callback = AllocaMemFnMap.lookup(Call)) { |
1585 | State = MallocBindRetVal(C, Call, State, isAlloca: true); |
1586 | (*Callback)(this, State, Call, C); |
1587 | return true; |
1588 | } |
1589 | |
1590 | if (isFreeingOwnershipAttrCall(Call)) { |
1591 | checkOwnershipAttr(State, Call, C); |
1592 | return true; |
1593 | } |
1594 | |
1595 | if (isAllocatingOwnershipAttrCall(Call)) { |
1596 | State = MallocBindRetVal(C, Call, State, isAlloca: false); |
1597 | checkOwnershipAttr(State, Call, C); |
1598 | return true; |
1599 | } |
1600 | |
1601 | return false; |
1602 | } |
1603 | |
// Performs a check for zero-sized allocations.
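// For instance (hypothetical caller code):
//
//   char *p = (char *)malloc(0);
//   p[0] = 'x'; // later reported as use of zero-allocated memory
//
// The symbol is only marked here; the actual report is emitted when the
// memory is used (see HandleUseZeroAlloc).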
1605 | ProgramStateRef MallocChecker::ProcessZeroAllocCheck( |
1606 | CheckerContext &C, const CallEvent &Call, const unsigned IndexOfSizeArg, |
1607 | ProgramStateRef State, std::optional<SVal> RetVal) { |
1608 | if (!State) |
1609 | return nullptr; |
1610 | |
1611 | const Expr *Arg = nullptr; |
1612 | |
1613 | if (const CallExpr *CE = dyn_cast<CallExpr>(Val: Call.getOriginExpr())) { |
1614 | Arg = CE->getArg(Arg: IndexOfSizeArg); |
1615 | } else if (const CXXNewExpr *NE = |
1616 | dyn_cast<CXXNewExpr>(Val: Call.getOriginExpr())) { |
1617 | if (NE->isArray()) { |
1618 | Arg = *NE->getArraySize(); |
1619 | } else { |
1620 | return State; |
1621 | } |
1622 | } else { |
1623 | assert(false && "not a CallExpr or CXXNewExpr" ); |
1624 | return nullptr; |
1625 | } |
1626 | |
1627 | if (!RetVal) |
1628 | RetVal = State->getSVal(Call.getOriginExpr(), C.getLocationContext()); |
1629 | |
1630 | assert(Arg); |
1631 | |
1632 | auto DefArgVal = |
1633 | State->getSVal(Arg, Call.getLocationContext()).getAs<DefinedSVal>(); |
1634 | |
1635 | if (!DefArgVal) |
1636 | return State; |
1637 | |
1638 | // Check if the allocation size is 0. |
1639 | ProgramStateRef TrueState, FalseState; |
1640 | SValBuilder &SvalBuilder = State->getStateManager().getSValBuilder(); |
1641 | DefinedSVal Zero = |
1642 | SvalBuilder.makeZeroVal(type: Arg->getType()).castAs<DefinedSVal>(); |
1643 | |
1644 | std::tie(args&: TrueState, args&: FalseState) = |
1645 | State->assume(SvalBuilder.evalEQ(State, *DefArgVal, Zero)); |
1646 | |
1647 | if (TrueState && !FalseState) { |
1648 | SymbolRef Sym = RetVal->getAsLocSymbol(); |
1649 | if (!Sym) |
1650 | return State; |
1651 | |
1652 | const RefState *RS = State->get<RegionState>(key: Sym); |
1653 | if (RS) { |
1654 | if (RS->isAllocated()) |
1655 | return TrueState->set<RegionState>(K: Sym, |
1656 | E: RefState::getAllocatedOfSizeZero(RS)); |
1657 | else |
1658 | return State; |
1659 | } else { |
1660 | // Case of zero-size realloc. Historically 'realloc(ptr, 0)' is treated as |
1661 | // 'free(ptr)' and the returned value from 'realloc(ptr, 0)' is not |
1662 | // tracked. Add zero-reallocated Sym to the state to catch references |
1663 | // to zero-allocated memory. |
1664 | return TrueState->add<ReallocSizeZeroSymbols>(K: Sym); |
1665 | } |
1666 | } |
1667 | |
1668 | // Assume the value is non-zero going forward. |
1669 | assert(FalseState); |
1670 | return FalseState; |
1671 | } |
1672 | |
1673 | static QualType getDeepPointeeType(QualType T) { |
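  // Strip all levels of pointer (or reference) indirection from T, so that
  // e.g. a (hypothetical) 'Foo **' yields 'Foo'.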
1674 | QualType Result = T, PointeeType = T->getPointeeType(); |
1675 | while (!PointeeType.isNull()) { |
1676 | Result = PointeeType; |
1677 | PointeeType = PointeeType->getPointeeType(); |
1678 | } |
1679 | return Result; |
1680 | } |
1681 | |
1682 | /// \returns true if the constructor invoked by \p NE has an argument of a |
1683 | /// pointer/reference to a record type. |
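/// For example, for a (hypothetical) 'new Node(&Registry)' the constructor
/// receives a pointer to a record and may store 'this' through it, so the
/// caller treats such an allocation as escaped rather than tracking it.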
1684 | static bool hasNonTrivialConstructorCall(const CXXNewExpr *NE) { |
1685 | |
1686 | const CXXConstructExpr *ConstructE = NE->getConstructExpr(); |
1687 | if (!ConstructE) |
1688 | return false; |
1689 | |
1690 | if (!NE->getAllocatedType()->getAsCXXRecordDecl()) |
1691 | return false; |
1692 | |
1693 | const CXXConstructorDecl *CtorD = ConstructE->getConstructor(); |
1694 | |
1695 | // Iterate over the constructor parameters. |
1696 | for (const auto *CtorParam : CtorD->parameters()) { |
1697 | |
1698 | QualType CtorParamPointeeT = CtorParam->getType()->getPointeeType(); |
1699 | if (CtorParamPointeeT.isNull()) |
1700 | continue; |
1701 | |
1702 | CtorParamPointeeT = getDeepPointeeType(CtorParamPointeeT); |
1703 | |
1704 | if (CtorParamPointeeT->getAsCXXRecordDecl()) |
1705 | return true; |
1706 | } |
1707 | |
1708 | return false; |
1709 | } |
1710 | |
1711 | ProgramStateRef |
1712 | MallocChecker::processNewAllocation(const CXXAllocatorCall &Call, |
1713 | CheckerContext &C, |
1714 | AllocationFamily Family) const { |
1715 | if (!isStandardNewDelete(FD: Call)) |
1716 | return nullptr; |
1717 | |
1718 | const CXXNewExpr *NE = Call.getOriginExpr(); |
1719 | const ParentMap &PM = C.getLocationContext()->getParentMap(); |
1720 | ProgramStateRef State = C.getState(); |
1721 | |
  // Non-trivial constructors have a chance to escape 'this', but marking all
  // invocations of trivial constructors as escaped would cause too great a
  // reduction in true positives, so let's just do that for constructors that
  // have an argument of pointer-to-record type.
1726 | if (!PM.isConsumedExpr(NE) && hasNonTrivialConstructorCall(NE)) |
1727 | return State; |
1728 | |
  // The return value from operator new is bound to a specified initialization
  // value (if any) and we don't want to lose this value. So we call
  // MallocUpdateRefState() instead of MallocMemAux(), which would break the
  // existing binding.
1733 | SVal Target = Call.getObjectUnderConstruction(); |
1734 | if (Call.getOriginExpr()->isArray()) { |
1735 | if (auto SizeEx = NE->getArraySize()) |
1736 | checkTaintedness(C, Call, SizeSVal: C.getSVal(*SizeEx), State, |
1737 | Family: AllocationFamily(AF_CXXNewArray)); |
1738 | } |
1739 | |
1740 | State = MallocUpdateRefState(C, NE, State, Family, Target); |
1741 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State, RetVal: Target); |
1742 | return State; |
1743 | } |
1744 | |
1745 | void MallocChecker::checkNewAllocator(const CXXAllocatorCall &Call, |
1746 | CheckerContext &C) const { |
1747 | if (!C.wasInlined) { |
1748 | ProgramStateRef State = processNewAllocation( |
1749 | Call, C, |
1750 | Family: AllocationFamily(Call.getOriginExpr()->isArray() ? AF_CXXNewArray |
1751 | : AF_CXXNew)); |
1752 | C.addTransition(State); |
1753 | } |
1754 | } |
1755 | |
1756 | static bool isKnownDeallocObjCMethodName(const ObjCMethodCall &Call) { |
1757 | // If the first selector piece is one of the names below, assume that the |
1758 | // object takes ownership of the memory, promising to eventually deallocate it |
1759 | // with free(). |
1760 | // Ex: [NSData dataWithBytesNoCopy:bytes length:10]; |
1761 | // (...unless a 'freeWhenDone' parameter is false, but that's checked later.) |
1762 | StringRef FirstSlot = Call.getSelector().getNameForSlot(argIndex: 0); |
1763 | return FirstSlot == "dataWithBytesNoCopy" || |
1764 | FirstSlot == "initWithBytesNoCopy" || |
1765 | FirstSlot == "initWithCharactersNoCopy" ; |
1766 | } |
1767 | |
1768 | static std::optional<bool> getFreeWhenDoneArg(const ObjCMethodCall &Call) { |
1769 | Selector S = Call.getSelector(); |
1770 | |
1771 | // FIXME: We should not rely on fully-constrained symbols being folded. |
1772 | for (unsigned i = 1; i < S.getNumArgs(); ++i) |
1773 | if (S.getNameForSlot(argIndex: i) == "freeWhenDone" ) |
1774 | return !Call.getArgSVal(Index: i).isZeroConstant(); |
1775 | |
1776 | return std::nullopt; |
1777 | } |
1778 | |
1779 | void MallocChecker::checkPostObjCMessage(const ObjCMethodCall &Call, |
1780 | CheckerContext &C) const { |
1781 | if (C.wasInlined) |
1782 | return; |
1783 | |
1784 | if (!isKnownDeallocObjCMethodName(Call)) |
1785 | return; |
1786 | |
1787 | if (std::optional<bool> FreeWhenDone = getFreeWhenDoneArg(Call)) |
1788 | if (!*FreeWhenDone) |
1789 | return; |
1790 | |
1791 | if (Call.hasNonZeroCallbackArg()) |
1792 | return; |
1793 | |
1794 | bool IsKnownToBeAllocatedMemory; |
1795 | ProgramStateRef State = FreeMemAux(C, ArgExpr: Call.getArgExpr(Index: 0), Call, State: C.getState(), |
1796 | /*Hold=*/true, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1797 | Family: AllocationFamily(AF_Malloc), |
1798 | /*ReturnsNullOnFailure=*/true); |
1799 | |
1800 | C.addTransition(State); |
1801 | } |
1802 | |
1803 | ProgramStateRef |
1804 | MallocChecker::MallocMemReturnsAttr(CheckerContext &C, const CallEvent &Call, |
1805 | const OwnershipAttr *Att, |
1806 | ProgramStateRef State) const { |
1807 | if (!State) |
1808 | return nullptr; |
1809 | |
1810 | auto attrClassName = Att->getModule()->getName(); |
1811 | auto Family = AllocationFamily(AF_Custom, attrClassName); |
1812 | |
1813 | if (!Att->args().empty()) { |
1814 | return MallocMemAux(C, Call, |
1815 | Call.getArgExpr(Index: Att->args_begin()->getASTIndex()), |
1816 | UnknownVal(), State, Family); |
1817 | } |
1818 | return MallocMemAux(C, Call, UnknownVal(), UnknownVal(), State, Family); |
1819 | } |
1820 | |
1821 | ProgramStateRef MallocChecker::MallocBindRetVal(CheckerContext &C, |
1822 | const CallEvent &Call, |
1823 | ProgramStateRef State, |
1824 | bool isAlloca) const { |
1825 | const Expr *CE = Call.getOriginExpr(); |
1826 | |
1827 | // We expect the allocation functions to return a pointer. |
1828 | if (!Loc::isLocType(T: CE->getType())) |
1829 | return nullptr; |
1830 | |
1831 | unsigned Count = C.blockCount(); |
1832 | SValBuilder &SVB = C.getSValBuilder(); |
1833 | const LocationContext *LCtx = C.getPredecessor()->getLocationContext(); |
1834 | DefinedSVal RetVal = |
1835 | isAlloca ? SVB.getAllocaRegionVal(E: CE, LCtx, Count) |
1836 | : SVB.getConjuredHeapSymbolVal(elem: Call.getCFGElementRef(), LCtx, |
1837 | type: CE->getType(), Count); |
1838 | return State->BindExpr(CE, C.getLocationContext(), RetVal); |
1839 | } |
1840 | |
1841 | ProgramStateRef MallocChecker::MallocMemAux(CheckerContext &C, |
1842 | const CallEvent &Call, |
1843 | const Expr *SizeEx, SVal Init, |
1844 | ProgramStateRef State, |
1845 | AllocationFamily Family) const { |
1846 | if (!State) |
1847 | return nullptr; |
1848 | |
1849 | assert(SizeEx); |
1850 | return MallocMemAux(C, Call, Size: C.getSVal(SizeEx), Init, State, Family); |
1851 | } |
1852 | |
1853 | void MallocChecker::reportTaintBug(StringRef Msg, ProgramStateRef State, |
1854 | CheckerContext &C, |
1855 | llvm::ArrayRef<SymbolRef> TaintedSyms, |
1856 | AllocationFamily Family) const { |
1857 | if (ExplodedNode *N = C.generateNonFatalErrorNode(State, Tag: this)) { |
1858 | if (!BT_TaintedAlloc) |
1859 | BT_TaintedAlloc.reset(p: new BugType(CheckNames[CK_TaintedAllocChecker], |
1860 | "Tainted Memory Allocation" , |
1861 | categories::TaintedData)); |
1862 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_TaintedAlloc, args&: Msg, args&: N); |
1863 | for (auto TaintedSym : TaintedSyms) { |
1864 | R->markInteresting(sym: TaintedSym); |
1865 | } |
1866 | C.emitReport(R: std::move(R)); |
1867 | } |
1868 | } |
1869 | |
1870 | void MallocChecker::checkTaintedness(CheckerContext &C, const CallEvent &Call, |
1871 | const SVal SizeSVal, ProgramStateRef State, |
1872 | AllocationFamily Family) const { |
1873 | if (!ChecksEnabled[CK_TaintedAllocChecker]) |
1874 | return; |
1875 | std::vector<SymbolRef> TaintedSyms = |
1876 | taint::getTaintedSymbols(State, V: SizeSVal); |
1877 | if (TaintedSyms.empty()) |
1878 | return; |
1879 | |
1880 | SValBuilder &SVB = C.getSValBuilder(); |
1881 | QualType SizeTy = SVB.getContext().getSizeType(); |
1882 | QualType CmpTy = SVB.getConditionType(); |
  // If the size is tainted, warn when it may be larger than SIZE_MAX/4.
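  // For instance (hypothetical caller code):
  //
  //   size_t len;
  //   char *buf = NULL;
  //   if (scanf("%zu", &len) == 1)   // 'len' is now tainted
  //     buf = malloc(len);           // warned unless len < SIZE_MAX/4 is known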
1885 | BasicValueFactory &BVF = SVB.getBasicValueFactory(); |
1886 | const llvm::APSInt MaxValInt = BVF.getMaxValue(T: SizeTy); |
1887 | NonLoc MaxLength = |
1888 | SVB.makeIntVal(integer: MaxValInt / APSIntType(MaxValInt).getValue(RawValue: 4)); |
1889 | std::optional<NonLoc> SizeNL = SizeSVal.getAs<NonLoc>(); |
1890 | auto Cmp = SVB.evalBinOpNN(state: State, op: BO_GE, lhs: *SizeNL, rhs: MaxLength, resultTy: CmpTy) |
1891 | .getAs<DefinedOrUnknownSVal>(); |
1892 | if (!Cmp) |
1893 | return; |
1894 | auto [StateTooLarge, StateNotTooLarge] = State->assume(Cond: *Cmp); |
1895 | if (!StateTooLarge && StateNotTooLarge) { |
1896 | // We can prove that size is not too large so there is no issue. |
1897 | return; |
1898 | } |
1899 | |
1900 | std::string Callee = "Memory allocation function" ; |
1901 | if (Call.getCalleeIdentifier()) |
1902 | Callee = Call.getCalleeIdentifier()->getName().str(); |
1903 | reportTaintBug( |
      Callee + " is called with a tainted (potentially attacker controlled) "
               "value. Make sure the value is bounds-checked.",
1906 | State, C, TaintedSyms, Family); |
1907 | } |
1908 | |
1909 | ProgramStateRef MallocChecker::MallocMemAux(CheckerContext &C, |
1910 | const CallEvent &Call, SVal Size, |
1911 | SVal Init, ProgramStateRef State, |
1912 | AllocationFamily Family) const { |
1913 | if (!State) |
1914 | return nullptr; |
1915 | |
1916 | const Expr *CE = Call.getOriginExpr(); |
1917 | |
1918 | // We expect the malloc functions to return a pointer. |
  // This should have already been checked.
1920 | assert(Loc::isLocType(CE->getType()) && |
1921 | "Allocation functions must return a pointer" ); |
1922 | |
1923 | const LocationContext *LCtx = C.getPredecessor()->getLocationContext(); |
1924 | SVal RetVal = State->getSVal(CE, C.getLocationContext()); |
1925 | |
1926 | // Fill the region with the initialization value. |
1927 | State = State->bindDefaultInitial(loc: RetVal, V: Init, LCtx); |
1928 | |
1929 | // If Size is somehow undefined at this point, this line prevents a crash. |
1930 | if (Size.isUndef()) |
1931 | Size = UnknownVal(); |
1932 | |
1933 | checkTaintedness(C, Call, SizeSVal: Size, State, Family: AllocationFamily(AF_Malloc)); |
1934 | |
1935 | // Set the region's extent. |
1936 | State = setDynamicExtent(State, MR: RetVal.getAsRegion(), |
1937 | Extent: Size.castAs<DefinedOrUnknownSVal>()); |
1938 | |
1939 | return MallocUpdateRefState(C, E: CE, State, Family); |
1940 | } |
1941 | |
1942 | static ProgramStateRef MallocUpdateRefState(CheckerContext &C, const Expr *E, |
1943 | ProgramStateRef State, |
1944 | AllocationFamily Family, |
1945 | std::optional<SVal> RetVal) { |
1946 | if (!State) |
1947 | return nullptr; |
1948 | |
1949 | // Get the return value. |
1950 | if (!RetVal) |
1951 | RetVal = State->getSVal(E, C.getLocationContext()); |
1952 | |
1953 | // We expect the malloc functions to return a pointer. |
1954 | if (!RetVal->getAs<Loc>()) |
1955 | return nullptr; |
1956 | |
1957 | SymbolRef Sym = RetVal->getAsLocSymbol(); |
1958 | |
1959 | // NOTE: If this was an `alloca()` call, then `RetVal` holds an |
  // `AllocaRegion`, so `Sym` will be a null pointer because `AllocaRegion`s do
1961 | // not have an associated symbol. However, this distinct region type means |
1962 | // that we don't need to store anything about them in `RegionState`. |
1963 | |
1964 | if (Sym) |
1965 | return State->set<RegionState>(Sym, RefState::getAllocated(Family, E)); |
1966 | |
1967 | return State; |
1968 | } |
1969 | |
1970 | ProgramStateRef MallocChecker::FreeMemAttr(CheckerContext &C, |
1971 | const CallEvent &Call, |
1972 | const OwnershipAttr *Att, |
1973 | ProgramStateRef State) const { |
1974 | if (!State) |
1975 | return nullptr; |
1976 | |
1977 | auto attrClassName = Att->getModule()->getName(); |
1978 | auto Family = AllocationFamily(AF_Custom, attrClassName); |
1979 | |
1980 | bool IsKnownToBeAllocated = false; |
1981 | |
1982 | for (const auto &Arg : Att->args()) { |
1983 | ProgramStateRef StateI = |
1984 | FreeMemAux(C, Call, State, Arg.getASTIndex(), |
1985 | Att->getOwnKind() == OwnershipAttr::Holds, |
1986 | IsKnownToBeAllocated, Family); |
1987 | if (StateI) |
1988 | State = StateI; |
1989 | } |
1990 | return State; |
1991 | } |
1992 | |
1993 | ProgramStateRef MallocChecker::FreeMemAux(CheckerContext &C, |
1994 | const CallEvent &Call, |
1995 | ProgramStateRef State, unsigned Num, |
1996 | bool Hold, bool &IsKnownToBeAllocated, |
1997 | AllocationFamily Family, |
1998 | bool ReturnsNullOnFailure) const { |
1999 | if (!State) |
2000 | return nullptr; |
2001 | |
2002 | if (Call.getNumArgs() < (Num + 1)) |
2003 | return nullptr; |
2004 | |
2005 | return FreeMemAux(C, ArgExpr: Call.getArgExpr(Index: Num), Call, State, Hold, |
2006 | IsKnownToBeAllocated, Family, ReturnsNullOnFailure); |
2007 | } |
2008 | |
/// Checks whether the previous call to free on the given symbol failed; if it
/// did, returns true. Also returns the corresponding return-value symbol.
2011 | static bool didPreviousFreeFail(ProgramStateRef State, |
2012 | SymbolRef Sym, SymbolRef &RetStatusSymbol) { |
2013 | const SymbolRef *Ret = State->get<FreeReturnValue>(key: Sym); |
2014 | if (Ret) { |
2015 | assert(*Ret && "We should not store the null return symbol" ); |
2016 | ConstraintManager &CMgr = State->getConstraintManager(); |
2017 | ConditionTruthVal FreeFailed = CMgr.isNull(State, Sym: *Ret); |
2018 | RetStatusSymbol = *Ret; |
2019 | return FreeFailed.isConstrainedTrue(); |
2020 | } |
2021 | return false; |
2022 | } |
2023 | |
2024 | static void printOwnershipTakesList(raw_ostream &os, CheckerContext &C, |
2025 | const Expr *E) { |
2026 | const CallExpr *CE = dyn_cast<CallExpr>(Val: E); |
2027 | |
2028 | if (!CE) |
2029 | return; |
2030 | |
2031 | const FunctionDecl *FD = CE->getDirectCallee(); |
2032 | if (!FD) |
2033 | return; |
2034 | |
2035 | // Only one ownership_takes attribute is allowed. |
2036 | for (const auto *I : FD->specific_attrs<OwnershipAttr>()) { |
2037 | if (I->getOwnKind() != OwnershipAttr::Takes) |
2038 | continue; |
2039 | |
2040 | os << ", which takes ownership of '" << I->getModule()->getName() << '\''; |
2041 | break; |
2042 | } |
2043 | } |
2044 | |
2045 | static bool printMemFnName(raw_ostream &os, CheckerContext &C, const Expr *E) { |
2046 | if (const CallExpr *CE = dyn_cast<CallExpr>(Val: E)) { |
2047 | // FIXME: This doesn't handle indirect calls. |
2048 | const FunctionDecl *FD = CE->getDirectCallee(); |
2049 | if (!FD) |
2050 | return false; |
2051 | |
2052 | os << '\'' << *FD; |
2053 | |
2054 | if (!FD->isOverloadedOperator()) |
2055 | os << "()" ; |
2056 | |
2057 | os << '\''; |
2058 | return true; |
2059 | } |
2060 | |
2061 | if (const ObjCMessageExpr *Msg = dyn_cast<ObjCMessageExpr>(Val: E)) { |
2062 | if (Msg->isInstanceMessage()) |
2063 | os << "-" ; |
2064 | else |
2065 | os << "+" ; |
2066 | Msg->getSelector().print(OS&: os); |
2067 | return true; |
2068 | } |
2069 | |
2070 | if (const CXXNewExpr *NE = dyn_cast<CXXNewExpr>(Val: E)) { |
2071 | os << "'" |
2072 | << getOperatorSpelling(Operator: NE->getOperatorNew()->getOverloadedOperator()) |
2073 | << "'" ; |
2074 | return true; |
2075 | } |
2076 | |
2077 | if (const CXXDeleteExpr *DE = dyn_cast<CXXDeleteExpr>(Val: E)) { |
2078 | os << "'" |
2079 | << getOperatorSpelling(Operator: DE->getOperatorDelete()->getOverloadedOperator()) |
2080 | << "'" ; |
2081 | return true; |
2082 | } |
2083 | |
2084 | return false; |
2085 | } |
2086 | |
2087 | static void printExpectedAllocName(raw_ostream &os, AllocationFamily Family) { |
2088 | |
2089 | switch (Family.Kind) { |
2090 | case AF_Malloc: |
2091 | os << "'malloc()'" ; |
2092 | return; |
2093 | case AF_CXXNew: |
2094 | os << "'new'" ; |
2095 | return; |
2096 | case AF_CXXNewArray: |
2097 | os << "'new[]'" ; |
2098 | return; |
2099 | case AF_IfNameIndex: |
2100 | os << "'if_nameindex()'" ; |
2101 | return; |
2102 | case AF_InnerBuffer: |
2103 | os << "container-specific allocator" ; |
2104 | return; |
2105 | case AF_Custom: |
2106 | os << Family.CustomName.value(); |
2107 | return; |
2108 | case AF_Alloca: |
2109 | case AF_None: |
2110 | assert(false && "not a deallocation expression" ); |
2111 | } |
2112 | } |
2113 | |
2114 | static void printExpectedDeallocName(raw_ostream &os, AllocationFamily Family) { |
2115 | switch (Family.Kind) { |
2116 | case AF_Malloc: |
2117 | os << "'free()'" ; |
2118 | return; |
2119 | case AF_CXXNew: |
2120 | os << "'delete'" ; |
2121 | return; |
2122 | case AF_CXXNewArray: |
2123 | os << "'delete[]'" ; |
2124 | return; |
2125 | case AF_IfNameIndex: |
2126 | os << "'if_freenameindex()'" ; |
2127 | return; |
2128 | case AF_InnerBuffer: |
2129 | os << "container-specific deallocator" ; |
2130 | return; |
2131 | case AF_Custom: |
2132 | os << "function that takes ownership of '" << Family.CustomName.value() |
2133 | << "\'" ; |
2134 | return; |
2135 | case AF_Alloca: |
2136 | case AF_None: |
2137 | assert(false && "not a deallocation expression" ); |
2138 | } |
2139 | } |
2140 | |
2141 | ProgramStateRef |
2142 | MallocChecker::FreeMemAux(CheckerContext &C, const Expr *ArgExpr, |
2143 | const CallEvent &Call, ProgramStateRef State, |
2144 | bool Hold, bool &IsKnownToBeAllocated, |
2145 | AllocationFamily Family, bool ReturnsNullOnFailure, |
2146 | std::optional<SVal> ArgValOpt) const { |
2147 | |
2148 | if (!State) |
2149 | return nullptr; |
2150 | |
2151 | SVal ArgVal = ArgValOpt.value_or(u: C.getSVal(ArgExpr)); |
2152 | if (!isa<DefinedOrUnknownSVal>(Val: ArgVal)) |
2153 | return nullptr; |
2154 | DefinedOrUnknownSVal location = ArgVal.castAs<DefinedOrUnknownSVal>(); |
2155 | |
2156 | // Check for null dereferences. |
2157 | if (!isa<Loc>(Val: location)) |
2158 | return nullptr; |
2159 | |
  // In the explicit NULL case, no operation is performed.
2161 | ProgramStateRef notNullState, nullState; |
2162 | std::tie(args&: notNullState, args&: nullState) = State->assume(Cond: location); |
2163 | if (nullState && !notNullState) |
2164 | return nullptr; |
2165 | |
  // Unknown values could easily be okay.
  // Undefined values are handled elsewhere.
2168 | if (ArgVal.isUnknownOrUndef()) |
2169 | return nullptr; |
2170 | |
2171 | const MemRegion *R = ArgVal.getAsRegion(); |
2172 | const Expr *ParentExpr = Call.getOriginExpr(); |
2173 | |
2174 | // NOTE: We detected a bug, but the checker under whose name we would emit the |
2175 | // error could be disabled. Generally speaking, the MallocChecker family is an |
2176 | // integral part of the Static Analyzer, and disabling any part of it should |
2177 | // only be done under exceptional circumstances, such as frequent false |
2178 | // positives. If this is the case, we can reasonably believe that there are |
2179 | // serious faults in our understanding of the source code, and even if we |
  // don't emit a warning, we should terminate further analysis with a sink
2181 | // node. |
2182 | |
2183 | // Nonlocs can't be freed, of course. |
2184 | // Non-region locations (labels and fixed addresses) also shouldn't be freed. |
2185 | if (!R) { |
2186 | // Exception: |
    // If the macro ZERO_SIZE_PTR is defined, this could be kernel source code.
    // In that case, ZERO_SIZE_PTR is a special value used for a zero-sized
    // memory block, which is allowed to be freed despite not being a null
    // pointer.
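    // (For reference, the Linux kernel defines ZERO_SIZE_PTR as
    // ((void *)16).)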
2191 | if (Family.Kind != AF_Malloc || !isArgZERO_SIZE_PTR(State, C, ArgVal)) |
2192 | HandleNonHeapDealloc(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2193 | Family); |
2194 | return nullptr; |
2195 | } |
2196 | |
2197 | R = R->StripCasts(); |
2198 | |
2199 | // Blocks might show up as heap data, but should not be free()d |
2200 | if (isa<BlockDataRegion>(Val: R)) { |
2201 | HandleNonHeapDealloc(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2202 | Family); |
2203 | return nullptr; |
2204 | } |
2205 | |
2206 | // Parameters, locals, statics, globals, and memory returned by |
2207 | // __builtin_alloca() shouldn't be freed. |
2208 | if (!R->hasMemorySpace<UnknownSpaceRegion, HeapSpaceRegion>(State)) { |
2209 | // Regions returned by malloc() are represented by SymbolicRegion objects |
2210 | // within HeapSpaceRegion. Of course, free() can work on memory allocated |
2211 | // outside the current function, so UnknownSpaceRegion is also a |
2212 | // possibility here. |
2213 | |
2214 | if (isa<AllocaRegion>(Val: R)) |
2215 | HandleFreeAlloca(C, ArgVal, Range: ArgExpr->getSourceRange()); |
2216 | else |
2217 | HandleNonHeapDealloc(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2218 | Family); |
2219 | |
2220 | return nullptr; |
2221 | } |
2222 | |
2223 | const SymbolicRegion *SrBase = dyn_cast<SymbolicRegion>(Val: R->getBaseRegion()); |
2224 | // Various cases could lead to non-symbol values here. |
2225 | // For now, ignore them. |
2226 | if (!SrBase) |
2227 | return nullptr; |
2228 | |
2229 | SymbolRef SymBase = SrBase->getSymbol(); |
2230 | const RefState *RsBase = State->get<RegionState>(key: SymBase); |
2231 | SymbolRef PreviousRetStatusSymbol = nullptr; |
2232 | |
2233 | IsKnownToBeAllocated = |
2234 | RsBase && (RsBase->isAllocated() || RsBase->isAllocatedOfSizeZero()); |
2235 | |
2236 | if (RsBase) { |
2237 | |
2238 | // Memory returned by alloca() shouldn't be freed. |
2239 | if (RsBase->getAllocationFamily().Kind == AF_Alloca) { |
2240 | HandleFreeAlloca(C, ArgVal, Range: ArgExpr->getSourceRange()); |
2241 | return nullptr; |
2242 | } |
2243 | |
2244 | // Check for double free first. |
2245 | if ((RsBase->isReleased() || RsBase->isRelinquished()) && |
2246 | !didPreviousFreeFail(State, Sym: SymBase, RetStatusSymbol&: PreviousRetStatusSymbol)) { |
2247 | HandleDoubleFree(C, Range: ParentExpr->getSourceRange(), Released: RsBase->isReleased(), |
2248 | Sym: SymBase, PrevSym: PreviousRetStatusSymbol); |
2249 | return nullptr; |
2250 | |
2251 | // If the pointer is allocated or escaped, but we are now trying to free it, |
2252 | // check that the call to free is proper. |
2253 | } else if (RsBase->isAllocated() || RsBase->isAllocatedOfSizeZero() || |
2254 | RsBase->isEscaped()) { |
2255 | |
2256 | // Check if an expected deallocation function matches the real one. |
2257 | bool DeallocMatchesAlloc = RsBase->getAllocationFamily() == Family; |
2258 | if (!DeallocMatchesAlloc) { |
2259 | HandleMismatchedDealloc(C, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2260 | RS: RsBase, Sym: SymBase, OwnershipTransferred: Hold); |
2261 | return nullptr; |
2262 | } |
2263 | |
2264 | // Check if the memory location being freed is the actual location |
2265 | // allocated, or an offset. |
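      // For example (hypothetical caller code):
      //
      //   char *p = (char *)malloc(n);
      //   free(p + 8); // reported below as an offset free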
2266 | RegionOffset Offset = R->getAsOffset(); |
2267 | if (Offset.isValid() && |
2268 | !Offset.hasSymbolicOffset() && |
2269 | Offset.getOffset() != 0) { |
2270 | const Expr *AllocExpr = cast<Expr>(Val: RsBase->getStmt()); |
2271 | HandleOffsetFree(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2272 | Family, AllocExpr); |
2273 | return nullptr; |
2274 | } |
2275 | } |
2276 | } |
2277 | |
2278 | if (SymBase->getType()->isFunctionPointerType()) { |
2279 | HandleFunctionPtrFree(C, ArgVal, Range: ArgExpr->getSourceRange(), FreeExpr: ParentExpr, |
2280 | Family); |
2281 | return nullptr; |
2282 | } |
2283 | |
  // Clean out the return-value info from the previous call to free.
2285 | State = State->remove<FreeReturnValue>(K: SymBase); |
2286 | |
2287 | // Keep track of the return value. If it is NULL, we will know that free |
2288 | // failed. |
2289 | if (ReturnsNullOnFailure) { |
2290 | SVal RetVal = C.getSVal(ParentExpr); |
2291 | SymbolRef RetStatusSymbol = RetVal.getAsSymbol(); |
2292 | if (RetStatusSymbol) { |
2293 | C.getSymbolManager().addSymbolDependency(Primary: SymBase, Dependent: RetStatusSymbol); |
2294 | State = State->set<FreeReturnValue>(K: SymBase, E: RetStatusSymbol); |
2295 | } |
2296 | } |
2297 | |
  // If we don't know anything about this symbol, a free on it may be totally
  // valid. If this is the case, let's assume that the allocation family of the
  // freeing function is the same as the symbol's allocation family, and go
  // with that.
2302 | assert(!RsBase || (RsBase && RsBase->getAllocationFamily() == Family)); |
2303 | |
  // Assume that after memory is freed, it contains unknown values. This
  // conforms to the language standards, since reading from freed memory is
  // undefined behavior and may result in arbitrary values.
2307 | State = State->invalidateRegions(Values: {location}, Elem: Call.getCFGElementRef(), |
2308 | BlockCount: C.blockCount(), LCtx: C.getLocationContext(), |
2309 | /*CausesPointerEscape=*/false, |
2310 | /*InvalidatedSymbols=*/IS: nullptr); |
2311 | |
2312 | // Normal free. |
2313 | if (Hold) |
2314 | return State->set<RegionState>(SymBase, |
2315 | RefState::getRelinquished(Family, |
2316 | ParentExpr)); |
2317 | |
2318 | return State->set<RegionState>(SymBase, |
2319 | RefState::getReleased(Family, ParentExpr)); |
2320 | } |
2321 | |
2322 | std::optional<MallocChecker::CheckKind> |
2323 | MallocChecker::getCheckIfTracked(AllocationFamily Family, |
2324 | bool IsALeakCheck) const { |
2325 | switch (Family.Kind) { |
2326 | case AF_Malloc: |
2327 | case AF_Alloca: |
2328 | case AF_Custom: |
2329 | case AF_IfNameIndex: { |
2330 | if (ChecksEnabled[CK_MallocChecker]) |
2331 | return CK_MallocChecker; |
2332 | return std::nullopt; |
2333 | } |
2334 | case AF_CXXNew: |
2335 | case AF_CXXNewArray: { |
2336 | if (IsALeakCheck) { |
2337 | if (ChecksEnabled[CK_NewDeleteLeaksChecker]) |
2338 | return CK_NewDeleteLeaksChecker; |
2339 | } |
2340 | else { |
2341 | if (ChecksEnabled[CK_NewDeleteChecker]) |
2342 | return CK_NewDeleteChecker; |
2343 | } |
2344 | return std::nullopt; |
2345 | } |
2346 | case AF_InnerBuffer: { |
2347 | if (ChecksEnabled[CK_InnerPointerChecker]) |
2348 | return CK_InnerPointerChecker; |
2349 | return std::nullopt; |
2350 | } |
2351 | case AF_None: { |
2352 | assert(false && "no family" ); |
2353 | return std::nullopt; |
2354 | } |
2355 | } |
2356 | assert(false && "unhandled family" ); |
2357 | return std::nullopt; |
2358 | } |
2359 | |
2360 | std::optional<MallocChecker::CheckKind> |
2361 | MallocChecker::getCheckIfTracked(CheckerContext &C, SymbolRef Sym, |
2362 | bool IsALeakCheck) const { |
2363 | if (C.getState()->contains<ReallocSizeZeroSymbols>(key: Sym)) |
2364 | return CK_MallocChecker; |
2365 | |
2366 | const RefState *RS = C.getState()->get<RegionState>(key: Sym); |
2367 | assert(RS); |
2368 | return getCheckIfTracked(Family: RS->getAllocationFamily(), IsALeakCheck); |
2369 | } |
2370 | |
2371 | bool MallocChecker::SummarizeValue(raw_ostream &os, SVal V) { |
2372 | if (std::optional<nonloc::ConcreteInt> IntVal = |
2373 | V.getAs<nonloc::ConcreteInt>()) |
2374 | os << "an integer (" << IntVal->getValue() << ")" ; |
2375 | else if (std::optional<loc::ConcreteInt> ConstAddr = |
2376 | V.getAs<loc::ConcreteInt>()) |
2377 | os << "a constant address (" << ConstAddr->getValue() << ")" ; |
2378 | else if (std::optional<loc::GotoLabel> Label = V.getAs<loc::GotoLabel>()) |
2379 | os << "the address of the label '" << Label->getLabel()->getName() << "'" ; |
2380 | else |
2381 | return false; |
2382 | |
2383 | return true; |
2384 | } |
2385 | |
2386 | bool MallocChecker::SummarizeRegion(ProgramStateRef State, raw_ostream &os, |
2387 | const MemRegion *MR) { |
2388 | switch (MR->getKind()) { |
2389 | case MemRegion::FunctionCodeRegionKind: { |
2390 | const NamedDecl *FD = cast<FunctionCodeRegion>(Val: MR)->getDecl(); |
2391 | if (FD) |
2392 | os << "the address of the function '" << *FD << '\''; |
2393 | else |
2394 | os << "the address of a function" ; |
2395 | return true; |
2396 | } |
2397 | case MemRegion::BlockCodeRegionKind: |
2398 | os << "block text" ; |
2399 | return true; |
2400 | case MemRegion::BlockDataRegionKind: |
    // FIXME: Should we say where the block came from?
2402 | os << "a block" ; |
2403 | return true; |
2404 | default: { |
2405 | const MemSpaceRegion *MS = MR->getMemorySpace(State); |
2406 | |
2407 | if (isa<StackLocalsSpaceRegion>(Val: MS)) { |
2408 | const VarRegion *VR = dyn_cast<VarRegion>(Val: MR); |
2409 | const VarDecl *VD; |
2410 | if (VR) |
2411 | VD = VR->getDecl(); |
2412 | else |
2413 | VD = nullptr; |
2414 | |
2415 | if (VD) |
2416 | os << "the address of the local variable '" << VD->getName() << "'" ; |
2417 | else |
2418 | os << "the address of a local stack variable" ; |
2419 | return true; |
2420 | } |
2421 | |
2422 | if (isa<StackArgumentsSpaceRegion>(Val: MS)) { |
2423 | const VarRegion *VR = dyn_cast<VarRegion>(Val: MR); |
2424 | const VarDecl *VD; |
2425 | if (VR) |
2426 | VD = VR->getDecl(); |
2427 | else |
2428 | VD = nullptr; |
2429 | |
2430 | if (VD) |
2431 | os << "the address of the parameter '" << VD->getName() << "'" ; |
2432 | else |
2433 | os << "the address of a parameter" ; |
2434 | return true; |
2435 | } |
2436 | |
2437 | if (isa<GlobalsSpaceRegion>(Val: MS)) { |
2438 | const VarRegion *VR = dyn_cast<VarRegion>(Val: MR); |
2439 | const VarDecl *VD; |
2440 | if (VR) |
2441 | VD = VR->getDecl(); |
2442 | else |
2443 | VD = nullptr; |
2444 | |
2445 | if (VD) { |
2446 | if (VD->isStaticLocal()) |
2447 | os << "the address of the static variable '" << VD->getName() << "'" ; |
2448 | else |
2449 | os << "the address of the global variable '" << VD->getName() << "'" ; |
2450 | } else |
2451 | os << "the address of a global variable" ; |
2452 | return true; |
2453 | } |
2454 | |
2455 | return false; |
2456 | } |
2457 | } |
2458 | } |
2459 | |
2460 | void MallocChecker::HandleNonHeapDealloc(CheckerContext &C, SVal ArgVal, |
2461 | SourceRange Range, |
2462 | const Expr *DeallocExpr, |
2463 | AllocationFamily Family) const { |
2464 | |
2465 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2466 | C.addSink(); |
2467 | return; |
2468 | } |
2469 | |
2470 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(Family); |
2471 | if (!CheckKind) |
2472 | return; |
2473 | |
2474 | if (ExplodedNode *N = C.generateErrorNode()) { |
2475 | if (!BT_BadFree[*CheckKind]) |
2476 | BT_BadFree[*CheckKind].reset(p: new BugType( |
2477 | CheckNames[*CheckKind], "Bad free" , categories::MemoryError)); |
2478 | |
2479 | SmallString<100> buf; |
2480 | llvm::raw_svector_ostream os(buf); |
2481 | |
2482 | const MemRegion *MR = ArgVal.getAsRegion(); |
2483 | while (const ElementRegion *ER = dyn_cast_or_null<ElementRegion>(Val: MR)) |
2484 | MR = ER->getSuperRegion(); |
2485 | |
2486 | os << "Argument to " ; |
2487 | if (!printMemFnName(os, C, E: DeallocExpr)) |
2488 | os << "deallocator" ; |
2489 | |
2490 | os << " is " ; |
2491 | bool Summarized = |
2492 | MR ? SummarizeRegion(State: C.getState(), os, MR) : SummarizeValue(os, V: ArgVal); |
2493 | if (Summarized) |
2494 | os << ", which is not memory allocated by " ; |
2495 | else |
2496 | os << "not memory allocated by " ; |
2497 | |
2498 | printExpectedAllocName(os, Family); |
2499 | |
2500 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_BadFree[*CheckKind], |
2501 | args: os.str(), args&: N); |
2502 | R->markInteresting(R: MR); |
2503 | R->addRange(R: Range); |
2504 | C.emitReport(R: std::move(R)); |
2505 | } |
2506 | } |
2507 | |
2508 | void MallocChecker::HandleFreeAlloca(CheckerContext &C, SVal ArgVal, |
2509 | SourceRange Range) const { |
2510 | |
2511 | std::optional<MallocChecker::CheckKind> CheckKind; |
2512 | |
2513 | if (ChecksEnabled[CK_MallocChecker]) |
2514 | CheckKind = CK_MallocChecker; |
2515 | else if (ChecksEnabled[CK_MismatchedDeallocatorChecker]) |
2516 | CheckKind = CK_MismatchedDeallocatorChecker; |
2517 | else { |
2518 | C.addSink(); |
2519 | return; |
2520 | } |
2521 | |
2522 | if (ExplodedNode *N = C.generateErrorNode()) { |
2523 | if (!BT_FreeAlloca[*CheckKind]) |
2524 | BT_FreeAlloca[*CheckKind].reset(p: new BugType( |
2525 | CheckNames[*CheckKind], "Free 'alloca()'" , categories::MemoryError)); |
2526 | |
2527 | auto R = std::make_unique<PathSensitiveBugReport>( |
2528 | args&: *BT_FreeAlloca[*CheckKind], |
2529 | args: "Memory allocated by 'alloca()' should not be deallocated" , args&: N); |
2530 | R->markInteresting(R: ArgVal.getAsRegion()); |
2531 | R->addRange(R: Range); |
2532 | C.emitReport(R: std::move(R)); |
2533 | } |
2534 | } |
2535 | |
2536 | void MallocChecker::HandleMismatchedDealloc(CheckerContext &C, |
2537 | SourceRange Range, |
2538 | const Expr *DeallocExpr, |
2539 | const RefState *RS, SymbolRef Sym, |
2540 | bool OwnershipTransferred) const { |
2541 | |
2542 | if (!ChecksEnabled[CK_MismatchedDeallocatorChecker]) { |
2543 | C.addSink(); |
2544 | return; |
2545 | } |
2546 | |
2547 | if (ExplodedNode *N = C.generateErrorNode()) { |
2548 | if (!BT_MismatchedDealloc) |
2549 | BT_MismatchedDealloc.reset( |
2550 | p: new BugType(CheckNames[CK_MismatchedDeallocatorChecker], |
2551 | "Bad deallocator" , categories::MemoryError)); |
2552 | |
2553 | SmallString<100> buf; |
2554 | llvm::raw_svector_ostream os(buf); |
2555 | |
2556 | const Expr *AllocExpr = cast<Expr>(Val: RS->getStmt()); |
2557 | SmallString<20> AllocBuf; |
2558 | llvm::raw_svector_ostream AllocOs(AllocBuf); |
2559 | SmallString<20> DeallocBuf; |
2560 | llvm::raw_svector_ostream DeallocOs(DeallocBuf); |
2561 | |
2562 | if (OwnershipTransferred) { |
2563 | if (printMemFnName(os&: DeallocOs, C, E: DeallocExpr)) |
2564 | os << DeallocOs.str() << " cannot" ; |
2565 | else |
2566 | os << "Cannot" ; |
2567 | |
2568 | os << " take ownership of memory" ; |
2569 | |
2570 | if (printMemFnName(os&: AllocOs, C, E: AllocExpr)) |
2571 | os << " allocated by " << AllocOs.str(); |
2572 | } else { |
2573 | os << "Memory" ; |
2574 | if (printMemFnName(os&: AllocOs, C, E: AllocExpr)) |
2575 | os << " allocated by " << AllocOs.str(); |
2576 | |
2577 | os << " should be deallocated by " ; |
2578 | printExpectedDeallocName(os, Family: RS->getAllocationFamily()); |
2579 | |
2580 | if (printMemFnName(os&: DeallocOs, C, E: DeallocExpr)) |
2581 | os << ", not " << DeallocOs.str(); |
2582 | |
2583 | printOwnershipTakesList(os, C, E: DeallocExpr); |
2584 | } |
2585 | |
2586 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_MismatchedDealloc, |
2587 | args: os.str(), args&: N); |
2588 | R->markInteresting(sym: Sym); |
2589 | R->addRange(R: Range); |
2590 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2591 | C.emitReport(R: std::move(R)); |
2592 | } |
2593 | } |
2594 | |
2595 | void MallocChecker::HandleOffsetFree(CheckerContext &C, SVal ArgVal, |
2596 | SourceRange Range, const Expr *DeallocExpr, |
2597 | AllocationFamily Family, |
2598 | const Expr *AllocExpr) const { |
2599 | |
2600 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2601 | C.addSink(); |
2602 | return; |
2603 | } |
2604 | |
2605 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(Family); |
2606 | if (!CheckKind) |
2607 | return; |
2608 | |
2609 | ExplodedNode *N = C.generateErrorNode(); |
2610 | if (!N) |
2611 | return; |
2612 | |
2613 | if (!BT_OffsetFree[*CheckKind]) |
2614 | BT_OffsetFree[*CheckKind].reset(p: new BugType( |
2615 | CheckNames[*CheckKind], "Offset free" , categories::MemoryError)); |
2616 | |
2617 | SmallString<100> buf; |
2618 | llvm::raw_svector_ostream os(buf); |
2619 | SmallString<20> AllocNameBuf; |
2620 | llvm::raw_svector_ostream AllocNameOs(AllocNameBuf); |
2621 | |
2622 | const MemRegion *MR = ArgVal.getAsRegion(); |
2623 | assert(MR && "Only MemRegion based symbols can have offset free errors" ); |
2624 | |
2625 | RegionOffset Offset = MR->getAsOffset(); |
2626 | assert((Offset.isValid() && |
2627 | !Offset.hasSymbolicOffset() && |
2628 | Offset.getOffset() != 0) && |
2629 | "Only symbols with a valid offset can have offset free errors" ); |
2630 | |
2631 | int offsetBytes = Offset.getOffset() / C.getASTContext().getCharWidth(); |
2632 | |
2633 | os << "Argument to " ; |
2634 | if (!printMemFnName(os, C, E: DeallocExpr)) |
2635 | os << "deallocator" ; |
2636 | os << " is offset by " |
2637 | << offsetBytes |
2638 | << " " |
2639 | << ((abs(x: offsetBytes) > 1) ? "bytes" : "byte" ) |
2640 | << " from the start of " ; |
2641 | if (AllocExpr && printMemFnName(os&: AllocNameOs, C, E: AllocExpr)) |
2642 | os << "memory allocated by " << AllocNameOs.str(); |
2643 | else |
2644 | os << "allocated memory" ; |
2645 | |
2646 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_OffsetFree[*CheckKind], |
2647 | args: os.str(), args&: N); |
2648 | R->markInteresting(R: MR->getBaseRegion()); |
2649 | R->addRange(R: Range); |
2650 | C.emitReport(R: std::move(R)); |
2651 | } |
2652 | |
2653 | void MallocChecker::HandleUseAfterFree(CheckerContext &C, SourceRange Range, |
2654 | SymbolRef Sym) const { |
2655 | |
2656 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker] && |
2657 | !ChecksEnabled[CK_InnerPointerChecker]) { |
2658 | C.addSink(); |
2659 | return; |
2660 | } |
2661 | |
2662 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2663 | if (!CheckKind) |
2664 | return; |
2665 | |
2666 | if (ExplodedNode *N = C.generateErrorNode()) { |
2667 | if (!BT_UseFree[*CheckKind]) |
2668 | BT_UseFree[*CheckKind].reset(p: new BugType( |
2669 | CheckNames[*CheckKind], "Use-after-free" , categories::MemoryError)); |
2670 | |
2671 | AllocationFamily AF = |
2672 | C.getState()->get<RegionState>(key: Sym)->getAllocationFamily(); |
2673 | |
2674 | auto R = std::make_unique<PathSensitiveBugReport>( |
2675 | args&: *BT_UseFree[*CheckKind], |
2676 | args: AF.Kind == AF_InnerBuffer |
2677 | ? "Inner pointer of container used after re/deallocation" |
2678 | : "Use of memory after it is freed" , |
2679 | args&: N); |
2680 | |
2681 | R->markInteresting(sym: Sym); |
2682 | R->addRange(R: Range); |
2683 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2684 | |
2685 | if (AF.Kind == AF_InnerBuffer) |
2686 | R->addVisitor(visitor: allocation_state::getInnerPointerBRVisitor(Sym)); |
2687 | |
2688 | C.emitReport(R: std::move(R)); |
2689 | } |
2690 | } |
2691 | |
2692 | void MallocChecker::HandleDoubleFree(CheckerContext &C, SourceRange Range, |
2693 | bool Released, SymbolRef Sym, |
2694 | SymbolRef PrevSym) const { |
2695 | |
2696 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2697 | C.addSink(); |
2698 | return; |
2699 | } |
2700 | |
2701 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2702 | if (!CheckKind) |
2703 | return; |
2704 | |
2705 | if (ExplodedNode *N = C.generateErrorNode()) { |
    if (!BT_DoubleFree[*CheckKind])
      BT_DoubleFree[*CheckKind].reset(new BugType(
          CheckNames[*CheckKind], "Double free", categories::MemoryError));

    auto R = std::make_unique<PathSensitiveBugReport>(
        *BT_DoubleFree[*CheckKind],
        (Released ? "Attempt to free released memory"
                  : "Attempt to free non-owned memory"),
        N);
    R->addRange(Range);
    R->markInteresting(Sym);
    if (PrevSym)
      R->markInteresting(PrevSym);
    R->addVisitor<MallocBugVisitor>(Sym);
    C.emitReport(std::move(R));
2721 | } |
2722 | } |
2723 | |
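// Illustrative sketch (hypothetical user code) of a double delete:
//
//   int *obj = new int(42);
//   delete obj;
//   delete obj; // "Attempt to delete released memory" reported here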
2724 | void MallocChecker::HandleDoubleDelete(CheckerContext &C, SymbolRef Sym) const { |
2725 | |
2726 | if (!ChecksEnabled[CK_NewDeleteChecker]) { |
2727 | C.addSink(); |
2728 | return; |
2729 | } |
2730 | |
2731 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2732 | if (!CheckKind) |
2733 | return; |
2734 | |
2735 | if (ExplodedNode *N = C.generateErrorNode()) { |
    if (!BT_DoubleDelete)
      BT_DoubleDelete.reset(new BugType(CheckNames[CK_NewDeleteChecker],
                                        "Double delete",
                                        categories::MemoryError));

    auto R = std::make_unique<PathSensitiveBugReport>(
        *BT_DoubleDelete, "Attempt to delete released memory", N);

    R->markInteresting(Sym);
    R->addVisitor<MallocBugVisitor>(Sym);
    C.emitReport(std::move(R));
2747 | } |
2748 | } |
2749 | |
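// Illustrative sketch (hypothetical user code) of using a zero-size
// allocation:
//
//   char *p = (char *)malloc(0);
//   *p = 'a'; // "Use of memory allocated with size zero" reported here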
2750 | void MallocChecker::HandleUseZeroAlloc(CheckerContext &C, SourceRange Range, |
2751 | SymbolRef Sym) const { |
2752 | |
2753 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2754 | C.addSink(); |
2755 | return; |
2756 | } |
2757 | |
2758 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2759 | |
2760 | if (!CheckKind) |
2761 | return; |
2762 | |
2763 | if (ExplodedNode *N = C.generateErrorNode()) { |
    if (!BT_UseZerroAllocated[*CheckKind])
      BT_UseZerroAllocated[*CheckKind].reset(
          new BugType(CheckNames[*CheckKind], "Use of zero allocated",
                      categories::MemoryError));

    auto R = std::make_unique<PathSensitiveBugReport>(
        *BT_UseZerroAllocated[*CheckKind],
        "Use of memory allocated with size zero", N);

    R->addRange(Range);
    if (Sym) {
      R->markInteresting(Sym);
      R->addVisitor<MallocBugVisitor>(Sym);
    }
    C.emitReport(std::move(R));
2779 | } |
2780 | } |
2781 | |
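// Illustrative sketch (hypothetical user code) of handing a function pointer
// to a deallocator:
//
//   void callback(void);
//   ...
//   free((void *)&callback); // function-pointer free reported here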
2782 | void MallocChecker::HandleFunctionPtrFree(CheckerContext &C, SVal ArgVal, |
2783 | SourceRange Range, |
2784 | const Expr *FreeExpr, |
2785 | AllocationFamily Family) const { |
2786 | if (!ChecksEnabled[CK_MallocChecker]) { |
2787 | C.addSink(); |
2788 | return; |
2789 | } |
2790 | |
2791 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(Family); |
2792 | if (!CheckKind) |
2793 | return; |
2794 | |
2795 | if (ExplodedNode *N = C.generateErrorNode()) { |
2796 | if (!BT_BadFree[*CheckKind]) |
      BT_BadFree[*CheckKind].reset(new BugType(
          CheckNames[*CheckKind], "Bad free", categories::MemoryError));

    SmallString<100> Buf;
    llvm::raw_svector_ostream Os(Buf);

    const MemRegion *MR = ArgVal.getAsRegion();
    while (const ElementRegion *ER = dyn_cast_or_null<ElementRegion>(MR))
      MR = ER->getSuperRegion();

    Os << "Argument to ";
    if (!printMemFnName(Os, C, FreeExpr))
      Os << "deallocator";

    Os << " is a function pointer";

    auto R = std::make_unique<PathSensitiveBugReport>(*BT_BadFree[*CheckKind],
                                                      Os.str(), N);
    R->markInteresting(MR);
    R->addRange(Range);
    C.emitReport(std::move(R));
2818 | } |
2819 | } |
2820 | |
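// A rough sketch (hypothetical user code) of the realloc semantics modeled
// below:
//
//   p = realloc(NULL, n); // equivalent to malloc(n)
//   q = realloc(p, 0);    // the old block is freed; q is left unconstrained
//   r = realloc(p, n);    // old block freed, new block allocated; what must
//                         // be freed after a failure depends on the API
//                         // (tracked via ReallocPair below)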
2821 | ProgramStateRef |
2822 | MallocChecker::ReallocMemAux(CheckerContext &C, const CallEvent &Call, |
2823 | bool ShouldFreeOnFail, ProgramStateRef State, |
2824 | AllocationFamily Family, bool SuffixWithN) const { |
2825 | if (!State) |
2826 | return nullptr; |
2827 | |
  const CallExpr *CE = cast<CallExpr>(Call.getOriginExpr());

  if (SuffixWithN && CE->getNumArgs() < 3)
    return nullptr;
  else if (CE->getNumArgs() < 2)
    return nullptr;

  const Expr *arg0Expr = CE->getArg(0);
  SVal Arg0Val = C.getSVal(arg0Expr);
  if (!isa<DefinedOrUnknownSVal>(Arg0Val))
    return nullptr;
  DefinedOrUnknownSVal arg0Val = Arg0Val.castAs<DefinedOrUnknownSVal>();

  SValBuilder &svalBuilder = C.getSValBuilder();

  DefinedOrUnknownSVal PtrEQ = svalBuilder.evalEQ(
      State, arg0Val, svalBuilder.makeNullWithType(arg0Expr->getType()));

  // Get the size argument.
  const Expr *Arg1 = CE->getArg(1);

  // Get the value of the size argument.
  SVal TotalSize = C.getSVal(Arg1);
  if (SuffixWithN)
    TotalSize = evalMulForBufferSize(C, Arg1, CE->getArg(2));
  if (!isa<DefinedOrUnknownSVal>(TotalSize))
2854 | return nullptr; |
2855 | |
2856 | // Compare the size argument to 0. |
  DefinedOrUnknownSVal SizeZero =
      svalBuilder.evalEQ(State, TotalSize.castAs<DefinedOrUnknownSVal>(),
                         svalBuilder.makeIntValWithWidth(
                             svalBuilder.getContext().getSizeType(), 0));

  ProgramStateRef StatePtrIsNull, StatePtrNotNull;
  std::tie(StatePtrIsNull, StatePtrNotNull) = State->assume(PtrEQ);
  ProgramStateRef StateSizeIsZero, StateSizeNotZero;
  std::tie(StateSizeIsZero, StateSizeNotZero) = State->assume(SizeZero);
2866 | // We only assume exceptional states if they are definitely true; if the |
2867 | // state is under-constrained, assume regular realloc behavior. |
  bool PtrIsNull = StatePtrIsNull && !StatePtrNotNull;
  bool SizeIsZero = StateSizeIsZero && !StateSizeNotZero;

  // If the ptr is NULL and the size is not 0, the call is equivalent to
  // malloc(size).
  if (PtrIsNull && !SizeIsZero) {
    ProgramStateRef stateMalloc = MallocMemAux(
        C, Call, TotalSize, UndefinedVal(), StatePtrIsNull, Family);
    return stateMalloc;
  }

  // Process as allocation of 0 bytes.
  if (PtrIsNull && SizeIsZero)
    return State;

  assert(!PtrIsNull);

  bool IsKnownToBeAllocated = false;

  // If the size is 0, free the memory.
  if (SizeIsZero)
    // The semantics of the return value are:
    // If size was equal to 0, either NULL or a pointer suitable to be passed
    // to free() is returned. We just free the input pointer and do not add
    // any constraints on the output pointer.
    if (ProgramStateRef stateFree = FreeMemAux(
            C, Call, StateSizeIsZero, 0, false, IsKnownToBeAllocated, Family))
      return stateFree;

  // Default behavior.
  if (ProgramStateRef stateFree =
          FreeMemAux(C, Call, State, 0, false, IsKnownToBeAllocated, Family)) {

    ProgramStateRef stateRealloc =
        MallocMemAux(C, Call, TotalSize, UnknownVal(), stateFree, Family);
2903 | if (!stateRealloc) |
2904 | return nullptr; |
2905 | |
2906 | OwnershipAfterReallocKind Kind = OAR_ToBeFreedAfterFailure; |
2907 | if (ShouldFreeOnFail) |
2908 | Kind = OAR_FreeOnFailure; |
2909 | else if (!IsKnownToBeAllocated) |
2910 | Kind = OAR_DoNotTrackAfterFailure; |
2911 | |
2912 | // Get the from and to pointer symbols as in toPtr = realloc(fromPtr, size). |
2913 | SymbolRef FromPtr = arg0Val.getLocSymbolInBase(); |
2914 | SVal RetVal = stateRealloc->getSVal(CE, C.getLocationContext()); |
2915 | SymbolRef ToPtr = RetVal.getAsSymbol(); |
2916 | assert(FromPtr && ToPtr && |
2917 | "By this point, FreeMemAux and MallocMemAux should have checked " |
2918 | "whether the argument or the return value is symbolic!" ); |
2919 | |
2920 | // Record the info about the reallocated symbol so that we could properly |
2921 | // process failed reallocation. |
    stateRealloc = stateRealloc->set<ReallocPairs>(ToPtr,
                                                   ReallocPair(FromPtr, Kind));
    // The reallocated symbol should stay alive for as long as the new symbol.
    C.getSymbolManager().addSymbolDependency(ToPtr, FromPtr);
2926 | return stateRealloc; |
2927 | } |
2928 | return nullptr; |
2929 | } |
2930 | |
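// A rough sketch (hypothetical user code) of the calloc model: both arguments
// are multiplied into one total size and the block is treated as
// zero-initialized:
//
//   int *p = (int *)calloc(count, sizeof(int)); // ~ malloc(count * sizeof(int)), zeroed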
2931 | ProgramStateRef MallocChecker::CallocMem(CheckerContext &C, |
2932 | const CallEvent &Call, |
2933 | ProgramStateRef State) const { |
2934 | if (!State) |
2935 | return nullptr; |
2936 | |
2937 | if (Call.getNumArgs() < 2) |
2938 | return nullptr; |
2939 | |
2940 | SValBuilder &svalBuilder = C.getSValBuilder(); |
  SVal zeroVal = svalBuilder.makeZeroVal(svalBuilder.getContext().CharTy);
  SVal TotalSize =
      evalMulForBufferSize(C, Call.getArgExpr(0), Call.getArgExpr(1));

  return MallocMemAux(C, Call, TotalSize, zeroVal, State,
                      AllocationFamily(AF_Malloc));
2947 | } |
2948 | |
2949 | MallocChecker::LeakInfo MallocChecker::getAllocationSite(const ExplodedNode *N, |
2950 | SymbolRef Sym, |
2951 | CheckerContext &C) { |
2952 | const LocationContext *LeakContext = N->getLocationContext(); |
2953 | // Walk the ExplodedGraph backwards and find the first node that referred to |
2954 | // the tracked symbol. |
2955 | const ExplodedNode *AllocNode = N; |
2956 | const MemRegion *ReferenceRegion = nullptr; |
2957 | |
2958 | while (N) { |
2959 | ProgramStateRef State = N->getState(); |
    if (!State->get<RegionState>(Sym))
2961 | break; |
2962 | |
2963 | // Find the most recent expression bound to the symbol in the current |
2964 | // context. |
2965 | if (!ReferenceRegion) { |
2966 | if (const MemRegion *MR = C.getLocationRegionIfPostStore(N)) { |
        SVal Val = State->getSVal(MR);
2968 | if (Val.getAsLocSymbol() == Sym) { |
2969 | const VarRegion *VR = MR->getBaseRegion()->getAs<VarRegion>(); |
2970 | // Do not show local variables belonging to a function other than |
2971 | // where the error is reported. |
2972 | if (!VR || (VR->getStackFrame() == LeakContext->getStackFrame())) |
2973 | ReferenceRegion = MR; |
2974 | } |
2975 | } |
2976 | } |
2977 | |
    // The allocation node is the last node in the current or parent context
    // in which the symbol was tracked.
    const LocationContext *NContext = N->getLocationContext();
    if (NContext == LeakContext ||
        NContext->isParentOf(LeakContext))
2983 | AllocNode = N; |
2984 | N = N->pred_empty() ? nullptr : *(N->pred_begin()); |
2985 | } |
2986 | |
2987 | return LeakInfo(AllocNode, ReferenceRegion); |
2988 | } |
2989 | |
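// Illustrative sketch (hypothetical user code) of a leak this report covers:
//
//   void f(void) {
//     void *p = malloc(16);
//   } // 'p' goes out of scope without free() -> potential memory leak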
2990 | void MallocChecker::HandleLeak(SymbolRef Sym, ExplodedNode *N, |
2991 | CheckerContext &C) const { |
2992 | |
2993 | if (!ChecksEnabled[CK_MallocChecker] && |
2994 | !ChecksEnabled[CK_NewDeleteLeaksChecker]) |
2995 | return; |
2996 | |
  const RefState *RS = C.getState()->get<RegionState>(Sym);
  assert(RS && "cannot leak an untracked symbol");
2999 | AllocationFamily Family = RS->getAllocationFamily(); |
3000 | |
3001 | if (Family.Kind == AF_Alloca) |
3002 | return; |
3003 | |
3004 | std::optional<MallocChecker::CheckKind> CheckKind = |
      getCheckIfTracked(Family, /*IsALeakCheck=*/true);
3006 | |
3007 | if (!CheckKind) |
3008 | return; |
3009 | |
3010 | assert(N); |
3011 | if (!BT_Leak[*CheckKind]) { |
3012 | // Leaks should not be reported if they are post-dominated by a sink: |
3013 | // (1) Sinks are higher importance bugs. |
3014 | // (2) NoReturnFunctionChecker uses sink nodes to represent paths ending |
3015 | // with __noreturn functions such as assert() or exit(). We choose not |
3016 | // to report leaks on such paths. |
    BT_Leak[*CheckKind].reset(new BugType(CheckNames[*CheckKind], "Memory leak",
                                          categories::MemoryError,
                                          /*SuppressOnSink=*/true));
3020 | } |
3021 | |
3022 | // Most bug reports are cached at the location where they occurred. |
3023 | // With leaks, we want to unique them by the location where they were |
3024 | // allocated, and only report a single path. |
3025 | PathDiagnosticLocation LocUsedForUniqueing; |
3026 | const ExplodedNode *AllocNode = nullptr; |
3027 | const MemRegion *Region = nullptr; |
  std::tie(AllocNode, Region) = getAllocationSite(N, Sym, C);
3029 | |
3030 | const Stmt *AllocationStmt = AllocNode->getStmtForDiagnostics(); |
3031 | if (AllocationStmt) |
    LocUsedForUniqueing = PathDiagnosticLocation::createBegin(
        AllocationStmt, C.getSourceManager(), AllocNode->getLocationContext());
3035 | |
3036 | SmallString<200> buf; |
3037 | llvm::raw_svector_ostream os(buf); |
  if (Region && Region->canPrintPretty()) {
    os << "Potential leak of memory pointed to by ";
    Region->printPretty(os);
  } else {
    os << "Potential memory leak";
  }
3044 | |
  auto R = std::make_unique<PathSensitiveBugReport>(
      *BT_Leak[*CheckKind], os.str(), N, LocUsedForUniqueing,
      AllocNode->getLocationContext()->getDecl());
  R->markInteresting(Sym);
  R->addVisitor<MallocBugVisitor>(Sym, true);
  if (ShouldRegisterNoOwnershipChangeVisitor)
    R->addVisitor<NoMemOwnershipChangeVisitor>(Sym, this);
  C.emitReport(std::move(R));
3053 | } |
3054 | |
3055 | void MallocChecker::checkDeadSymbols(SymbolReaper &SymReaper, |
                                     CheckerContext &C) const {
3058 | ProgramStateRef state = C.getState(); |
3059 | RegionStateTy OldRS = state->get<RegionState>(); |
3060 | RegionStateTy::Factory &F = state->get_context<RegionState>(); |
3061 | |
3062 | RegionStateTy RS = OldRS; |
3063 | SmallVector<SymbolRef, 2> Errors; |
3064 | for (auto [Sym, State] : RS) { |
    if (SymReaper.isDead(Sym)) {
      if (State.isAllocated() || State.isAllocatedOfSizeZero())
        Errors.push_back(Sym);
      // Remove the dead symbol from the map.
      RS = F.remove(RS, Sym);
3070 | } |
3071 | } |
3072 | |
3073 | if (RS == OldRS) { |
3074 | // We shouldn't have touched other maps yet. |
3075 | assert(state->get<ReallocPairs>() == |
3076 | C.getState()->get<ReallocPairs>()); |
3077 | assert(state->get<FreeReturnValue>() == |
3078 | C.getState()->get<FreeReturnValue>()); |
3079 | return; |
3080 | } |
3081 | |
3082 | // Cleanup the Realloc Pairs Map. |
3083 | ReallocPairsTy RP = state->get<ReallocPairs>(); |
3084 | for (auto [Sym, ReallocPair] : RP) { |
    if (SymReaper.isDead(Sym) || SymReaper.isDead(ReallocPair.ReallocatedSym)) {
      state = state->remove<ReallocPairs>(Sym);
3087 | } |
3088 | } |
3089 | |
3090 | // Cleanup the FreeReturnValue Map. |
3091 | FreeReturnValueTy FR = state->get<FreeReturnValue>(); |
3092 | for (auto [Sym, RetSym] : FR) { |
    if (SymReaper.isDead(Sym) || SymReaper.isDead(RetSym)) {
      state = state->remove<FreeReturnValue>(Sym);
3095 | } |
3096 | } |
3097 | |
3098 | // Generate leak node. |
3099 | ExplodedNode *N = C.getPredecessor(); |
3100 | if (!Errors.empty()) { |
    N = C.generateNonFatalErrorNode(C.getState());
3102 | if (N) { |
3103 | for (SymbolRef Sym : Errors) { |
3104 | HandleLeak(Sym, N, C); |
3105 | } |
3106 | } |
3107 | } |
3108 | |
  C.addTransition(state->set<RegionState>(RS), N);
3110 | } |
3111 | |
3112 | void MallocChecker::checkPostCall(const CallEvent &Call, |
3113 | CheckerContext &C) const { |
3114 | if (const auto *PostFN = PostFnMap.lookup(Call)) { |
3115 | (*PostFN)(this, C.getState(), Call, C); |
3116 | return; |
3117 | } |
3118 | } |
3119 | |
3120 | void MallocChecker::checkPreCall(const CallEvent &Call, |
3121 | CheckerContext &C) const { |
3122 | |
  if (const auto *DC = dyn_cast<CXXDeallocatorCall>(&Call)) {
3124 | const CXXDeleteExpr *DE = DC->getOriginExpr(); |
3125 | |
3126 | if (!ChecksEnabled[CK_NewDeleteChecker]) |
3127 | if (SymbolRef Sym = C.getSVal(DE->getArgument()).getAsSymbol()) |
3128 | checkUseAfterFree(Sym, C, DE->getArgument()); |
3129 | |
    if (!isStandardNewDelete(DC->getDecl()))
3131 | return; |
3132 | |
3133 | ProgramStateRef State = C.getState(); |
3134 | bool IsKnownToBeAllocated; |
    State = FreeMemAux(
        C, DE->getArgument(), Call, State,
        /*Hold*/ false, IsKnownToBeAllocated,
        AllocationFamily(DE->isArrayForm() ? AF_CXXNewArray : AF_CXXNew));
3139 | |
3140 | C.addTransition(State); |
3141 | return; |
3142 | } |
3143 | |
  if (const auto *DC = dyn_cast<CXXDestructorCall>(&Call)) {
3145 | SymbolRef Sym = DC->getCXXThisVal().getAsSymbol(); |
3146 | if (!Sym || checkDoubleDelete(Sym, C)) |
3147 | return; |
3148 | } |
3149 | |
  // We need to handle getline pre-conditions here before the pointed-to
  // region gets invalidated by StreamChecker.
3152 | if (const auto *PreFN = PreFnMap.lookup(Call)) { |
3153 | (*PreFN)(this, C.getState(), Call, C); |
3154 | return; |
3155 | } |
3156 | |
3157 | // We will check for double free in the post visit. |
  if (const AnyFunctionCall *FC = dyn_cast<AnyFunctionCall>(&Call)) {
3159 | const FunctionDecl *FD = FC->getDecl(); |
3160 | if (!FD) |
3161 | return; |
3162 | |
3163 | if (ChecksEnabled[CK_MallocChecker] && isFreeingCall(Call)) |
3164 | return; |
3165 | } |
3166 | |
3167 | // Check if the callee of a method is deleted. |
  if (const CXXInstanceCall *CC = dyn_cast<CXXInstanceCall>(&Call)) {
3169 | SymbolRef Sym = CC->getCXXThisVal().getAsSymbol(); |
3170 | if (!Sym || checkUseAfterFree(Sym, C, CC->getCXXThisExpr())) |
3171 | return; |
3172 | } |
3173 | |
3174 | // Check arguments for being used after free. |
3175 | for (unsigned I = 0, E = Call.getNumArgs(); I != E; ++I) { |
    SVal ArgSVal = Call.getArgSVal(I);
    if (isa<Loc>(ArgSVal)) {
3178 | SymbolRef Sym = ArgSVal.getAsSymbol(); |
3179 | if (!Sym) |
3180 | continue; |
      if (checkUseAfterFree(Sym, C, Call.getArgExpr(I)))
3182 | return; |
3183 | } |
3184 | } |
3185 | } |
3186 | |
3187 | void MallocChecker::checkPreStmt(const ReturnStmt *S, |
3188 | CheckerContext &C) const { |
3189 | checkEscapeOnReturn(S, C); |
3190 | } |
3191 | |
3192 | // In the CFG, automatic destructors come after the return statement. |
3193 | // This callback checks for returning memory that is freed by automatic |
3194 | // destructors, as those cannot be reached in checkPreStmt(). |
3195 | void MallocChecker::checkEndFunction(const ReturnStmt *S, |
3196 | CheckerContext &C) const { |
3197 | checkEscapeOnReturn(S, C); |
3198 | } |
3199 | |
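// Illustrative sketch (hypothetical user code) of returning freed memory,
// which checkEscapeOnReturn flags via checkUseAfterFree:
//
//   char *f(char *p) {
//     free(p);
//     return p; // use-after-free reported on the returned value
//   }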
3200 | void MallocChecker::checkEscapeOnReturn(const ReturnStmt *S, |
3201 | CheckerContext &C) const { |
3202 | if (!S) |
3203 | return; |
3204 | |
3205 | const Expr *E = S->getRetValue(); |
3206 | if (!E) |
3207 | return; |
3208 | |
3209 | // Check if we are returning a symbol. |
3210 | ProgramStateRef State = C.getState(); |
3211 | SVal RetVal = C.getSVal(E); |
3212 | SymbolRef Sym = RetVal.getAsSymbol(); |
3213 | if (!Sym) |
3214 | // If we are returning a field of the allocated struct or an array element, |
3215 | // the callee could still free the memory. |
3216 | // TODO: This logic should be a part of generic symbol escape callback. |
3217 | if (const MemRegion *MR = RetVal.getAsRegion()) |
      if (isa<FieldRegion, ElementRegion>(MR))
        if (const SymbolicRegion *BMR =
                dyn_cast<SymbolicRegion>(MR->getBaseRegion()))
3221 | Sym = BMR->getSymbol(); |
3222 | |
3223 | // Check if we are returning freed memory. |
3224 | if (Sym) |
3225 | checkUseAfterFree(Sym, C, E); |
3226 | } |
3227 | |
3228 | // TODO: Blocks should be either inlined or should call invalidate regions |
3229 | // upon invocation. After that's in place, special casing here will not be |
3230 | // needed. |
3231 | void MallocChecker::checkPostStmt(const BlockExpr *BE, |
3232 | CheckerContext &C) const { |
3233 | |
  // Scan the BlockDeclRefExprs for any object the retain count checker
  // may be tracking.
3236 | if (!BE->getBlockDecl()->hasCaptures()) |
3237 | return; |
3238 | |
3239 | ProgramStateRef state = C.getState(); |
3240 | const BlockDataRegion *R = |
      cast<BlockDataRegion>(C.getSVal(BE).getAsRegion());
3242 | |
3243 | auto ReferencedVars = R->referenced_vars(); |
3244 | if (ReferencedVars.empty()) |
3245 | return; |
3246 | |
3247 | SmallVector<const MemRegion*, 10> Regions; |
3248 | const LocationContext *LC = C.getLocationContext(); |
3249 | MemRegionManager &MemMgr = C.getSValBuilder().getRegionManager(); |
3250 | |
3251 | for (const auto &Var : ReferencedVars) { |
3252 | const VarRegion *VR = Var.getCapturedRegion(); |
3253 | if (VR->getSuperRegion() == R) { |
3254 | VR = MemMgr.getVarRegion(VR->getDecl(), LC); |
3255 | } |
3256 | Regions.push_back(VR); |
3257 | } |
3258 | |
  state =
      state->scanReachableSymbols<StopTrackingCallback>(Regions).getState();
  C.addTransition(state);
3262 | } |
3263 | |
3264 | static bool isReleased(SymbolRef Sym, CheckerContext &C) { |
3265 | assert(Sym); |
  const RefState *RS = C.getState()->get<RegionState>(Sym);
3267 | return (RS && RS->isReleased()); |
3268 | } |
3269 | |
3270 | bool MallocChecker::suppressDeallocationsInSuspiciousContexts( |
3271 | const CallEvent &Call, CheckerContext &C) const { |
3272 | if (Call.getNumArgs() == 0) |
3273 | return false; |
3274 | |
  StringRef FunctionStr = "";
  if (const auto *FD = dyn_cast<FunctionDecl>(C.getStackFrame()->getDecl()))
    if (const Stmt *Body = FD->getBody())
      if (Body->getBeginLoc().isValid())
        FunctionStr =
            Lexer::getSourceText(CharSourceRange::getTokenRange(
                                     {FD->getBeginLoc(), Body->getBeginLoc()}),
                                 C.getSourceManager(), C.getLangOpts());
3283 | |
3284 | // We do not model the Integer Set Library's retain-count based allocation. |
  if (!FunctionStr.contains("__isl_"))
3286 | return false; |
3287 | |
3288 | ProgramStateRef State = C.getState(); |
3289 | |
  for (const Expr *Arg : cast<CallExpr>(Call.getOriginExpr())->arguments())
    if (SymbolRef Sym = C.getSVal(Arg).getAsSymbol())
      if (const RefState *RS = State->get<RegionState>(Sym))
        State = State->set<RegionState>(Sym, RefState::getEscaped(RS));
3294 | |
3295 | C.addTransition(State); |
3296 | return true; |
3297 | } |
3298 | |
3299 | bool MallocChecker::checkUseAfterFree(SymbolRef Sym, CheckerContext &C, |
3300 | const Stmt *S) const { |
3301 | |
3302 | if (isReleased(Sym, C)) { |
    HandleUseAfterFree(C, S->getSourceRange(), Sym);
3304 | return true; |
3305 | } |
3306 | |
3307 | return false; |
3308 | } |
3309 | |
3310 | void MallocChecker::checkUseZeroAllocated(SymbolRef Sym, CheckerContext &C, |
3311 | const Stmt *S) const { |
3312 | assert(Sym); |
3313 | |
  if (const RefState *RS = C.getState()->get<RegionState>(Sym)) {
    if (RS->isAllocatedOfSizeZero())
      HandleUseZeroAlloc(C, RS->getStmt()->getSourceRange(), Sym);
  } else if (C.getState()->contains<ReallocSizeZeroSymbols>(Sym)) {
    HandleUseZeroAlloc(C, S->getSourceRange(), Sym);
  }
3321 | } |
3322 | |
3323 | bool MallocChecker::checkDoubleDelete(SymbolRef Sym, CheckerContext &C) const { |
3324 | |
3325 | if (isReleased(Sym, C)) { |
3326 | HandleDoubleDelete(C, Sym); |
3327 | return true; |
3328 | } |
3329 | return false; |
3330 | } |
3331 | |
3332 | // Check if the location is a freed symbolic region. |
3333 | void MallocChecker::checkLocation(SVal l, bool isLoad, const Stmt *S, |
3334 | CheckerContext &C) const { |
3335 | SymbolRef Sym = l.getLocSymbolInBase(); |
3336 | if (Sym) { |
3337 | checkUseAfterFree(Sym, C, S); |
3338 | checkUseZeroAllocated(Sym, C, S); |
3339 | } |
3340 | } |
3341 | |
// If a symbolic region is assumed to be NULL (or another constant), stop
// tracking it - assuming that allocation failed on this path.
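//
// For example (a hypothetical allocation-failure path):
//
//   char *p = (char *)malloc(n);
//   if (!p)
//     return; // 'p' is constrained to null here, so it is no longer tracked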
3344 | ProgramStateRef MallocChecker::evalAssume(ProgramStateRef state, |
3345 | SVal Cond, |
3346 | bool Assumption) const { |
3347 | RegionStateTy RS = state->get<RegionState>(); |
  for (SymbolRef Sym : llvm::make_first_range(RS)) {
    // If the symbol is assumed to be NULL, remove it from consideration.
    ConstraintManager &CMgr = state->getConstraintManager();
    ConditionTruthVal AllocFailed = CMgr.isNull(state, Sym);
    if (AllocFailed.isConstrainedTrue())
      state = state->remove<RegionState>(Sym);
3354 | } |
3355 | |
3356 | // Realloc returns 0 when reallocation fails, which means that we should |
3357 | // restore the state of the pointer being reallocated. |
3358 | ReallocPairsTy RP = state->get<ReallocPairs>(); |
3359 | for (auto [Sym, ReallocPair] : RP) { |
3360 | // If the symbol is assumed to be NULL, remove it from consideration. |
3361 | ConstraintManager &CMgr = state->getConstraintManager(); |
    ConditionTruthVal AllocFailed = CMgr.isNull(state, Sym);
    if (!AllocFailed.isConstrainedTrue())
      continue;

    SymbolRef ReallocSym = ReallocPair.ReallocatedSym;
    if (const RefState *RS = state->get<RegionState>(ReallocSym)) {
      if (RS->isReleased()) {
        switch (ReallocPair.Kind) {
        case OAR_ToBeFreedAfterFailure:
          state = state->set<RegionState>(
              ReallocSym,
              RefState::getAllocated(RS->getAllocationFamily(), RS->getStmt()));
          break;
        case OAR_DoNotTrackAfterFailure:
          state = state->remove<RegionState>(ReallocSym);
3376 | break; |
3377 | default: |
3378 | assert(ReallocPair.Kind == OAR_FreeOnFailure); |
3379 | } |
3380 | } |
3381 | } |
    state = state->remove<ReallocPairs>(Sym);
3383 | } |
3384 | |
3385 | return state; |
3386 | } |
3387 | |
3388 | bool MallocChecker::mayFreeAnyEscapedMemoryOrIsModeledExplicitly( |
3389 | const CallEvent *Call, |
3390 | ProgramStateRef State, |
3391 | SymbolRef &EscapingSymbol) const { |
3392 | assert(Call); |
3393 | EscapingSymbol = nullptr; |
3394 | |
3395 | // For now, assume that any C++ or block call can free memory. |
3396 | // TODO: If we want to be more optimistic here, we'll need to make sure that |
3397 | // regions escape to C++ containers. They seem to do that even now, but for |
3398 | // mysterious reasons. |
  if (!isa<SimpleFunctionCall, ObjCMethodCall>(Call))
3400 | return true; |
3401 | |
3402 | // Check Objective-C messages by selector name. |
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
3404 | // If it's not a framework call, or if it takes a callback, assume it |
3405 | // can free memory. |
3406 | if (!Call->isInSystemHeader() || Call->argumentsMayEscape()) |
3407 | return true; |
3408 | |
3409 | // If it's a method we know about, handle it explicitly post-call. |
3410 | // This should happen before the "freeWhenDone" check below. |
    if (isKnownDeallocObjCMethodName(*Msg))
3412 | return false; |
3413 | |
3414 | // If there's a "freeWhenDone" parameter, but the method isn't one we know |
3415 | // about, we can't be sure that the object will use free() to deallocate the |
3416 | // memory, so we can't model it explicitly. The best we can do is use it to |
3417 | // decide whether the pointer escapes. |
    if (std::optional<bool> FreeWhenDone = getFreeWhenDoneArg(*Msg))
3419 | return *FreeWhenDone; |
3420 | |
3421 | // If the first selector piece ends with "NoCopy", and there is no |
3422 | // "freeWhenDone" parameter set to zero, we know ownership is being |
3423 | // transferred. Again, though, we can't be sure that the object will use |
3424 | // free() to deallocate the memory, so we can't model it explicitly. |
    StringRef FirstSlot = Msg->getSelector().getNameForSlot(0);
    if (FirstSlot.ends_with("NoCopy"))
3427 | return true; |
3428 | |
3429 | // If the first selector starts with addPointer, insertPointer, |
3430 | // or replacePointer, assume we are dealing with NSPointerArray or similar. |
3431 | // This is similar to C++ containers (vector); we still might want to check |
3432 | // that the pointers get freed by following the container itself. |
    if (FirstSlot.starts_with("addPointer") ||
        FirstSlot.starts_with("insertPointer") ||
        FirstSlot.starts_with("replacePointer") ||
        FirstSlot == "valueWithPointer") {
3437 | return true; |
3438 | } |
3439 | |
3440 | // We should escape receiver on call to 'init'. This is especially relevant |
3441 | // to the receiver, as the corresponding symbol is usually not referenced |
3442 | // after the call. |
3443 | if (Msg->getMethodFamily() == OMF_init) { |
3444 | EscapingSymbol = Msg->getReceiverSVal().getAsSymbol(); |
3445 | return true; |
3446 | } |
3447 | |
3448 | // Otherwise, assume that the method does not free memory. |
3449 | // Most framework methods do not free memory. |
3450 | return false; |
3451 | } |
3452 | |
3453 | // At this point the only thing left to handle is straight function calls. |
  const FunctionDecl *FD = cast<SimpleFunctionCall>(Call)->getDecl();
3455 | if (!FD) |
3456 | return true; |
3457 | |
3458 | // If it's one of the allocation functions we can reason about, we model |
3459 | // its behavior explicitly. |
  if (isMemCall(*Call))
3461 | return false; |
3462 | |
3463 | // If it's not a system call, assume it frees memory. |
3464 | if (!Call->isInSystemHeader()) |
3465 | return true; |
3466 | |
3467 | // White list the system functions whose arguments escape. |
3468 | const IdentifierInfo *II = FD->getIdentifier(); |
3469 | if (!II) |
3470 | return true; |
3471 | StringRef FName = II->getName(); |
3472 | |
  // White list the 'XXXNoCopy' CoreFoundation functions.
  // We specifically check these before the generic escape checks below.
  if (FName.ends_with("NoCopy")) {
3476 | // Look for the deallocator argument. We know that the memory ownership |
3477 | // is not transferred only if the deallocator argument is |
3478 | // 'kCFAllocatorNull'. |
3479 | for (unsigned i = 1; i < Call->getNumArgs(); ++i) { |
      const Expr *ArgE = Call->getArgExpr(i)->IgnoreParenCasts();
      if (const DeclRefExpr *DE = dyn_cast<DeclRefExpr>(ArgE)) {
        StringRef DeallocatorName = DE->getFoundDecl()->getName();
        if (DeallocatorName == "kCFAllocatorNull")
          return false;
3485 | } |
3486 | } |
3487 | return true; |
3488 | } |
3489 | |
3490 | // Associating streams with malloced buffers. The pointer can escape if |
3491 | // 'closefn' is specified (and if that function does free memory), |
3492 | // but it will not if closefn is not specified. |
3493 | // Currently, we do not inspect the 'closefn' function (PR12101). |
  if (FName == "funopen")
    if (Call->getNumArgs() >= 4 && Call->getArgSVal(4).isConstant(0))
3496 | return false; |
3497 | |
3498 | // Do not warn on pointers passed to 'setbuf' when used with std streams, |
3499 | // these leaks might be intentional when setting the buffer for stdio. |
3500 | // http://stackoverflow.com/questions/2671151/who-frees-setvbuf-buffer |
  if (FName == "setbuf" || FName == "setbuffer" ||
      FName == "setlinebuf" || FName == "setvbuf") {
    if (Call->getNumArgs() >= 1) {
      const Expr *ArgE = Call->getArgExpr(0)->IgnoreParenCasts();
      if (const DeclRefExpr *ArgDRE = dyn_cast<DeclRefExpr>(ArgE))
        if (const VarDecl *D = dyn_cast<VarDecl>(ArgDRE->getDecl()))
          if (D->getCanonicalDecl()->getName().contains("std"))
3508 | return true; |
3509 | } |
3510 | } |
3511 | |
3512 | // A bunch of other functions which either take ownership of a pointer or |
3513 | // wrap the result up in a struct or object, meaning it can be freed later. |
3514 | // (See RetainCountChecker.) Not all the parameters here are invalidated, |
3515 | // but the Malloc checker cannot differentiate between them. The right way |
3516 | // of doing this would be to implement a pointer escapes callback. |
3517 | if (FName == "CGBitmapContextCreate" || |
3518 | FName == "CGBitmapContextCreateWithData" || |
3519 | FName == "CVPixelBufferCreateWithBytes" || |
3520 | FName == "CVPixelBufferCreateWithPlanarBytes" || |
      FName == "OSAtomicEnqueue") {
3522 | return true; |
3523 | } |
3524 | |
3525 | if (FName == "postEvent" && |
      FD->getQualifiedNameAsString() == "QCoreApplication::postEvent") {
3527 | return true; |
3528 | } |
3529 | |
3530 | if (FName == "connectImpl" && |
      FD->getQualifiedNameAsString() == "QObject::connectImpl") {
3532 | return true; |
3533 | } |
3534 | |
3535 | if (FName == "singleShotImpl" && |
      FD->getQualifiedNameAsString() == "QTimer::singleShotImpl") {
3537 | return true; |
3538 | } |
3539 | |
3540 | // Handle cases where we know a buffer's /address/ can escape. |
3541 | // Note that the above checks handle some special cases where we know that |
3542 | // even though the address escapes, it's still our responsibility to free the |
3543 | // buffer. |
3544 | if (Call->argumentsMayEscape()) |
3545 | return true; |
3546 | |
3547 | // Otherwise, assume that the function does not free memory. |
3548 | // Most system calls do not free the memory. |
3549 | return false; |
3550 | } |
3551 | |
3552 | ProgramStateRef MallocChecker::checkPointerEscape(ProgramStateRef State, |
3553 | const InvalidatedSymbols &Escaped, |
3554 | const CallEvent *Call, |
3555 | PointerEscapeKind Kind) const { |
3556 | return checkPointerEscapeAux(State, Escaped, Call, Kind, |
3557 | /*IsConstPointerEscape*/ false); |
3558 | } |
3559 | |
3560 | ProgramStateRef MallocChecker::checkConstPointerEscape(ProgramStateRef State, |
3561 | const InvalidatedSymbols &Escaped, |
3562 | const CallEvent *Call, |
3563 | PointerEscapeKind Kind) const { |
3564 | // If a const pointer escapes, it may not be freed(), but it could be deleted. |
3565 | return checkPointerEscapeAux(State, Escaped, Call, Kind, |
3566 | /*IsConstPointerEscape*/ true); |
3567 | } |
3568 | |
3569 | static bool checkIfNewOrNewArrayFamily(const RefState *RS) { |
3570 | return (RS->getAllocationFamily().Kind == AF_CXXNewArray || |
3571 | RS->getAllocationFamily().Kind == AF_CXXNew); |
3572 | } |
3573 | |
3574 | ProgramStateRef MallocChecker::checkPointerEscapeAux( |
3575 | ProgramStateRef State, const InvalidatedSymbols &Escaped, |
3576 | const CallEvent *Call, PointerEscapeKind Kind, |
3577 | bool IsConstPointerEscape) const { |
3578 | // If we know that the call does not free memory, or we want to process the |
3579 | // call later, keep tracking the top level arguments. |
3580 | SymbolRef EscapingSymbol = nullptr; |
3581 | if (Kind == PSK_DirectEscapeOnCall && |
3582 | !mayFreeAnyEscapedMemoryOrIsModeledExplicitly(Call, State, |
3583 | EscapingSymbol) && |
3584 | !EscapingSymbol) { |
3585 | return State; |
3586 | } |
3587 | |
3588 | for (SymbolRef sym : Escaped) { |
3589 | if (EscapingSymbol && EscapingSymbol != sym) |
3590 | continue; |
3591 | |
    if (const RefState *RS = State->get<RegionState>(sym))
      if (RS->isAllocated() || RS->isAllocatedOfSizeZero())
        if (!IsConstPointerEscape || checkIfNewOrNewArrayFamily(RS))
          State = State->set<RegionState>(sym, RefState::getEscaped(RS));
3596 | } |
3597 | return State; |
3598 | } |
3599 | |
3600 | bool MallocChecker::isArgZERO_SIZE_PTR(ProgramStateRef State, CheckerContext &C, |
3601 | SVal ArgVal) const { |
3602 | if (!KernelZeroSizePtrValue) |
    KernelZeroSizePtrValue =
        tryExpandAsInteger("ZERO_SIZE_PTR", C.getPreprocessor());
3605 | |
3606 | const llvm::APSInt *ArgValKnown = |
      C.getSValBuilder().getKnownValue(State, ArgVal);
3608 | return ArgValKnown && *KernelZeroSizePtrValue && |
3609 | ArgValKnown->getSExtValue() == **KernelZeroSizePtrValue; |
3610 | } |
3611 | |
3612 | static SymbolRef findFailedReallocSymbol(ProgramStateRef currState, |
3613 | ProgramStateRef prevState) { |
3614 | ReallocPairsTy currMap = currState->get<ReallocPairs>(); |
3615 | ReallocPairsTy prevMap = prevState->get<ReallocPairs>(); |
3616 | |
3617 | for (const ReallocPairsTy::value_type &Pair : prevMap) { |
3618 | SymbolRef sym = Pair.first; |
    if (!currMap.lookup(sym))
3620 | return sym; |
3621 | } |
3622 | |
3623 | return nullptr; |
3624 | } |
3625 | |
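// Heuristic: the destructor's class name must look like a reference-counting
// smart pointer. Hypothetical names that would match include "SharedPtr",
// "RefCountedPointer" and "IntrusivePtr".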
3626 | static bool isReferenceCountingPointerDestructor(const CXXDestructorDecl *DD) { |
  if (const IdentifierInfo *II = DD->getParent()->getIdentifier()) {
    StringRef N = II->getName();
    if (N.contains_insensitive("ptr") || N.contains_insensitive("pointer")) {
      if (N.contains_insensitive("ref") || N.contains_insensitive("cnt") ||
          N.contains_insensitive("intrusive") ||
          N.contains_insensitive("shared") || N.ends_with_insensitive("rc")) {
3633 | return true; |
3634 | } |
3635 | } |
3636 | } |
3637 | return false; |
3638 | } |
3639 | |
3640 | PathDiagnosticPieceRef MallocBugVisitor::VisitNode(const ExplodedNode *N, |
3641 | BugReporterContext &BRC, |
3642 | PathSensitiveBugReport &BR) { |
3643 | ProgramStateRef state = N->getState(); |
3644 | ProgramStateRef statePrev = N->getFirstPred()->getState(); |
3645 | |
  const RefState *RSCurr = state->get<RegionState>(Sym);
  const RefState *RSPrev = statePrev->get<RegionState>(Sym);
3648 | |
3649 | const Stmt *S = N->getStmtForDiagnostics(); |
3650 | // When dealing with containers, we sometimes want to give a note |
3651 | // even if the statement is missing. |
3652 | if (!S && (!RSCurr || RSCurr->getAllocationFamily().Kind != AF_InnerBuffer)) |
3653 | return nullptr; |
3654 | |
3655 | const LocationContext *CurrentLC = N->getLocationContext(); |
3656 | |
3657 | // If we find an atomic fetch_add or fetch_sub within the function in which |
3658 | // the pointer was released (before the release), this is likely a release |
3659 | // point of reference-counted object (like shared pointer). |
3660 | // |
3661 | // Because we don't model atomics, and also because we don't know that the |
3662 | // original reference count is positive, we should not report use-after-frees |
3663 | // on objects deleted in such functions. This can probably be improved |
3664 | // through better shared pointer modeling. |
  if (ReleaseFunctionLC && (ReleaseFunctionLC == CurrentLC ||
                            ReleaseFunctionLC->isParentOf(CurrentLC))) {
    if (const auto *AE = dyn_cast<AtomicExpr>(S)) {
      // Check for manual use of atomic builtins.
      AtomicExpr::AtomicOp Op = AE->getOp();
      if (Op == AtomicExpr::AO__c11_atomic_fetch_add ||
          Op == AtomicExpr::AO__c11_atomic_fetch_sub) {
        BR.markInvalid(getTag(), S);
        // After the report is considered invalid there is no need to proceed
        // further.
        return nullptr;
      }
    } else if (const auto *CE = dyn_cast<CallExpr>(S)) {
3678 | // Check for `std::atomic` and such. This covers both regular method calls |
3679 | // and operator calls. |
3680 | if (const auto *MD = |
              dyn_cast_or_null<CXXMethodDecl>(CE->getDirectCallee())) {
3682 | const CXXRecordDecl *RD = MD->getParent(); |
3683 | // A bit wobbly with ".contains()" because it may be like |
3684 | // "__atomic_base" or something. |
        if (StringRef(RD->getNameAsString()).contains("atomic")) {
          BR.markInvalid(getTag(), S);
          // After the report is considered invalid there is no need to proceed
          // further.
          return nullptr;
3690 | } |
3691 | } |
3692 | } |
3693 | } |
3694 | |
3695 | // FIXME: We will eventually need to handle non-statement-based events |
3696 | // (__attribute__((cleanup))). |
3697 | |
3698 | // Find out if this is an interesting point and what is the kind. |
3699 | StringRef Msg; |
3700 | std::unique_ptr<StackHintGeneratorForSymbol> StackHint = nullptr; |
3701 | SmallString<256> Buf; |
3702 | llvm::raw_svector_ostream OS(Buf); |
3703 | |
3704 | if (Mode == Normal) { |
    if (isAllocated(RSCurr, RSPrev, S)) {
      Msg = "Memory is allocated";
      StackHint = std::make_unique<StackHintGeneratorForSymbol>(
          Sym, "Returned allocated memory");
    } else if (isReleased(RSCurr, RSPrev, S)) {
      const auto Family = RSCurr->getAllocationFamily();
      switch (Family.Kind) {
      case AF_Alloca:
      case AF_Malloc:
      case AF_Custom:
      case AF_CXXNew:
      case AF_CXXNewArray:
      case AF_IfNameIndex:
        Msg = "Memory is released";
        StackHint = std::make_unique<StackHintGeneratorForSymbol>(
            Sym, "Returning; memory was released");
3721 | break; |
3722 | case AF_InnerBuffer: { |
        const MemRegion *ObjRegion =
            allocation_state::getContainerObjRegion(statePrev, Sym);
        const auto *TypedRegion = cast<TypedValueRegion>(ObjRegion);
        QualType ObjTy = TypedRegion->getValueType();
        OS << "Inner buffer of '" << ObjTy << "' ";

        if (N->getLocation().getKind() == ProgramPoint::PostImplicitCallKind) {
          OS << "deallocated by call to destructor";
          StackHint = std::make_unique<StackHintGeneratorForSymbol>(
              Sym, "Returning; inner buffer was deallocated");
        } else {
          OS << "reallocated by call to '";
          const Stmt *S = RSCurr->getStmt();
          if (const auto *MemCallE = dyn_cast<CXXMemberCallExpr>(S)) {
            OS << MemCallE->getMethodDecl()->getDeclName();
          } else if (const auto *OpCallE = dyn_cast<CXXOperatorCallExpr>(S)) {
            OS << OpCallE->getDirectCallee()->getDeclName();
          } else if (const auto *CallE = dyn_cast<CallExpr>(S)) {
            auto &CEMgr = BRC.getStateManager().getCallEventManager();
            CallEventRef<> Call =
                CEMgr.getSimpleCall(CallE, state, CurrentLC, {nullptr, 0});
            if (const auto *D = dyn_cast_or_null<NamedDecl>(Call->getDecl()))
              OS << D->getDeclName();
            else
              OS << "unknown";
          }
          OS << "'";
          StackHint = std::make_unique<StackHintGeneratorForSymbol>(
              Sym, "Returning; inner buffer was reallocated");
3752 | } |
3753 | Msg = OS.str(); |
3754 | break; |
3755 | } |
3756 | case AF_None: |
        assert(false && "Unhandled allocation family!");
3758 | return nullptr; |
3759 | } |
3760 | |
3761 | // Save the first destructor/function as release point. |
      assert(!ReleaseFunctionLC && "There should be only one release point");
      ReleaseFunctionLC = CurrentLC->getStackFrame();

      // See if we're releasing memory while inlining a destructor that
      // decrements reference counters (or one of its callees).
      // This turns on various common false positive suppressions.
      for (const LocationContext *LC = CurrentLC; LC; LC = LC->getParent()) {
        if (const auto *DD = dyn_cast<CXXDestructorDecl>(LC->getDecl())) {
          if (isReferenceCountingPointerDestructor(DD)) {
            // This immediately looks like a reference-counting destructor.
            // We're bad at guessing the original reference count of the
            // object, so suppress the report for now.
            BR.markInvalid(getTag(), DD);

            // After the report is considered invalid there is no need to proceed
            // further.
            return nullptr;
3779 | } |
3780 | |
          // Switch suspicion to the outer destructor to catch patterns like:
          // (note that the class name is distorted to bypass
          // isReferenceCountingPointerDestructor() logic)
3784 | // |
3785 | // SmartPointr::~SmartPointr() { |
3786 | // if (refcount.fetch_sub(1) == 1) |
3787 | // release_resources(); |
3788 | // } |
3789 | // void SmartPointr::release_resources() { |
3790 | // free(buffer); |
3791 | // } |
3792 | // |
3793 | // This way ReleaseFunctionLC will point to outermost destructor and |
3794 | // it would be possible to catch wider range of FP. |
3795 | // |
          // NOTE: it would be great to support something like that in C,
          // since currently patterns like the following won't be suppressed:
3798 | // |
3799 | // void doFree(struct Data *data) { free(data); } |
3800 | // void putData(struct Data *data) |
3801 | // { |
3802 | // if (refPut(data)) |
3803 | // doFree(data); |
3804 | // } |
3805 | ReleaseFunctionLC = LC->getStackFrame(); |
3806 | } |
3807 | } |
3808 | |
    } else if (isRelinquished(RSCurr, RSPrev, S)) {
      Msg = "Memory ownership is transferred";
      StackHint = std::make_unique<StackHintGeneratorForSymbol>(Sym, "");
    } else if (hasReallocFailed(RSCurr, RSPrev, S)) {
      Mode = ReallocationFailed;
      Msg = "Reallocation failed";
      StackHint = std::make_unique<StackHintGeneratorForReallocationFailed>(
          Sym, "Reallocation failed");

      if (SymbolRef sym = findFailedReallocSymbol(state, statePrev)) {
        // Is it possible to fail two reallocs WITHOUT testing in between?
        assert((!FailedReallocSymbol || FailedReallocSymbol == sym) &&
               "We only support one failed realloc at a time.");
3822 | BR.markInteresting(sym); |
3823 | FailedReallocSymbol = sym; |
3824 | } |
3825 | } |
3826 | |
3827 | // We are in a special mode if a reallocation failed later in the path. |
3828 | } else if (Mode == ReallocationFailed) { |
    assert(FailedReallocSymbol && "No symbol to look for.");

    // Is this the first appearance of the reallocated symbol?
    if (!statePrev->get<RegionState>(FailedReallocSymbol)) {
      // We're at the reallocation point.
      Msg = "Attempt to reallocate memory";
      StackHint = std::make_unique<StackHintGeneratorForSymbol>(
          Sym, "Returned reallocated memory");
3837 | FailedReallocSymbol = nullptr; |
3838 | Mode = Normal; |
3839 | } |
3840 | } |
3841 | |
3842 | if (Msg.empty()) { |
3843 | assert(!StackHint); |
3844 | return nullptr; |
3845 | } |
3846 | |
3847 | assert(StackHint); |
3848 | |
3849 | // Generate the extra diagnostic. |
3850 | PathDiagnosticLocation Pos; |
3851 | if (!S) { |
3852 | assert(RSCurr->getAllocationFamily().Kind == AF_InnerBuffer); |
3853 | auto PostImplCall = N->getLocation().getAs<PostImplicitCall>(); |
3854 | if (!PostImplCall) |
3855 | return nullptr; |
3856 | Pos = PathDiagnosticLocation(PostImplCall->getLocation(), |
3857 | BRC.getSourceManager()); |
3858 | } else { |
3859 | Pos = PathDiagnosticLocation(S, BRC.getSourceManager(), |
3860 | N->getLocationContext()); |
3861 | } |
3862 | |
  auto P = std::make_shared<PathDiagnosticEventPiece>(Pos, Msg, true);
  BR.addCallStackHint(P, std::move(StackHint));
3865 | return P; |
3866 | } |
3867 | |
3868 | void MallocChecker::printState(raw_ostream &Out, ProgramStateRef State, |
3869 | const char *NL, const char *Sep) const { |
3870 | |
3871 | RegionStateTy RS = State->get<RegionState>(); |
3872 | |
3873 | if (!RS.isEmpty()) { |
3874 | Out << Sep << "MallocChecker :" << NL; |
3875 | for (auto [Sym, Data] : RS) { |
      const RefState *RefS = State->get<RegionState>(Sym);
      AllocationFamily Family = RefS->getAllocationFamily();
      std::optional<MallocChecker::CheckKind> CheckKind =
          getCheckIfTracked(Family);
      if (!CheckKind)
        CheckKind = getCheckIfTracked(Family, /*IsALeakCheck=*/true);

      Sym->dumpToStream(Out);
      Out << " : ";
      Data.dump(Out);
      if (CheckKind)
        Out << " (" << CheckNames[*CheckKind] << ")";
3888 | Out << NL; |
3889 | } |
3890 | } |
3891 | } |
3892 | |
3893 | namespace clang { |
3894 | namespace ento { |
3895 | namespace allocation_state { |
3896 | |
3897 | ProgramStateRef |
3898 | markReleased(ProgramStateRef State, SymbolRef Sym, const Expr *Origin) { |
3899 | AllocationFamily Family(AF_InnerBuffer); |
3900 | return State->set<RegionState>(Sym, RefState::getReleased(Family, Origin)); |
3901 | } |
3902 | |
3903 | } // end namespace allocation_state |
3904 | } // end namespace ento |
3905 | } // end namespace clang |
3906 | |
3907 | // Intended to be used in InnerPointerChecker to register the part of |
3908 | // MallocChecker connected to it. |
3909 | void ento::registerInnerPointerCheckerAux(CheckerManager &mgr) { |
3910 | MallocChecker *checker = mgr.getChecker<MallocChecker>(); |
3911 | checker->ChecksEnabled[MallocChecker::CK_InnerPointerChecker] = true; |
3912 | checker->CheckNames[MallocChecker::CK_InnerPointerChecker] = |
3913 | mgr.getCurrentCheckerName(); |
3914 | } |
3915 | |
3916 | void ento::registerDynamicMemoryModeling(CheckerManager &mgr) { |
3917 | auto *checker = mgr.registerChecker<MallocChecker>(); |
3918 | checker->ShouldIncludeOwnershipAnnotatedFunctions = |
      mgr.getAnalyzerOptions().getCheckerBooleanOption(checker, "Optimistic");
  checker->ShouldRegisterNoOwnershipChangeVisitor =
      mgr.getAnalyzerOptions().getCheckerBooleanOption(
          checker, "AddNoOwnershipChangeNotes");
3923 | } |
3924 | |
3925 | bool ento::shouldRegisterDynamicMemoryModeling(const CheckerManager &mgr) { |
3926 | return true; |
3927 | } |
3928 | |
3929 | #define REGISTER_CHECKER(name) \ |
3930 | void ento::register##name(CheckerManager &mgr) { \ |
3931 | MallocChecker *checker = mgr.getChecker<MallocChecker>(); \ |
3932 | checker->ChecksEnabled[MallocChecker::CK_##name] = true; \ |
3933 | checker->CheckNames[MallocChecker::CK_##name] = \ |
3934 | mgr.getCurrentCheckerName(); \ |
3935 | } \ |
3936 | \ |
3937 | bool ento::shouldRegister##name(const CheckerManager &mgr) { return true; } |
3938 | |
3939 | REGISTER_CHECKER(MallocChecker) |
3940 | REGISTER_CHECKER(NewDeleteChecker) |
3941 | REGISTER_CHECKER(NewDeleteLeaksChecker) |
3942 | REGISTER_CHECKER(MismatchedDeallocatorChecker) |
3943 | REGISTER_CHECKER(TaintedAllocChecker) |
3944 | |