1/* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2024 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#define INCLUDE_MEMORY
23#include "system.h"
24#include "coretypes.h"
25#include "make-unique.h"
26#include "tree.h"
27#include "function.h"
28#include "basic-block.h"
29#include "gimple.h"
30#include "options.h"
31#include "bitmap.h"
32#include "diagnostic-core.h"
33#include "diagnostic-path.h"
34#include "analyzer/analyzer.h"
35#include "diagnostic-event-id.h"
36#include "analyzer/analyzer-logging.h"
37#include "analyzer/sm.h"
38#include "analyzer/pending-diagnostic.h"
39#include "analyzer/call-string.h"
40#include "analyzer/program-point.h"
41#include "analyzer/store.h"
42#include "analyzer/region-model.h"
43#include "analyzer/call-details.h"
44#include "stringpool.h"
45#include "attribs.h"
46#include "analyzer/function-set.h"
47#include "analyzer/program-state.h"
48#include "analyzer/checker-event.h"
49#include "analyzer/exploded-graph.h"
50#include "analyzer/inlining-iterator.h"
51
52#if ENABLE_ANALYZER
53
54namespace ana {
55
56namespace {
57
58/* This state machine and its various support classes track allocations
59 and deallocations.
60
61 It has a few standard allocation/deallocation pairs (e.g. new/delete),
62 and also supports user-defined ones via
63 __attribute__ ((malloc(DEALLOCATOR))).
64
65 There can be more than one valid deallocator for a given allocator,
66 for example:
67 __attribute__ ((malloc (fclose)))
68 __attribute__ ((malloc (freopen, 3)))
69 FILE* fopen (const char*, const char*);
70 A deallocator_set represents a particular set of valid deallocators.
71
72 We track the expected deallocator_set for a value, but not the allocation
73 function - there could be more than one allocator per deallocator_set.
74 For example, there could be dozens of allocators for "free" beyond just
75 malloc e.g. calloc, xstrdup, etc. We don't want to explode the number
76 of states by tracking individual allocators in the exploded graph;
77 we merely want to track "this value expects to have 'free' called on it".
78 Perhaps we can reconstruct which allocator was used later, when emitting
79 the path, if it's necessary for precision of wording of diagnostics. */
80
81class deallocator;
82class deallocator_set;
83class malloc_state_machine;
84
/* An enum for discriminating between different kinds of allocation_state.
   Stored in allocation_state::m_rs.  */

enum resource_state
{
  /* States that are independent of allocator/deallocator.  */

  /* The start state.  */
  RS_START,

  /* State for a pointer that's been unconditionally dereferenced.  */
  RS_ASSUMED_NON_NULL,

  /* State for a pointer that's known to be NULL.  */
  RS_NULL,

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global).  */
  RS_NON_HEAP,

  /* Stop state, for pointers we don't want to track any more.  */
  RS_STOP,

  /* States that relate to a specific deallocator_set.  */

  /* State for a pointer returned from an allocator that hasn't
     been checked for NULL.
     It could be a pointer to heap-allocated memory, or could be NULL.  */
  RS_UNCHECKED,

  /* State for a pointer returned from an allocator,
     known to be non-NULL.  */
  RS_NONNULL,

  /* State for a pointer passed to a deallocator.  */
  RS_FREED
};
121
/* Custom state subclass, which can optionally refer to a
   deallocator_set.  */

struct allocation_state : public state_machine::state
{
  allocation_state (const char *name, unsigned id,
		    enum resource_state rs,
		    const deallocator_set *deallocators,
		    const deallocator *deallocator)
  : state (name, id), m_rs (rs),
    m_deallocators (deallocators),
    m_deallocator (deallocator)
  {}

  void dump_to_pp (pretty_printer *pp) const override;

  const allocation_state *get_nonnull () const;

  /* Discriminant for the kind of state.  */
  enum resource_state m_rs;
  /* The set of valid deallocators for the tracked value, or NULL
     (e.g. for states that are independent of allocator/deallocator).  */
  const deallocator_set *m_deallocators;
  /* The specific deallocator (used for "freed" states), or NULL.  */
  const deallocator *m_deallocator;
};
144
/* Custom state subclass, for the "assumed-non-null" state
   where the assumption happens in a particular frame.  */

struct assumed_non_null_state : public allocation_state
{
  assumed_non_null_state (const char *name, unsigned id,
			  const frame_region *frame)
  : allocation_state (name, id, RS_ASSUMED_NON_NULL,
		      NULL, NULL),
    m_frame (frame)
  {
    /* The frame must be non-NULL; callers must supply the frame in
       which the dereference occurred.  */
    gcc_assert (m_frame);
  }

  void dump_to_pp (pretty_printer *pp) const final override;

  /* The frame in which the pointer was assumed to be non-NULL.  */
  const frame_region *m_frame;
};
163
/* An enum for choosing which wording to use in various diagnostics
   when describing deallocations.  */

enum wording
{
  /* e.g. for "free".  */
  WORDING_FREED,
  /* e.g. for "delete"/"delete []".  */
  WORDING_DELETED,
  /* Generic wording for user-defined deallocators.  */
  WORDING_DEALLOCATED,
  /* For "realloc"-style moves.  */
  WORDING_REALLOCATED
};
174
/* Base class representing a deallocation function,
   either a built-in one we know about, or one exposed via
   __attribute__((malloc(DEALLOCATOR))).  */

struct deallocator
{
  hashval_t hash () const;
  void dump_to_pp (pretty_printer *pp) const;
  static int cmp (const deallocator *a, const deallocator *b);
  static int cmp_ptr_ptr (const void *, const void *);

  /* Name to use in diagnostics.  */
  const char *m_name;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* State for a value passed to one of the deallocators.  */
  state_machine::state_t m_freed;

protected:
  /* Instances are created only via the subclasses; the ctor registers
     the "freed" state with SM.  */
  deallocator (malloc_state_machine *sm,
	       const char *name,
	       enum wording wording);
};
200
/* Subclass representing a predefined deallocator.
   e.g. "delete []", without needing a specific FUNCTION_DECL
   ahead of time.  */

struct standard_deallocator : public deallocator
{
  /* Simply forwards to the deallocator base class ctor.  */
  standard_deallocator (malloc_state_machine *sm,
			const char *name,
			enum wording wording);
};
211
/* Subclass representing a user-defined deallocator
   via __attribute__((malloc(DEALLOCATOR))) given
   a specific FUNCTION_DECL.  */

struct custom_deallocator : public deallocator
{
  /* Use the identifier of DEALLOCATOR_FNDECL as the name to show
     in diagnostics.  */
  custom_deallocator (malloc_state_machine *sm,
		      tree deallocator_fndecl,
		      enum wording wording)
  : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
		 wording)
  {
  }
};
226
/* Base class representing a set of possible deallocators.
   Often this will be just a single deallocator, but some
   allocators have multiple valid deallocators (e.g. the result of
   "fopen" can be closed by either "fclose" or "freopen").  */

struct deallocator_set
{
  deallocator_set (malloc_state_machine *sm,
		   enum wording wording);
  virtual ~deallocator_set () {}

  /* Return true if D is a valid deallocator for this set.  */
  virtual bool contains_p (const deallocator *d) const = 0;
  /* If the set contains exactly one deallocator, return it;
     otherwise return NULL.  */
  virtual const deallocator *maybe_get_single () const = 0;
  virtual void dump_to_pp (pretty_printer *pp) const = 0;
  void dump () const;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* Pointers to states.
     These states are owned by the state_machine base class.  */

  /* State for an unchecked result from an allocator using this set.  */
  state_machine::state_t m_unchecked;

  /* State for a known non-NULL result from such an allocator.  */
  state_machine::state_t m_nonnull;
};
255
/* Subclass of deallocator_set representing a set of deallocators
   defined by one or more __attribute__((malloc(DEALLOCATOR))).  */

struct custom_deallocator_set : public deallocator_set
{
  /* Key type used when consolidating sets: a vector of the member
     deallocators.  */
  typedef const auto_vec <const deallocator *> *key_t;

  custom_deallocator_set (malloc_state_machine *sm,
			  const auto_vec <const deallocator *> *vec,
			  //const char *name,
			  //const char *dealloc_funcname,
			  //unsigned arg_idx,
			  enum wording wording);

  bool contains_p (const deallocator *d) const final override;
  const deallocator *maybe_get_single () const final override;
  void dump_to_pp (pretty_printer *pp) const final override;

  /* The deallocators in this set (copied from the ctor's VEC).  */
  auto_vec <const deallocator *> m_deallocator_vec;
};
276
/* Subclass of deallocator_set representing a set of deallocators
   with a single standard_deallocator, e.g. "delete []".  */

struct standard_deallocator_set : public deallocator_set
{
  standard_deallocator_set (malloc_state_machine *sm,
			    const char *name,
			    enum wording wording);

  bool contains_p (const deallocator *d) const final override;
  const deallocator *maybe_get_single () const final override;
  void dump_to_pp (pretty_printer *pp) const final override;

  /* The sole member of the set.  */
  standard_deallocator m_deallocator;
};
292
293/* Traits class for ensuring uniqueness of deallocator_sets within
294 malloc_state_machine. */
295
296struct deallocator_set_map_traits
297{
298 typedef custom_deallocator_set::key_t key_type;
299 typedef custom_deallocator_set *value_type;
300 typedef custom_deallocator_set *compare_type;
301
302 static inline hashval_t hash (const key_type &k)
303 {
304 gcc_assert (k != NULL);
305 gcc_assert (k != reinterpret_cast<key_type> (1));
306
307 hashval_t result = 0;
308 unsigned i;
309 const deallocator *d;
310 FOR_EACH_VEC_ELT (*k, i, d)
311 result ^= d->hash ();
312 return result;
313 }
314 static inline bool equal_keys (const key_type &k1, const key_type &k2)
315 {
316 if (k1->length () != k2->length ())
317 return false;
318
319 for (unsigned i = 0; i < k1->length (); i++)
320 if ((*k1)[i] != (*k2)[i])
321 return false;
322
323 return true;
324 }
325 template <typename T>
326 static inline void remove (T &)
327 {
328 /* empty; the nodes are handled elsewhere. */
329 }
330 template <typename T>
331 static inline void mark_deleted (T &entry)
332 {
333 entry.m_key = reinterpret_cast<key_type> (1);
334 }
335 template <typename T>
336 static inline void mark_empty (T &entry)
337 {
338 entry.m_key = NULL;
339 }
340 template <typename T>
341 static inline bool is_deleted (const T &entry)
342 {
343 return entry.m_key == reinterpret_cast<key_type> (1);
344 }
345 template <typename T>
346 static inline bool is_empty (const T &entry)
347 {
348 return entry.m_key == NULL;
349 }
350 static const bool empty_zero_p = false;
351};
352
/* A state machine for detecting misuses of the malloc/free API.

   See sm-malloc.dot for an overview (keep this in-sync with that file).  */

class malloc_state_machine : public state_machine
{
public:
  typedef allocation_state custom_data_t;

  malloc_state_machine (logger *logger);
  ~malloc_state_machine ();

  /* Register a new allocation_state with the given NAME, kind RS,
     and (possibly NULL) DEALLOCATORS/DEALLOCATOR.  */
  state_t
  add_state (const char *name, enum resource_state rs,
	     const deallocator_set *deallocators,
	     const deallocator *deallocator);

  bool inherited_state_p () const final override { return false; }

  state_machine::state_t
  get_default_state (const svalue *sval) const final override
  {
    /* Constant zero pointers are implicitly in the "null" state.  */
    if (tree cst = sval->maybe_get_constant ())
      {
	if (zerop (cst))
	  return m_null;
      }
    /* Pointers into code, globals, the stack, or read-only data
       can't point to the heap.  */
    if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
      {
	const region *reg = ptr->get_pointee ();
	switch (reg->get_memory_space ())
	  {
	  default:
	    break;
	  case MEMSPACE_CODE:
	  case MEMSPACE_GLOBALS:
	  case MEMSPACE_STACK:
	  case MEMSPACE_READONLY_DATA:
	    return m_non_heap;
	  }
      }
    return m_start;
  }

  bool on_stmt (sm_context *sm_ctxt,
		const supernode *node,
		const gimple *stmt) const final override;

  void on_phi (sm_context *sm_ctxt,
	       const supernode *node,
	       const gphi *phi,
	       tree rhs) const final override;

  void on_condition (sm_context *sm_ctxt,
		     const supernode *node,
		     const gimple *stmt,
		     const svalue *lhs,
		     enum tree_code op,
		     const svalue *rhs) const final override;

  void on_pop_frame (sm_state_map *smap,
		     const frame_region *) const final override;

  bool can_purge_p (state_t s) const final override;
  std::unique_ptr<pending_diagnostic> on_leak (tree var) const final override;

  bool reset_when_passed_to_unknown_fn_p (state_t s,
					  bool is_mutable) const final override;

  state_t
  maybe_get_merged_states_nonequal (state_t state_a,
				    state_t state_b) const final override;

  static bool unaffected_by_call_p (tree fndecl);

  /* Transition PTR to an "assumed-non-null" state (for a dereference
     at STMT).  */
  void maybe_assume_non_null (sm_context *sm_ctxt,
			      tree ptr,
			      const gimple *stmt) const;

  void on_realloc_with_move (region_model *model,
			     sm_state_map *smap,
			     const svalue *old_ptr_sval,
			     const svalue *new_ptr_sval,
			     const extrinsic_state &ext_state) const;

  void transition_ptr_sval_non_null (region_model *model,
				     sm_state_map *smap,
				     const svalue *new_ptr_sval,
				     const extrinsic_state &ext_state) const;

  /* The built-in deallocator sets.  */
  standard_deallocator_set m_free;
  standard_deallocator_set m_scalar_delete;
  standard_deallocator_set m_vector_delete;

  standard_deallocator m_realloc;

  /* States that are independent of api.  */

  /* States for a pointer that's been unconditionally dereferenced
     in a particular stack frame.  */
  hash_map<const frame_region *, state_t> m_assumed_non_null;

  /* State for a pointer that's known to be NULL.  */
  state_t m_null;

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global).  */
  state_t m_non_heap; // TODO: or should this be a different state machine?
  // or do we need child values etc?

  /* Stop state, for pointers we don't want to track any more.  */
  state_t m_stop;

private:
  /* Lookup/creation helpers for deallocators and deallocator_sets,
     consolidated via the maps below.  */
  const custom_deallocator_set *
  get_or_create_custom_deallocator_set (tree allocator_fndecl);
  custom_deallocator_set *
  maybe_create_custom_deallocator_set (tree allocator_fndecl);
  const deallocator *
  get_or_create_deallocator (tree deallocator_fndecl);

  state_t
  get_or_create_assumed_non_null_state_for_frame (const frame_region *frame);

  void
  maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
					   const supernode *node,
					   const gimple *stmt,
					   const assumed_non_null_state *,
					   tree ptr) const;

  void on_allocator_call (sm_context *sm_ctxt,
			  const gcall *call,
			  const deallocator_set *deallocators,
			  bool returns_nonnull = false) const;
  void handle_free_of_non_heap (sm_context *sm_ctxt,
				const supernode *node,
				const gcall *call,
				tree arg,
				const deallocator *d) const;
  void on_deallocator_call (sm_context *sm_ctxt,
			    const supernode *node,
			    const gcall *call,
			    const deallocator *d,
			    unsigned argno) const;
  void on_realloc_call (sm_context *sm_ctxt,
			const supernode *node,
			const gcall *call) const;
  void on_zero_assignment (sm_context *sm_ctxt,
			   const gimple *stmt,
			   tree lhs) const;

  /* A map for consolidating deallocators so that they are
     unique per deallocator FUNCTION_DECL.  */
  typedef hash_map<tree, deallocator *> deallocator_map_t;
  deallocator_map_t m_deallocator_map;

  /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *.  */
  typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
  deallocator_set_cache_t m_custom_deallocator_set_cache;

  /* A map for consolidating custom_deallocator_set instances.  */
  typedef hash_map<custom_deallocator_set::key_t,
		   custom_deallocator_set *,
		   deallocator_set_map_traits> custom_deallocator_set_map_t;
  custom_deallocator_set_map_t m_custom_deallocator_set_map;

  /* Record of dynamically-allocated objects, for cleanup.  */
  auto_vec <custom_deallocator_set *> m_dynamic_sets;
  auto_vec <custom_deallocator *> m_dynamic_deallocators;
};
524
/* struct deallocator.  */

/* deallocator's ctor.  Register a "freed" state with SM for this
   deallocator.  */

deallocator::deallocator (malloc_state_machine *sm,
			  const char *name,
			  enum wording wording)
: m_name (name),
  m_wording (wording),
  m_freed (sm->add_state (name: "freed", rs: RS_FREED, NULL, deallocator: this))
{
}
535
536hashval_t
537deallocator::hash () const
538{
539 return (hashval_t)m_freed->get_id ();
540}
541
/* Print this deallocator's name, quoted, to PP.  */

void
deallocator::dump_to_pp (pretty_printer *pp) const
{
  pp_printf (pp, "%qs", m_name);
}
547
548int
549deallocator::cmp (const deallocator *a, const deallocator *b)
550{
551 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
552}
553
554int
555deallocator::cmp_ptr_ptr (const void *a, const void *b)
556{
557 return cmp (a: *(const deallocator * const *)a,
558 b: *(const deallocator * const *)b);
559}
560
561
/* struct standard_deallocator : public deallocator.  */

/* standard_deallocator's ctor: simply forward to the base class.  */

standard_deallocator::standard_deallocator (malloc_state_machine *sm,
					    const char *name,
					    enum wording wording)
: deallocator (sm, name, wording)
{
}
570
/* struct deallocator_set.  */

/* deallocator_set's ctor.  Register "unchecked" and "nonnull" states
   with SM for results of allocators that use this set.  */

deallocator_set::deallocator_set (malloc_state_machine *sm,
				  enum wording wording)
: m_wording (wording),
  m_unchecked (sm->add_state (name: "unchecked", rs: RS_UNCHECKED, deallocators: this, NULL)),
  m_nonnull (sm->add_state (name: "nonnull", rs: RS_NONNULL, deallocators: this, NULL))
{
}
580
/* Dump a description of this deallocator_set to stderr.  */

DEBUG_FUNCTION void
deallocator_set::dump () const
{
  pretty_printer pp;
  /* Match the colorization of the global diagnostic printer.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (pp: &pp);
  pp_newline (&pp);
  pp_flush (&pp);
}
593
594/* struct custom_deallocator_set : public deallocator_set. */
595
596custom_deallocator_set::
597custom_deallocator_set (malloc_state_machine *sm,
598 const auto_vec <const deallocator *> *vec,
599 enum wording wording)
600: deallocator_set (sm, wording),
601 m_deallocator_vec (vec->length ())
602{
603 unsigned i;
604 const deallocator *d;
605 FOR_EACH_VEC_ELT (*vec, i, d)
606 m_deallocator_vec.safe_push (obj: d);
607}
608
609bool
610custom_deallocator_set::contains_p (const deallocator *d) const
611{
612 unsigned i;
613 const deallocator *cd;
614 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
615 if (cd == d)
616 return true;
617 return false;
618}
619
620const deallocator *
621custom_deallocator_set::maybe_get_single () const
622{
623 if (m_deallocator_vec.length () == 1)
624 return m_deallocator_vec[0];
625 return NULL;
626}
627
628void
629custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
630{
631 pp_character (pp, '{');
632 unsigned i;
633 const deallocator *d;
634 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
635 {
636 if (i > 0)
637 pp_string (pp, ", ");
638 d->dump_to_pp (pp);
639 }
640 pp_character (pp, '}');
641}
642
/* struct standard_deallocator_set : public deallocator_set.  */

/* standard_deallocator_set's ctor: create the single member
   deallocator, sharing NAME and WORDING.  */

standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
						    const char *name,
						    enum wording wording)
: deallocator_set (sm, wording),
  m_deallocator (sm, name, wording)
{
}
652
653bool
654standard_deallocator_set::contains_p (const deallocator *d) const
655{
656 return d == &m_deallocator;
657}
658
/* Return the single deallocator in this set (always non-NULL).  */

const deallocator *
standard_deallocator_set::maybe_get_single () const
{
  return &m_deallocator;
}
664
665void
666standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
667{
668 pp_character (pp, '{');
669 pp_string (pp, m_deallocator.m_name);
670 pp_character (pp, '}');
671}
672
673/* Return STATE cast to the custom state subclass, or NULL for the start state.
674 Everything should be an allocation_state apart from the start state. */
675
676static const allocation_state *
677dyn_cast_allocation_state (state_machine::state_t state)
678{
679 if (state->get_id () == 0)
680 return NULL;
681 return static_cast <const allocation_state *> (state);
682}
683
684/* Return STATE cast to the custom state subclass, for a state that is
685 already known to not be the start state . */
686
687static const allocation_state *
688as_a_allocation_state (state_machine::state_t state)
689{
690 gcc_assert (state->get_id () != 0);
691 return static_cast <const allocation_state *> (state);
692}
693
694/* Get the resource_state for STATE. */
695
696static enum resource_state
697get_rs (state_machine::state_t state)
698{
699 if (const allocation_state *astate = dyn_cast_allocation_state (state))
700 return astate->m_rs;
701 else
702 return RS_START;
703}
704
705/* Return true if STATE is the start state. */
706
707static bool
708start_p (state_machine::state_t state)
709{
710 return get_rs (state) == RS_START;
711}
712
713/* Return true if STATE is an unchecked result from an allocator. */
714
715static bool
716unchecked_p (state_machine::state_t state)
717{
718 return get_rs (state) == RS_UNCHECKED;
719}
720
721/* Return true if STATE is a non-null result from an allocator. */
722
723static bool
724nonnull_p (state_machine::state_t state)
725{
726 return get_rs (state) == RS_NONNULL;
727}
728
729/* Return true if STATE is a value that has been passed to a deallocator. */
730
731static bool
732freed_p (state_machine::state_t state)
733{
734 return get_rs (state) == RS_FREED;
735}
736
737/* Return true if STATE is a value that has been assumed to be non-NULL. */
738
739static bool
740assumed_non_null_p (state_machine::state_t state)
741{
742 return get_rs (state) == RS_ASSUMED_NON_NULL;
743}
744
/* Class for diagnostics relating to malloc_state_machine.
   Base class providing shared wording for state-change events.  */

class malloc_diagnostic : public pending_diagnostic
{
public:
  malloc_diagnostic (const malloc_state_machine &sm, tree arg)
  : m_sm (sm), m_arg (arg)
  {}

  /* Two malloc diagnostics are "equal" if they concern the same tree.  */
  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    return same_tree_p (t1: m_arg, t2: ((const malloc_diagnostic &)base_other).m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    override
  {
    /* start -> unchecked/nonnull: an allocation.  */
    if (change.m_old_state == m_sm.get_start_state ()
	&& (unchecked_p (state: change.m_new_state) || nonnull_p (state: change.m_new_state)))
      // TODO: verify that it's the allocation stmt, not a copy
      return label_text::borrow (buffer: "allocated here");
    /* unchecked -> nonnull: the NULL-check succeeded.  */
    if (unchecked_p (state: change.m_old_state)
	&& nonnull_p (state: change.m_new_state))
      {
	if (change.m_expr)
	  return change.formatted_print (fmt: "assuming %qE is non-NULL",
					 change.m_expr);
	else
	  return change.formatted_print (fmt: "assuming %qs is non-NULL",
					 "<unknown>");
      }
    /* Transitions into the "null" state.  */
    if (change.m_new_state == m_sm.m_null)
      {
	if (unchecked_p (state: change.m_old_state))
	  {
	    /* unchecked -> null: the NULL-check failed.  */
	    if (change.m_expr)
	      return change.formatted_print (fmt: "assuming %qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print (fmt: "assuming %qs is NULL",
					     "<unknown>");
	  }
	else
	  {
	    /* Otherwise the value is simply known to be NULL.  */
	    if (change.m_expr)
	      return change.formatted_print (fmt: "%qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print (fmt: "%qs is NULL",
					     "<unknown>");
	  }
      }

    /* No description for other transitions.  */
    return label_text ();
  }

  diagnostic_event::meaning
  get_meaning_for_state_change (const evdesc::state_change &change)
    const final override
  {
    /* start -> unchecked: memory acquisition.  */
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (state: change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_acquire,
					diagnostic_event::NOUN_memory);
    /* * -> freed: memory release.  */
    if (freed_p (state: change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_release,
					diagnostic_event::NOUN_memory);
    return diagnostic_event::meaning ();
  }

protected:
  /* The owning state machine.  */
  const malloc_state_machine &m_sm;
  /* The expression the diagnostic is about.  */
  tree m_arg;
};
819
/* Concrete subclass for reporting mismatching allocator/deallocator
   diagnostics.  */

class mismatching_deallocation : public malloc_diagnostic
{
public:
  mismatching_deallocation (const malloc_state_machine &sm, tree arg,
			    const deallocator_set *expected_deallocators,
			    const deallocator *actual_dealloc)
  : malloc_diagnostic (sm, arg),
    m_expected_deallocators (expected_deallocators),
    m_actual_dealloc (actual_dealloc)
  {}

  const char *get_kind () const final override
  {
    return "mismatching_deallocation";
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_mismatching_deallocation;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    ctxt.add_cwe (cwe: 762); /* CWE-762: Mismatched Memory Management Routines.  */
    /* Use more specific wording when the expected deallocator is
       unambiguous.  */
    if (const deallocator *expected_dealloc
	  = m_expected_deallocators->maybe_get_single ())
      return ctxt.warn ("%qE should have been deallocated with %qs"
			" but was deallocated with %qs",
			m_arg, expected_dealloc->m_name,
			m_actual_dealloc->m_name);
    else
      return ctxt.warn ("%qs called on %qE returned from a mismatched"
			" allocation function",
			m_actual_dealloc->m_name, m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (unchecked_p (state: change.m_new_state))
      {
	/* Remember the allocation event so that the final event can
	   refer back to it.  */
	m_alloc_event = change.m_event_id;
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return change.formatted_print (fmt: "allocated here"
					 " (expects deallocation with %qs)",
					 expected_dealloc->m_name);
	else
	  return change.formatted_print (fmt: "allocated here");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_alloc_event.known_p ())
      {
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return ev.formatted_print
	    (fmt: "deallocated with %qs here;"
	     " allocation at %@ expects deallocation with %qs",
	     m_actual_dealloc->m_name, &m_alloc_event,
	     expected_dealloc->m_name);
	else
	  return ev.formatted_print
	    (fmt: "deallocated with %qs here;"
	     " allocated at %@",
	     m_actual_dealloc->m_name, &m_alloc_event);
      }
    return ev.formatted_print (fmt: "deallocated with %qs here",
			       m_actual_dealloc->m_name);
  }

private:
  /* The event at which the value was allocated, if known.  */
  diagnostic_event_id_t m_alloc_event;
  /* The deallocators that would have been valid.  */
  const deallocator_set *m_expected_deallocators;
  /* The deallocator that was actually used.  */
  const deallocator *m_actual_dealloc;
};
903
/* Concrete subclass for reporting double-free diagnostics.  */

class double_free : public malloc_diagnostic
{
public:
  double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
  : malloc_diagnostic (sm, arg), m_funcname (funcname)
  {}

  const char *get_kind () const final override { return "double_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_double_free;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    ctxt.add_cwe (cwe: 415); /* CWE-415: Double Free.  */
    return ctxt.warn ("double-%qs of %qE", m_funcname, m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (state: change.m_new_state))
      {
	/* Remember the first free so that the final event can refer
	   back to it.  */
	m_first_free_event = change.m_event_id;
	return change.formatted_print (fmt: "first %qs here", m_funcname);
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_call_with_state (const evdesc::call_with_state &info)
    final override
  {
    if (freed_p (state: info.m_state))
      return info.formatted_print
	(fmt: "passing freed pointer %qE in call to %qE from %qE",
	 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
    return label_text ();
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_first_free_event.known_p ())
      return ev.formatted_print (fmt: "second %qs here; first %qs was at %@",
				 m_funcname, m_funcname,
				 &m_first_free_event);
    return ev.formatted_print (fmt: "second %qs here", m_funcname);
  }

private:
  /* The event of the first deallocation, if known.  */
  diagnostic_event_id_t m_first_free_event;
  /* The name of the deallocation function, for wording
     (e.g. "free" vs "delete").  */
  const char *m_funcname;
};
961
/* Abstract subclass for describing possible bad uses of NULL.
   Responsible for describing the call that could return NULL.  */

class possible_null : public malloc_diagnostic
{
public:
  possible_null (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg)
  {}

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (state: change.m_new_state))
      {
	/* Remember where the unchecked value originated, for use by
	   subclasses' describe_final_event.  */
	m_origin_of_unchecked_event = change.m_event_id;
	return label_text::borrow (buffer: "this call could return NULL");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_return_of_state (const evdesc::return_of_state &info)
    final override
  {
    if (unchecked_p (state: info.m_state))
      return info.formatted_print (fmt: "possible return of NULL to %qE from %qE",
				   info.m_caller_fndecl, info.m_callee_fndecl);
    return label_text ();
  }

protected:
  /* The event at which the possibly-NULL value was produced, if known.  */
  diagnostic_event_id_t m_origin_of_unchecked_event;
};
996
/* Concrete subclass for describing dereference of a possible NULL
   value.  */

class possible_null_deref : public possible_null
{
public:
  possible_null_deref (const malloc_state_machine &sm, tree arg)
  : possible_null (sm, arg)
  {}

  const char *get_kind () const final override { return "possible_null_deref"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_possible_null_dereference;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-690: Unchecked Return Value to NULL Pointer Dereference.  */
    ctxt.add_cwe (cwe: 690);
    return ctxt.warn ("dereference of possibly-NULL %qE", m_arg);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    /* Refer back to the originating call when we know it.  */
    if (m_origin_of_unchecked_event.known_p ())
      return ev.formatted_print (fmt: "%qE could be NULL: unchecked value from %@",
				 ev.m_expr,
				 &m_origin_of_unchecked_event);
    else
      return ev.formatted_print (fmt: "%qE could be NULL", ev.m_expr);
  }

};
1032
1033/* Return true if FNDECL is a C++ method. */
1034
1035static bool
1036method_p (tree fndecl)
1037{
1038 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
1039}
1040
1041/* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
1042 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
1043 as called from cp_printer). */
1044
1045static label_text
1046describe_argument_index (tree fndecl, int arg_idx)
1047{
1048 if (method_p (fndecl))
1049 if (arg_idx == 0)
1050 return label_text::borrow (buffer: "'this'");
1051 pretty_printer pp;
1052 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
1053 return label_text::take (buffer: xstrdup (pp_formatted_text (&pp)));
1054}
1055
/* Subroutine for use by possible_null_arg::emit and null_arg::emit.
   Issue a note informing that the pertinent argument must be non-NULL.
   ARG_IDX is 0-based.  */

static void
inform_nonnull_attribute (tree fndecl, int arg_idx)
{
  label_text arg_desc = describe_argument_index (fndecl, arg_idx);
  inform (DECL_SOURCE_LOCATION (fndecl),
	  "argument %s of %qD must be non-null",
	  arg_desc.get (), fndecl);
  /* Ideally we would use the location of the parm and underline the
     attribute also - but we don't have the location_t values at this point
     in the middle-end.
     For reference, the C and C++ FEs have get_fndecl_argument_location.  */
}
1071
1072/* Concrete subclass for describing passing a possibly-NULL value to a
1073 function marked with __attribute__((nonnull)). */
1074
1075class possible_null_arg : public possible_null
1076{
1077public:
1078 possible_null_arg (const malloc_state_machine &sm, tree arg,
1079 tree fndecl, int arg_idx)
1080 : possible_null (sm, arg),
1081 m_fndecl (fndecl), m_arg_idx (arg_idx)
1082 {}
1083
1084 const char *get_kind () const final override { return "possible_null_arg"; }
1085
1086 bool subclass_equal_p (const pending_diagnostic &base_other)
1087 const final override
1088 {
1089 const possible_null_arg &sub_other
1090 = (const possible_null_arg &)base_other;
1091 return (same_tree_p (t1: m_arg, t2: sub_other.m_arg)
1092 && m_fndecl == sub_other.m_fndecl
1093 && m_arg_idx == sub_other.m_arg_idx);
1094 }
1095
1096 int get_controlling_option () const final override
1097 {
1098 return OPT_Wanalyzer_possible_null_argument;
1099 }
1100
1101 bool emit (diagnostic_emission_context &ctxt) final override
1102 {
1103 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
1104 auto_diagnostic_group d;
1105 ctxt.add_cwe (cwe: 690);
1106 bool warned
1107 = ctxt.warn ("use of possibly-NULL %qE where non-null expected",
1108 m_arg);
1109 if (warned)
1110 inform_nonnull_attribute (fndecl: m_fndecl, arg_idx: m_arg_idx);
1111 return warned;
1112 }
1113
1114 label_text describe_final_event (const evdesc::final_event &ev) final override
1115 {
1116 label_text arg_desc = describe_argument_index (fndecl: m_fndecl, arg_idx: m_arg_idx);
1117 label_text result;
1118 if (m_origin_of_unchecked_event.known_p ())
1119 result = ev.formatted_print (fmt: "argument %s (%qE) from %@ could be NULL"
1120 " where non-null expected",
1121 arg_desc.get (), ev.m_expr,
1122 &m_origin_of_unchecked_event);
1123 else
1124 result = ev.formatted_print (fmt: "argument %s (%qE) could be NULL"
1125 " where non-null expected",
1126 arg_desc.get (), ev.m_expr);
1127 return result;
1128 }
1129
1130private:
1131 tree m_fndecl;
1132 int m_arg_idx;
1133};
1134
1135/* Concrete subclass for describing a dereference of a NULL value. */
1136
1137class null_deref : public malloc_diagnostic
1138{
1139public:
1140 null_deref (const malloc_state_machine &sm, tree arg)
1141 : malloc_diagnostic (sm, arg) {}
1142
1143 const char *get_kind () const final override { return "null_deref"; }
1144
1145 int get_controlling_option () const final override
1146 {
1147 return OPT_Wanalyzer_null_dereference;
1148 }
1149
1150 bool terminate_path_p () const final override { return true; }
1151
1152 bool emit (diagnostic_emission_context &ctxt) final override
1153 {
1154 /* CWE-476: NULL Pointer Dereference. */
1155 ctxt.add_cwe (cwe: 476);
1156 return ctxt.warn ("dereference of NULL %qE", m_arg);
1157 }
1158
1159 label_text describe_return_of_state (const evdesc::return_of_state &info)
1160 final override
1161 {
1162 if (info.m_state == m_sm.m_null)
1163 return info.formatted_print (fmt: "return of NULL to %qE from %qE",
1164 info.m_caller_fndecl, info.m_callee_fndecl);
1165 return label_text ();
1166 }
1167
1168 label_text describe_final_event (const evdesc::final_event &ev) final override
1169 {
1170 return ev.formatted_print (fmt: "dereference of NULL %qE", ev.m_expr);
1171 }
1172
1173 /* Implementation of pending_diagnostic::supercedes_p for
1174 null-deref.
1175
1176 We want null-deref to supercede use-of-unitialized-value,
1177 so that if we have these at the same stmt, we don't emit
1178 a use-of-uninitialized, just the null-deref. */
1179
1180 bool supercedes_p (const pending_diagnostic &other) const final override
1181 {
1182 if (other.use_of_uninit_p ())
1183 return true;
1184
1185 return false;
1186 }
1187};
1188
1189/* Concrete subclass for describing passing a NULL value to a
1190 function marked with __attribute__((nonnull)). */
1191
1192class null_arg : public malloc_diagnostic
1193{
1194public:
1195 null_arg (const malloc_state_machine &sm, tree arg,
1196 tree fndecl, int arg_idx)
1197 : malloc_diagnostic (sm, arg),
1198 m_fndecl (fndecl), m_arg_idx (arg_idx)
1199 {}
1200
1201 const char *get_kind () const final override { return "null_arg"; }
1202
1203 bool subclass_equal_p (const pending_diagnostic &base_other)
1204 const final override
1205 {
1206 const null_arg &sub_other
1207 = (const null_arg &)base_other;
1208 return (same_tree_p (t1: m_arg, t2: sub_other.m_arg)
1209 && m_fndecl == sub_other.m_fndecl
1210 && m_arg_idx == sub_other.m_arg_idx);
1211 }
1212
1213 int get_controlling_option () const final override
1214 {
1215 return OPT_Wanalyzer_null_argument;
1216 }
1217
1218 bool terminate_path_p () const final override { return true; }
1219
1220 bool emit (diagnostic_emission_context &ctxt) final override
1221 {
1222 /* CWE-476: NULL Pointer Dereference. */
1223 auto_diagnostic_group d;
1224 ctxt.add_cwe (cwe: 476);
1225
1226 bool warned;
1227 if (zerop (m_arg))
1228 warned = ctxt.warn ("use of NULL where non-null expected");
1229 else
1230 warned = ctxt.warn ("use of NULL %qE where non-null expected",
1231 m_arg);
1232 if (warned)
1233 inform_nonnull_attribute (fndecl: m_fndecl, arg_idx: m_arg_idx);
1234 return warned;
1235 }
1236
1237 label_text describe_final_event (const evdesc::final_event &ev) final override
1238 {
1239 label_text arg_desc = describe_argument_index (fndecl: m_fndecl, arg_idx: m_arg_idx);
1240 label_text result;
1241 if (zerop (ev.m_expr))
1242 result = ev.formatted_print (fmt: "argument %s NULL where non-null expected",
1243 arg_desc.get ());
1244 else
1245 result = ev.formatted_print (fmt: "argument %s (%qE) NULL"
1246 " where non-null expected",
1247 arg_desc.get (), ev.m_expr);
1248 return result;
1249 }
1250
1251private:
1252 tree m_fndecl;
1253 int m_arg_idx;
1254};
1255
/* Concrete subclass for reporting a use of a pointer after it was
   deallocated (CWE-416: "Use After Free"), worded according to which
   deallocation API was used.  */

class use_after_free : public malloc_diagnostic
{
public:
  use_after_free (const malloc_state_machine &sm, tree arg,
		  const deallocator *deallocator)
  : malloc_diagnostic (sm, arg),
    m_deallocator (deallocator)
  {
    gcc_assert (deallocator);
  }

  const char *get_kind () const final override { return "use_after_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_use_after_free;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-416: Use After Free.  */
    ctxt.add_cwe (cwe: 416);
    return ctxt.warn ("use after %<%s%> of %qE",
		      m_deallocator->m_name, m_arg);
  }

  /* Label the event at which the pointer became freed, choosing the
     wording from the deallocator ("freed"/"deleted"/"deallocated"),
     and record the event's id so describe_final_event can cite it.  */

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (state: change.m_new_state))
      {
	m_free_event = change.m_event_id;
	switch (m_deallocator->m_wording)
	  {
	  default:
	  case WORDING_REALLOCATED:
	    /* Realloc transitions don't use this diagnostic.  */
	    gcc_unreachable ();
	  case WORDING_FREED:
	    return label_text::borrow (buffer: "freed here");
	  case WORDING_DELETED:
	    return label_text::borrow (buffer: "deleted here");
	  case WORDING_DEALLOCATED:
	    return label_text::borrow (buffer: "deallocated here");
	  }
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  /* Describe the final (use) event, referring back to the recorded
     deallocation event when one was seen on the path.  */

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    const char *funcname = m_deallocator->m_name;
    if (m_free_event.known_p ())
      switch (m_deallocator->m_wording)
	{
	default:
	case WORDING_REALLOCATED:
	  gcc_unreachable ();
	case WORDING_FREED:
	  return ev.formatted_print (fmt: "use after %<%s%> of %qE; freed at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DELETED:
	  return ev.formatted_print (fmt: "use after %<%s%> of %qE; deleted at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DEALLOCATED:
	  return ev.formatted_print (fmt: "use after %<%s%> of %qE;"
				     " deallocated at %@",
				     funcname, ev.m_expr, &m_free_event);
	}
    else
      return ev.formatted_print (fmt: "use after %<%s%> of %qE",
				 funcname, ev.m_expr);
  }

  /* Implementation of pending_diagnostic::supercedes_p for
     use_after_free.

     We want use-after-free to supercede use-of-unitialized-value,
     so that if we have these at the same stmt, we don't emit
     a use-of-uninitialized, just the use-after-free.
     (this is because we fully purge information about freed
     buffers when we free them to avoid state explosions, so
     that if they are accessed after the free, it looks like
     they are uninitialized).  */

  bool supercedes_p (const pending_diagnostic &other) const final override
  {
    if (other.use_of_uninit_p ())
      return true;

    return false;
  }

private:
  /* Event id of the deallocation, if seen along the path.  */
  diagnostic_event_id_t m_free_event;
  /* The deallocation API that was used; never NULL (see ctor assert).  */
  const deallocator *m_deallocator;
};
1352
1353class malloc_leak : public malloc_diagnostic
1354{
1355public:
1356 malloc_leak (const malloc_state_machine &sm, tree arg)
1357 : malloc_diagnostic (sm, arg) {}
1358
1359 const char *get_kind () const final override { return "malloc_leak"; }
1360
1361 int get_controlling_option () const final override
1362 {
1363 return OPT_Wanalyzer_malloc_leak;
1364 }
1365
1366 bool emit (diagnostic_emission_context &ctxt) final override
1367 {
1368 /* "CWE-401: Missing Release of Memory after Effective Lifetime". */
1369 ctxt.add_cwe (cwe: 401);
1370 if (m_arg)
1371 return ctxt.warn ("leak of %qE", m_arg);
1372 else
1373 return ctxt.warn ("leak of %qs", "<unknown>");
1374 }
1375
1376 label_text describe_state_change (const evdesc::state_change &change)
1377 final override
1378 {
1379 if (unchecked_p (state: change.m_new_state)
1380 || (start_p (state: change.m_old_state) && nonnull_p (state: change.m_new_state)))
1381 {
1382 m_alloc_event = change.m_event_id;
1383 return label_text::borrow (buffer: "allocated here");
1384 }
1385 return malloc_diagnostic::describe_state_change (change);
1386 }
1387
1388 label_text describe_final_event (const evdesc::final_event &ev) final override
1389 {
1390 if (ev.m_expr)
1391 {
1392 if (m_alloc_event.known_p ())
1393 return ev.formatted_print (fmt: "%qE leaks here; was allocated at %@",
1394 ev.m_expr, &m_alloc_event);
1395 else
1396 return ev.formatted_print (fmt: "%qE leaks here", ev.m_expr);
1397 }
1398 else
1399 {
1400 if (m_alloc_event.known_p ())
1401 return ev.formatted_print (fmt: "%qs leaks here; was allocated at %@",
1402 "<unknown>", &m_alloc_event);
1403 else
1404 return ev.formatted_print (fmt: "%qs leaks here", "<unknown>");
1405 }
1406 }
1407
1408private:
1409 diagnostic_event_id_t m_alloc_event;
1410};
1411
1412class free_of_non_heap : public malloc_diagnostic
1413{
1414public:
1415 free_of_non_heap (const malloc_state_machine &sm, tree arg,
1416 const region *freed_reg,
1417 const char *funcname)
1418 : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
1419 {
1420 }
1421
1422 const char *get_kind () const final override { return "free_of_non_heap"; }
1423
1424 bool subclass_equal_p (const pending_diagnostic &base_other) const
1425 final override
1426 {
1427 const free_of_non_heap &other = (const free_of_non_heap &)base_other;
1428 return (same_tree_p (t1: m_arg, t2: other.m_arg)
1429 && m_freed_reg == other.m_freed_reg);
1430 }
1431
1432 int get_controlling_option () const final override
1433 {
1434 return OPT_Wanalyzer_free_of_non_heap;
1435 }
1436
1437 bool emit (diagnostic_emission_context &ctxt) final override
1438 {
1439 auto_diagnostic_group d;
1440 ctxt.add_cwe (cwe: 590); /* CWE-590: Free of Memory not on the Heap. */
1441 switch (get_memory_space ())
1442 {
1443 default:
1444 case MEMSPACE_HEAP:
1445 gcc_unreachable ();
1446 case MEMSPACE_UNKNOWN:
1447 case MEMSPACE_CODE:
1448 case MEMSPACE_GLOBALS:
1449 case MEMSPACE_READONLY_DATA:
1450 return ctxt.warn ("%<%s%> of %qE which points to memory"
1451 " not on the heap",
1452 m_funcname, m_arg);
1453 break;
1454 case MEMSPACE_STACK:
1455 return ctxt.warn ("%<%s%> of %qE which points to memory"
1456 " on the stack",
1457 m_funcname, m_arg);
1458 break;
1459 }
1460 }
1461
1462 label_text describe_state_change (const evdesc::state_change &)
1463 final override
1464 {
1465 return label_text::borrow (buffer: "pointer is from here");
1466 }
1467
1468 label_text describe_final_event (const evdesc::final_event &ev) final override
1469 {
1470 return ev.formatted_print (fmt: "call to %qs here", m_funcname);
1471 }
1472
1473 void mark_interesting_stuff (interesting_t *interest) final override
1474 {
1475 if (m_freed_reg)
1476 interest->add_region_creation (reg: m_freed_reg);
1477 }
1478
1479private:
1480 enum memory_space get_memory_space () const
1481 {
1482 if (m_freed_reg)
1483 return m_freed_reg->get_memory_space ();
1484 else
1485 return MEMSPACE_UNKNOWN;
1486 }
1487
1488 const region *m_freed_reg;
1489 const char *m_funcname;
1490};
1491
1492/* Concrete pending_diagnostic subclass for -Wanalyzer-deref-before-check. */
1493
1494class deref_before_check : public malloc_diagnostic
1495{
1496public:
1497 deref_before_check (const malloc_state_machine &sm, tree arg)
1498 : malloc_diagnostic (sm, arg),
1499 m_deref_enode (NULL),
1500 m_deref_expr (NULL),
1501 m_check_enode (NULL)
1502 {
1503 gcc_assert (arg);
1504 }
1505
1506 const char *get_kind () const final override { return "deref_before_check"; }
1507
1508 int get_controlling_option () const final override
1509 {
1510 return OPT_Wanalyzer_deref_before_check;
1511 }
1512
1513 bool emit (diagnostic_emission_context &ctxt) final override
1514 {
1515 /* Don't emit the warning if we can't show where the deref
1516 and the check occur. */
1517 if (!m_deref_enode)
1518 return false;
1519 if (!m_check_enode)
1520 return false;
1521 /* Only emit the warning for intraprocedural cases. */
1522 const program_point &deref_point = m_deref_enode->get_point ();
1523 const program_point &check_point = m_check_enode->get_point ();
1524
1525 if (!program_point::effectively_intraprocedural_p (point_a: deref_point,
1526 point_b: check_point))
1527 return false;
1528
1529 /* Reject the warning if the check occurs within a macro defintion.
1530 This avoids false positives for such code as:
1531
1532 #define throw_error \
1533 do { \
1534 if (p) \
1535 cleanup (p); \
1536 return; \
1537 } while (0)
1538
1539 if (p->idx >= n)
1540 throw_error ();
1541
1542 where the usage of "throw_error" implicitly adds a check
1543 on 'p'.
1544
1545 We do warn when the check is in a macro expansion if we can get
1546 at the location of the condition and it is't part of the
1547 definition, so that we warn for checks such as:
1548 if (words[0][0] == '@')
1549 return;
1550 g_assert(words[0] != NULL); <--- here
1551 Unfortunately we don't have locations for individual gimple
1552 arguments, so in:
1553 g_assert (ptr);
1554 we merely have a gimple_cond
1555 if (p_2(D) == 0B)
1556 with no way of getting at the location of the condition separately
1557 from that of the gimple_cond (where the "if" is within the macro
1558 definition). We reject the warning for such cases.
1559
1560 We do warn when the *deref* occurs in a macro, since this can be
1561 a source of real bugs; see e.g. PR 77425. */
1562 location_t check_loc = m_check_enode->get_point ().get_location ();
1563 if (linemap_location_from_macro_definition_p (line_table, check_loc))
1564 return false;
1565
1566 /* Reject warning if the check is in a loop header within a
1567 macro expansion. This rejects cases like:
1568 | deref of x;
1569 | [...snip...]
1570 | FOR_EACH(x) {
1571 | [...snip...]
1572 | }
1573 where the FOR_EACH macro tests for non-nullness of x, since
1574 the user is hoping to encapsulate the details of iteration
1575 in the macro, and the extra check on the first iteration
1576 would just be noise if we reported it. */
1577 if (loop_header_p (point: m_check_enode->get_point ())
1578 && linemap_location_from_macro_expansion_p (line_table, check_loc))
1579 return false;
1580
1581 /* Reject if m_deref_expr is sufficiently different from m_arg
1582 for cases where the dereference is spelled differently from
1583 the check, which is probably two different ways to get the
1584 same svalue, and thus not worth reporting. */
1585 if (!m_deref_expr)
1586 return false;
1587 if (!sufficiently_similar_p (expr_a: m_deref_expr, expr_b: m_arg))
1588 return false;
1589
1590 /* Reject the warning if the deref's BB doesn't dominate that
1591 of the check, so that we don't warn e.g. for shared cleanup
1592 code that checks a pointer for NULL, when that code is sometimes
1593 used before a deref and sometimes after.
1594 Using the dominance code requires setting cfun. */
1595 auto_cfun sentinel (m_deref_enode->get_function ());
1596 calculate_dominance_info (CDI_DOMINATORS);
1597 if (!dominated_by_p (CDI_DOMINATORS,
1598 m_check_enode->get_supernode ()->m_bb,
1599 m_deref_enode->get_supernode ()->m_bb))
1600 return false;
1601
1602 return ctxt.warn ("check of %qE for NULL after already"
1603 " dereferencing it",
1604 m_arg);
1605 }
1606
1607 label_text describe_state_change (const evdesc::state_change &change)
1608 final override
1609 {
1610 if (change.m_old_state == m_sm.get_start_state ()
1611 && assumed_non_null_p (state: change.m_new_state))
1612 {
1613 m_first_deref_event = change.m_event_id;
1614 m_deref_enode = change.m_event.get_exploded_node ();
1615 m_deref_expr = change.m_expr;
1616 return change.formatted_print (fmt: "pointer %qE is dereferenced here",
1617 m_arg);
1618 }
1619 return malloc_diagnostic::describe_state_change (change);
1620 }
1621
1622 label_text describe_final_event (const evdesc::final_event &ev) final override
1623 {
1624 m_check_enode = ev.m_event.get_exploded_node ();
1625 if (m_first_deref_event.known_p ())
1626 return ev.formatted_print (fmt: "pointer %qE is checked for NULL here but"
1627 " it was already dereferenced at %@",
1628 m_arg, &m_first_deref_event);
1629 else
1630 return ev.formatted_print (fmt: "pointer %qE is checked for NULL here but"
1631 " it was already dereferenced",
1632 m_arg);
1633 }
1634
1635private:
1636 static bool loop_header_p (const program_point &point)
1637 {
1638 const supernode *snode = point.get_supernode ();
1639 if (!snode)
1640 return false;
1641 for (auto &in_edge : snode->m_preds)
1642 {
1643 if (const cfg_superedge *cfg_in_edge
1644 = in_edge->dyn_cast_cfg_superedge ())
1645 if (cfg_in_edge->back_edge_p ())
1646 return true;
1647 }
1648 return false;
1649 }
1650
1651 static bool sufficiently_similar_p (tree expr_a, tree expr_b)
1652 {
1653 pretty_printer *pp_a = global_dc->printer->clone ();
1654 pretty_printer *pp_b = global_dc->printer->clone ();
1655 pp_printf (pp_a, "%qE", expr_a);
1656 pp_printf (pp_b, "%qE", expr_b);
1657 bool result = (strcmp (s1: pp_formatted_text (pp_a), s2: pp_formatted_text (pp_b))
1658 == 0);
1659 delete pp_a;
1660 delete pp_b;
1661 return result;
1662 }
1663
1664 diagnostic_event_id_t m_first_deref_event;
1665 const exploded_node *m_deref_enode;
1666 tree m_deref_expr;
1667 const exploded_node *m_check_enode;
1668};
1669
1670/* struct allocation_state : public state_machine::state. */
1671
1672/* Implementation of state_machine::state::dump_to_pp vfunc
1673 for allocation_state: append the API that this allocation is
1674 associated with. */
1675
1676void
1677allocation_state::dump_to_pp (pretty_printer *pp) const
1678{
1679 state_machine::state::dump_to_pp (pp);
1680 if (m_deallocators)
1681 {
1682 pp_string (pp, " (");
1683 m_deallocators->dump_to_pp (pp);
1684 pp_character (pp, ')');
1685 }
1686}
1687
1688/* Given a allocation_state for a deallocator_set, get the "nonnull" state
1689 for the corresponding allocator(s). */
1690
const allocation_state *
allocation_state::get_nonnull () const
{
  /* Only valid for states that have an associated deallocator_set.  */
  gcc_assert (m_deallocators);
  return as_a_allocation_state (state: m_deallocators->m_nonnull);
}
1697
1698/* struct assumed_non_null_state : public allocation_state. */
1699
void
assumed_non_null_state::dump_to_pp (pretty_printer *pp) const
{
  allocation_state::dump_to_pp (pp);
  /* Append the frame in which the non-null assumption applies.  */
  pp_string (pp, " (in ");
  m_frame->dump_to_pp (pp, simple: true);
  pp_character (pp, ')');
}
1708
1709/* malloc_state_machine's ctor. */
1710
malloc_state_machine::malloc_state_machine (logger *logger)
: state_machine ("malloc", logger),
  m_free (this, "free", WORDING_FREED),
  m_scalar_delete (this, "delete", WORDING_DELETED),
  m_vector_delete (this, "delete[]", WORDING_DELETED),
  m_realloc (this, "realloc", WORDING_REALLOCATED)
{
  /* The "start" state is expected to have state id 0.  */
  gcc_assert (m_start->get_id () == 0);
  /* Additional states not tied to a specific deallocator API.  */
  m_null = add_state (name: "null", rs: RS_FREED, NULL, NULL);
  m_non_heap = add_state (name: "non-heap", rs: RS_NON_HEAP, NULL, NULL);
  m_stop = add_state (name: "stop", rs: RS_STOP, NULL, NULL);
}
1723
1724malloc_state_machine::~malloc_state_machine ()
1725{
1726 unsigned i;
1727 custom_deallocator_set *set;
1728 FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
1729 delete set;
1730 custom_deallocator *d;
1731 FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
1732 delete d;
1733}
1734
1735state_machine::state_t
1736malloc_state_machine::add_state (const char *name, enum resource_state rs,
1737 const deallocator_set *deallocators,
1738 const deallocator *deallocator)
1739{
1740 return add_custom_state (s: new allocation_state (name, alloc_state_id (),
1741 rs, deallocators,
1742 deallocator));
1743}
1744
1745/* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1746 return a custom_deallocator_set for them, consolidating them
1747 to ensure uniqueness of the sets.
1748
1749 Return NULL if it has no such attributes. */
1750
1751const custom_deallocator_set *
1752malloc_state_machine::
1753get_or_create_custom_deallocator_set (tree allocator_fndecl)
1754{
1755 /* Early rejection of decls without attributes. */
1756 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1757 if (!attrs)
1758 return NULL;
1759
1760 /* Otherwise, call maybe_create_custom_deallocator_set,
1761 memoizing the result. */
1762 if (custom_deallocator_set **slot
1763 = m_custom_deallocator_set_cache.get (k: allocator_fndecl))
1764 return *slot;
1765 custom_deallocator_set *set
1766 = maybe_create_custom_deallocator_set (allocator_fndecl);
1767 m_custom_deallocator_set_cache.put (k: allocator_fndecl, v: set);
1768 return set;
1769}
1770
1771/* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
1772 look for any "__attribute__((malloc(FOO)))" and return a
1773 custom_deallocator_set for them, consolidating them
1774 to ensure uniqueness of the sets.
1775
1776 Return NULL if it has no such attributes.
1777
1778 Subroutine of get_or_create_custom_deallocator_set which
1779 memoizes the result. */
1780
custom_deallocator_set *
malloc_state_machine::
maybe_create_custom_deallocator_set (tree allocator_fndecl)
{
  tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
  gcc_assert (attrs);

  /* Look for instances of __attribute__((malloc(FOO))).  */
  auto_vec<const deallocator *> deallocator_vec;
  for (tree allocs = attrs;
       (allocs = lookup_attribute (attr_name: "malloc", list: allocs));
       allocs = TREE_CHAIN (allocs))
    {
      tree args = TREE_VALUE (allocs);
      /* A plain "malloc" attribute has no args; skip those.  */
      if (!args)
	continue;
      if (TREE_VALUE (args))
	{
	  const deallocator *d
	    = get_or_create_deallocator (TREE_VALUE (args));
	  deallocator_vec.safe_push (obj: d);
	}
    }

  /* If there weren't any deallocators, bail.  */
  if (deallocator_vec.length () == 0)
    return NULL;

  /* Consolidate, so that we reuse existing deallocator_set
     instances.  */
  /* Sorting canonicalizes the vec so it can serve as a map key.  */
  deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
  custom_deallocator_set **slot
    = m_custom_deallocator_set_map.get (k: &deallocator_vec);
  if (slot)
    return *slot;
  custom_deallocator_set *set
    = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
  /* Key the map by the set's own copy of the vec, since it has stable
     storage (deallocator_vec is about to go out of scope).  */
  m_custom_deallocator_set_map.put (k: &set->m_deallocator_vec, v: set);
  m_dynamic_sets.safe_push (obj: set);
  return set;
}
1822
1823/* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1824
1825const deallocator *
1826malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1827{
1828 deallocator **slot = m_deallocator_map.get (k: deallocator_fndecl);
1829 if (slot)
1830 return *slot;
1831
1832 /* Reuse "free". */
1833 deallocator *d;
1834 if (is_named_call_p (fndecl: deallocator_fndecl, funcname: "free")
1835 || is_std_named_call_p (fndecl: deallocator_fndecl, funcname: "free")
1836 || is_named_call_p (fndecl: deallocator_fndecl, funcname: "__builtin_free"))
1837 d = &m_free.m_deallocator;
1838 else
1839 {
1840 custom_deallocator *cd
1841 = new custom_deallocator (this, deallocator_fndecl,
1842 WORDING_DEALLOCATED);
1843 m_dynamic_deallocators.safe_push (obj: cd);
1844 d = cd;
1845 }
1846 m_deallocator_map.put (k: deallocator_fndecl, v: d);
1847 return d;
1848}
1849
1850/* Get the "assumed-non-null" state for assumptions made within FRAME,
1851 creating it if necessary. */
1852
1853state_machine::state_t
1854malloc_state_machine::
1855get_or_create_assumed_non_null_state_for_frame (const frame_region *frame)
1856{
1857 if (state_t *slot = m_assumed_non_null.get (k: frame))
1858 return *slot;
1859 state_machine::state *new_state
1860 = new assumed_non_null_state ("assumed-non-null", alloc_state_id (), frame);
1861 add_custom_state (s: new_state);
1862 m_assumed_non_null.put (k: frame, v: new_state);
1863 return new_state;
1864}
1865
1866/* Try to identify the function declaration either by name or as a known malloc
1867 builtin. */
1868
1869static bool
1870known_allocator_p (const_tree fndecl, const gcall *call)
1871{
1872 /* Either it is a function we know by name and number of arguments... */
1873 if (is_named_call_p (fndecl, funcname: "malloc", call, num_args: 1)
1874 || is_named_call_p (fndecl, funcname: "calloc", call, num_args: 2)
1875 || is_std_named_call_p (fndecl, funcname: "malloc", call, num_args: 1)
1876 || is_std_named_call_p (fndecl, funcname: "calloc", call, num_args: 2)
1877 || is_named_call_p (fndecl, funcname: "strdup", call, num_args: 1)
1878 || is_named_call_p (fndecl, funcname: "strndup", call, num_args: 2))
1879 return true;
1880
1881 /* ... or it is a builtin allocator that allocates objects freed with
1882 __builtin_free. */
1883 if (fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL))
1884 switch (DECL_FUNCTION_CODE (decl: fndecl))
1885 {
1886 case BUILT_IN_MALLOC:
1887 case BUILT_IN_CALLOC:
1888 case BUILT_IN_STRDUP:
1889 case BUILT_IN_STRNDUP:
1890 return true;
1891 default:
1892 break;
1893 }
1894
1895 return false;
1896}
1897
1898/* If PTR's nullness is not known, transition it to the "assumed-non-null"
1899 state for the current frame. */
1900
1901void
1902malloc_state_machine::maybe_assume_non_null (sm_context *sm_ctxt,
1903 tree ptr,
1904 const gimple *stmt) const
1905{
1906 const region_model *old_model = sm_ctxt->get_old_region_model ();
1907 if (!old_model)
1908 return;
1909
1910 tree null_ptr_cst = build_int_cst (TREE_TYPE (ptr), 0);
1911 tristate known_non_null
1912 = old_model->eval_condition (lhs: ptr, op: NE_EXPR, rhs: null_ptr_cst, NULL);
1913 if (known_non_null.is_unknown ())
1914 {
1915 /* Cast away const-ness for cache-like operations. */
1916 malloc_state_machine *mut_this
1917 = const_cast <malloc_state_machine *> (this);
1918 state_t next_state
1919 = mut_this->get_or_create_assumed_non_null_state_for_frame
1920 (frame: old_model->get_current_frame ());
1921 sm_ctxt->set_next_state (stmt, var: ptr, to: next_state);
1922 }
1923}
1924
1925/* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1926
1927bool
1928malloc_state_machine::on_stmt (sm_context *sm_ctxt,
1929 const supernode *node,
1930 const gimple *stmt) const
1931{
1932 if (const gcall *call = dyn_cast <const gcall *> (p: stmt))
1933 if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
1934 {
1935 if (known_allocator_p (fndecl: callee_fndecl, call))
1936 {
1937 on_allocator_call (sm_ctxt, call, deallocators: &m_free);
1938 return true;
1939 }
1940
1941 if (!is_placement_new_p (call))
1942 {
1943 bool returns_nonnull = !TREE_NOTHROW (callee_fndecl)
1944 && flag_exceptions;
1945 if (is_named_call_p (fndecl: callee_fndecl, funcname: "operator new"))
1946 on_allocator_call (sm_ctxt, call,
1947 deallocators: &m_scalar_delete, returns_nonnull);
1948 else if (is_named_call_p (fndecl: callee_fndecl, funcname: "operator new []"))
1949 on_allocator_call (sm_ctxt, call,
1950 deallocators: &m_vector_delete, returns_nonnull);
1951 }
1952
1953 if (is_named_call_p (fndecl: callee_fndecl, funcname: "operator delete", call, num_args: 1)
1954 || is_named_call_p (fndecl: callee_fndecl, funcname: "operator delete", call, num_args: 2))
1955 {
1956 on_deallocator_call (sm_ctxt, node, call,
1957 d: &m_scalar_delete.m_deallocator, argno: 0);
1958 return true;
1959 }
1960 else if (is_named_call_p (fndecl: callee_fndecl, funcname: "operator delete []", call, num_args: 1))
1961 {
1962 on_deallocator_call (sm_ctxt, node, call,
1963 d: &m_vector_delete.m_deallocator, argno: 0);
1964 return true;
1965 }
1966
1967 if (is_named_call_p (fndecl: callee_fndecl, funcname: "alloca", call, num_args: 1)
1968 || is_named_call_p (fndecl: callee_fndecl, funcname: "__builtin_alloca", call, num_args: 1))
1969 {
1970 tree lhs = gimple_call_lhs (gs: call);
1971 if (lhs)
1972 sm_ctxt->on_transition (node, stmt, var: lhs, from: m_start, to: m_non_heap);
1973 return true;
1974 }
1975
1976 if (is_named_call_p (fndecl: callee_fndecl, funcname: "free", call, num_args: 1)
1977 || is_std_named_call_p (fndecl: callee_fndecl, funcname: "free", call, num_args: 1)
1978 || is_named_call_p (fndecl: callee_fndecl, funcname: "__builtin_free", call, num_args: 1))
1979 {
1980 on_deallocator_call (sm_ctxt, node, call,
1981 d: &m_free.m_deallocator, argno: 0);
1982 return true;
1983 }
1984
1985 if (is_named_call_p (fndecl: callee_fndecl, funcname: "realloc", call, num_args: 2)
1986 || is_named_call_p (fndecl: callee_fndecl, funcname: "__builtin_realloc", call, num_args: 2))
1987 {
1988 on_realloc_call (sm_ctxt, node, call);
1989 return true;
1990 }
1991
1992 if (unaffected_by_call_p (fndecl: callee_fndecl))
1993 return true;
1994
1995 /* Cast away const-ness for cache-like operations. */
1996 malloc_state_machine *mutable_this
1997 = const_cast <malloc_state_machine *> (this);
1998
1999 /* Handle interesting attributes of the callee_fndecl,
2000 or prioritize those of the builtin that callee_fndecl is expected
2001 to be.
2002 Might want this to be controlled by a flag. */
2003 {
2004 tree fndecl = callee_fndecl;
2005 /* If call is recognized as a builtin known_function, use that
2006 builtin's function_decl. */
2007 if (const region_model *old_model = sm_ctxt->get_old_region_model ())
2008 if (const builtin_known_function *builtin_kf
2009 = old_model->get_builtin_kf (call))
2010 fndecl = builtin_kf->builtin_decl ();
2011
2012 /* Handle "__attribute__((malloc(FOO)))". */
2013 if (const deallocator_set *deallocators
2014 = mutable_this->get_or_create_custom_deallocator_set
2015 (allocator_fndecl: fndecl))
2016 {
2017 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
2018 bool returns_nonnull
2019 = lookup_attribute (attr_name: "returns_nonnull", list: attrs);
2020 on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
2021 }
2022
2023 {
2024 /* Handle "__attribute__((nonnull))". */
2025 tree fntype = TREE_TYPE (fndecl);
2026 bitmap nonnull_args = get_nonnull_args (fntype);
2027 if (nonnull_args)
2028 {
2029 for (unsigned i = 0; i < gimple_call_num_args (gs: stmt); i++)
2030 {
2031 tree arg = gimple_call_arg (gs: stmt, index: i);
2032 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
2033 continue;
2034 /* If we have a nonnull-args, and either all pointers, or
2035 just the specified pointers. */
2036 if (bitmap_empty_p (map: nonnull_args)
2037 || bitmap_bit_p (nonnull_args, i))
2038 {
2039 state_t state = sm_ctxt->get_state (stmt, var: arg);
2040 /* Can't use a switch as the states are non-const. */
2041 /* Do use the fndecl that caused the warning so that the
2042 misused attributes are printed and the user not
2043 confused. */
2044 if (unchecked_p (state))
2045 {
2046 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
2047 sm_ctxt->warn (node, stmt, var: arg,
2048 d: make_unique<possible_null_arg>
2049 (args: *this, args&: diag_arg, args&: fndecl, args&: i));
2050 const allocation_state *astate
2051 = as_a_allocation_state (state);
2052 sm_ctxt->set_next_state (stmt, var: arg,
2053 to: astate->get_nonnull ());
2054 }
2055 else if (state == m_null)
2056 {
2057 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
2058 sm_ctxt->warn (node, stmt, var: arg,
2059 d: make_unique<null_arg>
2060 (args: *this, args&: diag_arg, args&: fndecl, args&: i));
2061 sm_ctxt->set_next_state (stmt, var: arg, to: m_stop);
2062 }
2063 else if (state == m_start)
2064 maybe_assume_non_null (sm_ctxt, ptr: arg, stmt);
2065 }
2066 }
2067 BITMAP_FREE (nonnull_args);
2068 }
2069 }
2070
2071 /* Check for this after nonnull, so that if we have both
2072 then we transition to "freed", rather than "checked". */
2073 unsigned dealloc_argno = fndecl_dealloc_argno (fndecl);
2074 if (dealloc_argno != UINT_MAX)
2075 {
2076 const deallocator *d
2077 = mutable_this->get_or_create_deallocator (deallocator_fndecl: fndecl);
2078 on_deallocator_call (sm_ctxt, node, call, d, argno: dealloc_argno);
2079 }
2080 }
2081 }
2082
2083 /* Look for pointers explicitly being compared against zero
2084 that are in state assumed_non_null i.e. we already defererenced
2085 them.
2086 We have to do this check here, rather than in on_condition
2087 because we add a constraint that the pointer is non-null when
2088 dereferencing it, and this makes the apply_constraints_for_gcond
2089 find known-true and known-false conditions; on_condition is only
2090 called when adding new constraints. */
2091 if (const gcond *cond_stmt = dyn_cast <const gcond *> (p: stmt))
2092 {
2093 enum tree_code op = gimple_cond_code (gs: cond_stmt);
2094 if (op == EQ_EXPR || op == NE_EXPR)
2095 {
2096 tree lhs = gimple_cond_lhs (gs: cond_stmt);
2097 tree rhs = gimple_cond_rhs (gs: cond_stmt);
2098 if (any_pointer_p (expr: lhs)
2099 && any_pointer_p (expr: rhs)
2100 && zerop (rhs))
2101 {
2102 state_t state = sm_ctxt->get_state (stmt, var: lhs);
2103 if (assumed_non_null_p (state))
2104 maybe_complain_about_deref_before_check
2105 (sm_ctxt, node,
2106 stmt,
2107 (const assumed_non_null_state *)state,
2108 ptr: lhs);
2109 }
2110 }
2111 }
2112
2113 if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
2114 if (any_pointer_p (expr: lhs))
2115 on_zero_assignment (sm_ctxt, stmt,lhs);
2116
2117 /* Handle dereferences. */
2118 for (unsigned i = 0; i < gimple_num_ops (gs: stmt); i++)
2119 {
2120 tree op = gimple_op (gs: stmt, i);
2121 if (!op)
2122 continue;
2123 if (TREE_CODE (op) == COMPONENT_REF)
2124 op = TREE_OPERAND (op, 0);
2125
2126 if (TREE_CODE (op) == MEM_REF)
2127 {
2128 tree arg = TREE_OPERAND (op, 0);
2129
2130 state_t state = sm_ctxt->get_state (stmt, var: arg);
2131 if (state == m_start)
2132 maybe_assume_non_null (sm_ctxt, ptr: arg, stmt);
2133 else if (unchecked_p (state))
2134 {
2135 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
2136 sm_ctxt->warn (node, stmt, var: arg,
2137 d: make_unique<possible_null_deref> (args: *this,
2138 args&: diag_arg));
2139 const allocation_state *astate = as_a_allocation_state (state);
2140 sm_ctxt->set_next_state (stmt, var: arg, to: astate->get_nonnull ());
2141 }
2142 else if (state == m_null)
2143 {
2144 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
2145 sm_ctxt->warn (node, stmt, var: arg,
2146 d: make_unique<null_deref> (args: *this, args&: diag_arg));
2147 sm_ctxt->set_next_state (stmt, var: arg, to: m_stop);
2148 }
2149 else if (freed_p (state))
2150 {
2151 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
2152 const allocation_state *astate = as_a_allocation_state (state);
2153 sm_ctxt->warn (node, stmt, var: arg,
2154 d: make_unique<use_after_free>
2155 (args: *this, args&: diag_arg, args: astate->m_deallocator));
2156 sm_ctxt->set_next_state (stmt, var: arg, to: m_stop);
2157 }
2158 }
2159 }
2160 return false;
2161}
2162
2163/* Given a check against null of PTR in assumed-non-null state STATE,
2164 potentially add a deref_before_check warning to SM_CTXT. */
2165
2166void
2167malloc_state_machine::
2168maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
2169 const supernode *node,
2170 const gimple *stmt,
2171 const assumed_non_null_state *state,
2172 tree ptr) const
2173{
 /* We need the old region model to compare stack frames below;
    without it, stay silent rather than risk a false positive. */
2174 const region_model *model = sm_ctxt->get_old_region_model ();
2175 if (!model)
2176 return;
2177
2178 /* Don't complain if the current frame (where the check is occurring) is
2179 deeper than the frame in which the "not null" assumption was made.
2180 This suppresses false positives for cases like:
2181
2182 void foo (struct s *p)
2183 {
2184 int val = p->some_field; // deref here
2185 shared_helper (p);
2186 }
2187
2188 where "shared_helper" has:
2189
2190 void shared_helper (struct s *p)
2191 {
2192 if (!p) // check here
2193 return;
2194 // etc
2195 }
2196
2197 since the check in "shared_helper" is OK. */
2198 const frame_region *checked_in_frame = model->get_current_frame ();
2199 const frame_region *assumed_nonnull_in_frame = state->m_frame;
2200 if (checked_in_frame->get_index () > assumed_nonnull_in_frame->get_index ())
2201 return;
2202
2203 /* Don't complain if STMT was inlined from another function, to avoid
2204 similar false positives involving shared helper functions. */
2205 if (stmt->location)
2206 {
2207 inlining_info info (stmt->location);
2208 if (info.get_extra_frames () > 0)
2209 return;
2210 }
2211
 /* Report the deref-before-check if we can express PTR as a tree for
    the user; either way transition PTR to the "stop" state so we
    don't emit further complaints about it. */
2212 tree diag_ptr = sm_ctxt->get_diagnostic_tree (expr: ptr);
2213 if (diag_ptr)
2214 sm_ctxt->warn
2215 (node, stmt, var: ptr,
2216 d: make_unique<deref_before_check> (args: *this, args&: diag_ptr));
2217 sm_ctxt->set_next_state (stmt, var: ptr, to: m_stop);
2218}
2219
2220/* Handle a call to an allocator.
2221 RETURNS_NONNULL is true if CALL is to a fndecl known to have
2222 __attribute__((returns_nonnull)). */
2223
2224void
2225malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
2226 const gcall *call,
2227 const deallocator_set *deallocators,
2228 bool returns_nonnull) const
2229{
2230 tree lhs = gimple_call_lhs (gs: call);
2231 if (lhs)
2232 {
 /* Only transition the result if it's still in the start state, so
    that we don't clobber sm-state already assigned to LHS. The new
    state ("unchecked" or "nonnull") records DEALLOCATORS as the set
    of valid ways to release it. */
2233 if (sm_ctxt->get_state (stmt: call, var: lhs) == m_start)
2234 sm_ctxt->set_next_state (stmt: call, var: lhs,
2235 to: (returns_nonnull
2236 ? deallocators->m_nonnull
2237 : deallocators->m_unchecked));
2238 }
2239 else
2240 {
 /* The allocation result is discarded immediately. */
2241 /* TODO: report leak. */
2242 }
2243}
2244
2245/* Handle deallocations of non-heap pointers.
2246 non-heap -> stop, with warning. */
2247
2248void
2249malloc_state_machine::handle_free_of_non_heap (sm_context *sm_ctxt,
2250 const supernode *node,
2251 const gcall *call,
2252 tree arg,
2253 const deallocator *d) const
2254{
2255 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
 /* Try to determine the region that ARG points to, so that the
    diagnostic can describe it; FREED_REG stays NULL if the old
    program state isn't available here. */
2256 const region *freed_reg = NULL;
2257 if (const program_state *old_state = sm_ctxt->get_old_program_state ())
2258 {
2259 const region_model *old_model = old_state->m_region_model;
2260 const svalue *ptr_sval = old_model->get_rvalue (expr: arg, NULL);
2261 freed_reg = old_model->deref_rvalue (ptr_sval, ptr_tree: arg, NULL);
2262 }
2263 sm_ctxt->warn (node, stmt: call, var: arg,
2264 d: make_unique<free_of_non_heap>
2265 (args: *this, args&: diag_arg, args&: freed_reg, args: d->m_name));
 /* Transition to "stop" to avoid a cascade of further complaints
    about ARG. */
2266 sm_ctxt->set_next_state (stmt: call, var: arg, to: m_stop);
2267}
2268
/* Handle a call to deallocator D, releasing the pointer passed as
   argument ARGNO of CALL (if present), updating the pointer's sm-state
   and warning about double-free, mismatching allocator/deallocator
   pairs, and frees of non-heap pointers.  */

2269void
2270malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
2271 const supernode *node,
2272 const gcall *call,
2273 const deallocator *d,
2274 unsigned argno) const
2275{
 /* Bail out on malformed calls that don't supply argument ARGNO. */
2276 if (argno >= gimple_call_num_args (gs: call))
2277 return;
2278 tree arg = gimple_call_arg (gs: call, index: argno);
2279
2280 state_t state = sm_ctxt->get_state (stmt: call, var: arg);
2281
2282 /* start/assumed_non_null/unchecked/nonnull -> freed. */
2283 if (state == m_start || assumed_non_null_p (state))
2284 sm_ctxt->set_next_state (stmt: call, var: arg, to: d->m_freed);
2285 else if (unchecked_p (state) || nonnull_p (state))
2286 {
2287 const allocation_state *astate = as_a_allocation_state (state);
2288 gcc_assert (astate->m_deallocators);
 /* Complain if D isn't among the deallocators recorded as valid for
    ARG's allocation, but still transition to D's "freed" state below
    to avoid follow-on reports. */
2289 if (!astate->m_deallocators->contains_p (d))
2290 {
2291 /* Wrong allocator. */
2292 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
2293 sm_ctxt->warn (node, stmt: call, var: arg,
2294 d: make_unique<mismatching_deallocation>
2295 (args: *this, args&: diag_arg,
2296 args: astate->m_deallocators,
2297 args&: d));
2298 }
2299 sm_ctxt->set_next_state (stmt: call, var: arg, to: d->m_freed);
2300 }
2301
2302 /* Keep state "null" as-is, rather than transitioning to "freed";
2303 we don't want to complain about double-free of NULL. */
2304 else if (state == d->m_freed)
2305 {
2306 /* freed -> stop, with warning. */
2307 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
2308 sm_ctxt->warn (node, stmt: call, var: arg,
2309 d: make_unique<double_free> (args: *this, args&: diag_arg, args: d->m_name));
2310 sm_ctxt->set_next_state (stmt: call, var: arg, to: m_stop);
2311 }
2312 else if (state == m_non_heap)
2313 {
2314 /* non-heap -> stop, with warning. */
2315 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
2316 }
2317}
2318
2319/* Handle a call to "realloc".
2320 Check for free of non-heap or mismatching allocators,
2321 transitioning to the "stop" state for such cases.
2322
2323 Otherwise, kf_realloc::impl_call_post will later
2324 get called (which will handle other sm-state transitions
2325 when the state is bifurcated). */
2326
2327void
2328malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
2329 const supernode *node,
2330 const gcall *call) const
2331{
 /* realloc's pointer operand is its first argument; treat it as being
    passed to the "realloc" deallocator. */
2332 const unsigned argno = 0;
2333 const deallocator *d = &m_realloc;
2334
2335 tree arg = gimple_call_arg (gs: call, index: argno);
2336
2337 state_t state = sm_ctxt->get_state (stmt: call, var: arg);
2338
2339 if (unchecked_p (state) || nonnull_p (state))
2340 {
2341 const allocation_state *astate = as_a_allocation_state (state);
2342 gcc_assert (astate->m_deallocators);
 /* "realloc" is only valid on pointers that "free" could release;
    anything else is a mismatching deallocation. */
2343 if (!astate->m_deallocators->contains_p (d: &m_free.m_deallocator))
2344 {
2345 /* Wrong allocator. */
2346 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
2347 sm_ctxt->warn (node, stmt: call, var: arg,
2348 d: make_unique<mismatching_deallocation>
2349 (args: *this, args&: diag_arg,
2350 args: astate->m_deallocators, args&: d));
2351 sm_ctxt->set_next_state (stmt: call, var: arg, to: m_stop);
 /* Stop exploring this path: the later realloc handling (see the
    header comment) would otherwise keep bifurcating it. */
2352 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
2353 path_ctxt->terminate_path ();
2354 }
2355 }
2356 else if (state == m_free.m_deallocator.m_freed)
2357 {
2358 /* freed -> stop, with warning. */
2359 tree diag_arg = sm_ctxt->get_diagnostic_tree (expr: arg);
2360 sm_ctxt->warn (node, stmt: call, var: arg,
2361 d: make_unique<double_free> (args: *this, args&: diag_arg, args: "free"))
2362 sm_ctxt->set_next_state (stmt: call, var: arg, to: m_stop);
2363 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
2364 path_ctxt->terminate_path ();
2365 }
2366 else if (state == m_non_heap)
2367 {
2368 /* non-heap -> stop, with warning. */
2369 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
2370 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
2371 path_ctxt->terminate_path ();
2372 }
2373}
2374
2375/* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
2376
2377void
2378malloc_state_machine::on_phi (sm_context *sm_ctxt,
2379 const supernode *node ATTRIBUTE_UNUSED,
2380 const gphi *phi,
2381 tree rhs) const
2382{
 /* A zero phi argument assigns NULL to the phi result along this edge;
    handle it like any other zero-assignment. */
2383 if (zerop (rhs))
2384 {
2385 tree lhs = gimple_phi_result (gs: phi);
2386 on_zero_assignment (sm_ctxt, stmt: phi, lhs);
2387 }
2388}
2389
2390/* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
2391 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
2392
2393void
2394malloc_state_machine::on_condition (sm_context *sm_ctxt,
2395 const supernode *node ATTRIBUTE_UNUSED,
2396 const gimple *stmt,
2397 const svalue *lhs,
2398 enum tree_code op,
2399 const svalue *rhs) const
2400{
 /* Only comparisons of a pointer against zero (NULL) are of interest. */
2401 if (!rhs->all_zeroes_p ())
2402 return;
2403
2404 if (!any_pointer_p (sval: lhs))
2405 return;
2406 if (!any_pointer_p (sval: rhs))
2407 return;
2408
2409 if (op == NE_EXPR)
2410 {
2411 log (fmt: "got 'ARG != 0' match");
 /* On the "ptr != NULL" path an unchecked pointer is known to be
    non-NULL. */
2412 state_t s = sm_ctxt->get_state (stmt, lhs);
2413 if (unchecked_p (state: s))
2414 {
2415 const allocation_state *astate = as_a_allocation_state (state: s);
2416 sm_ctxt->set_next_state (stmt, var: lhs, to: astate->get_nonnull ());
2417 }
2418 }
2419 else if (op == EQ_EXPR)
2420 {
2421 log (fmt: "got 'ARG == 0' match");
 /* On the "ptr == NULL" path an unchecked pointer is known to be
    NULL. */
2422 state_t s = sm_ctxt->get_state (stmt, lhs);
2423 if (unchecked_p (state: s))
2424 sm_ctxt->set_next_state (stmt, var: lhs, to: m_null);
2425 }
2426}
2427
2428/* Implementation of state_machine::on_pop_frame vfunc for malloc_state_machine.
2429 Clear any "assumed-non-null" state where the assumption happened in
2430 FRAME_REG. */
2431
2432void
2433malloc_state_machine::on_pop_frame (sm_state_map *smap,
2434 const frame_region *frame_reg) const
2435{
 /* Two phases: gather the affected svalues first, rather than
    mutating SMAP while iterating over it. */
2436 hash_set<const svalue *> svals_to_clear;
2437 for (auto kv : *smap)
2438 {
2439 const svalue *sval = kv.first;
2440 state_t state = kv.second.m_state;
2441 if (assumed_non_null_p (state))
2442 {
2443 const assumed_non_null_state *assumed_state
2444 = (const assumed_non_null_state *)state;
 /* Only drop assumptions that were made within FRAME_REG. */
2445 if (frame_reg == assumed_state->m_frame)
2446 svals_to_clear.add (k: sval);
2447 }
2448 }
2449 for (auto sval : svals_to_clear)
2450 smap->clear_any_state (sval);
2451}
2452
2453/* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
2454 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
2455 (to avoid false leak reports). */
2456
2457bool
2458malloc_state_machine::can_purge_p (state_t s) const
2459{
2460 enum resource_state rs = get_rs (state: s);
2461 return rs != RS_UNCHECKED && rs != RS_NONNULL;
2462}
2463
2464/* Implementation of state_machine::on_leak vfunc for malloc_state_machine
2465 (for complaining about leaks of pointers in state 'unchecked' and
2466 'nonnull'). */
2467
2468std::unique_ptr<pending_diagnostic>
2469malloc_state_machine::on_leak (tree var) const
2470{
2471 return make_unique<malloc_leak> (args: *this, args&: var);
2472}
2473
2474/* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
2475 for malloc_state_machine. */
2476
2477bool
2478malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
2479 bool is_mutable) const
2480{
 /* Decide whether sm-state S should be discarded when the pointer is
    passed to a function whose behavior we know nothing about. */
2481 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
2482 unknown fn. */
2483 if (s == m_non_heap)
2484 return false;
2485
2486 /* Otherwise, pointers passed as non-const can be freed. */
2487 return is_mutable;
2488}
2489
2490/* Implementation of state_machine::maybe_get_merged_states_nonequal vfunc
2491 for malloc_state_machine.
2492
2493 Support discarding "assumed-non-null" states when merging with
2494 start state. */
2495
2496state_machine::state_t
2497malloc_state_machine::maybe_get_merged_states_nonequal (state_t state_a,
2498 state_t state_b) const
2499{
 /* Merging "assumed-non-null" with "start" (in either order) drops the
    assumption: the merged state is simply "start". */
2500 if (assumed_non_null_p (state: state_a) && state_b == m_start)
2501 return m_start;
2502 if (state_a == m_start && assumed_non_null_p (state: state_b))
2503 return m_start;
 /* NULL indicates that we have no special merged state for this pair. */
2504 return NULL;
2505}
2506
2507/* Return true if calls to FNDECL are known to not affect this sm-state. */
2508
2509bool
2510malloc_state_machine::unaffected_by_call_p (tree fndecl)
2511{
2512 /* A set of functions that are known to not affect allocation
2513 status, even if we haven't fully modelled the rest of their
2514 behavior yet. */
2515 static const char * const funcnames[] = {
2516 /* This array must be kept sorted. */
2517 "strsep",
2518 };
2519 const size_t count = ARRAY_SIZE (funcnames);
2520 function_set fs (funcnames, count);
2521
 /* Membership test: does FNDECL match one of the names above?
    NOTE(review): function_set presumably relies on the sorted order
    noted above -- confirm against analyzer/function-set.h. */
2522 if (fs.contains_decl_p (fndecl))
2523 return true;
2524
2525 return false;
2526}
2527
2528/* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
2529 assign zero to LHS. */
2530
2531void
2532malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
2533 const gimple *stmt,
2534 tree lhs) const
2535{
2536 state_t s = sm_ctxt->get_state (stmt, var: lhs);
2537 enum resource_state rs = get_rs (state: s);
 /* Assigning NULL overwrites whatever allocation status LHS had;
    only these resource-states transition to the "null" state --
    other states are deliberately left as-is. */
2538 if (rs == RS_START
2539 || rs == RS_UNCHECKED
2540 || rs == RS_NONNULL
2541 || rs == RS_FREED)
2542 sm_ctxt->set_next_state (stmt, var: lhs, to: m_null);
2543}
2544
2545/* Special-case hook for handling realloc, for the "success with move to
2546 a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
2547 non-null.
2548
2549 This is similar to on_deallocator_call and on_allocator_call,
2550 but the checks happen in on_realloc_call, and by splitting the states. */
2551
2552void
2553malloc_state_machine::
2554on_realloc_with_move (region_model *model,
2555 sm_state_map *smap,
2556 const svalue *old_ptr_sval,
2557 const svalue *new_ptr_sval,
2558 const extrinsic_state &ext_state) const
2559{
 /* The old pointer has been released by realloc: mark it "freed". */
2560 smap->set_state (model, sval: old_ptr_sval,
2561 state: m_free.m_deallocator.m_freed,
2562 NULL, ext_state);
2563
 /* The new pointer is a successful allocation: mark it "nonnull". */
2564 smap->set_state (model, sval: new_ptr_sval,
2565 state: m_free.m_nonnull,
2566 NULL, ext_state);
2567}
2568
2569/* Hook for get_or_create_region_for_heap_alloc for the case when we want
2570 ptr_sval to mark a newly created region as assumed non null on malloc SM. */
2571void
2572malloc_state_machine::transition_ptr_sval_non_null (region_model *model,
2573 sm_state_map *smap,
2574 const svalue *new_ptr_sval,
2575 const extrinsic_state &ext_state) const
2576{
 /* Unconditionally mark NEW_PTR_SVAL as "nonnull" in SMAP. */
2577 smap->set_state (model, sval: new_ptr_sval, state: m_free.m_nonnull, NULL, ext_state);
2578}
2579
2580} // anonymous namespace
2581
2582/* Internal interface to this file. */
2583
2584state_machine *
2585make_malloc_state_machine (logger *logger)
2586{
2587 return new malloc_state_machine (logger);
2588}
2589
2590/* Special-case hook for handling realloc, for use by
2591 kf_realloc::impl_call_post::success_with_move::update_model. */
2592
2593void
2594region_model::on_realloc_with_move (const call_details &cd,
2595 const svalue *old_ptr_sval,
2596 const svalue *new_ptr_sval)
2597{
 /* Without a context (and its extrinsic state) there is no sm-state
    to update; silently do nothing. */
2598 region_model_context *ctxt = cd.get_ctxt ();
2599 if (!ctxt)
2600 return;
2601 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2602 if (!ext_state)
2603 return;
2604
 /* Locate the malloc state machine and its sm-state map within the
    context; bail out if they're unavailable. */
2605 sm_state_map *smap;
2606 const state_machine *sm;
2607 unsigned sm_idx;
2608 if (!ctxt->get_malloc_map (out_smap: &smap, out_sm: &sm, out_sm_idx: &sm_idx))
2609 return;
2610
2611 gcc_assert (smap);
2612 gcc_assert (sm);
2613
 /* Delegate the actual state transitions to the state machine. */
2614 const malloc_state_machine &malloc_sm
2615 = (const malloc_state_machine &)*sm;
2616
2617 malloc_sm.on_realloc_with_move (model: this,
2618 smap,
2619 old_ptr_sval,
2620 new_ptr_sval,
2621 ext_state: *ext_state);
2622}
2623
2624/* Moves ptr_sval from start to assumed non-null, for use by
2625 region_model::get_or_create_region_for_heap_alloc. */
2626void
2627region_model::transition_ptr_sval_non_null (region_model_context *ctxt,
2628const svalue *ptr_sval)
2629{
 /* Without a context (and its extrinsic state) there is no sm-state
    to update; silently do nothing. */
2630 if (!ctxt)
2631 return;
2632 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2633 if (!ext_state)
2634 return;
2635
 /* Locate the malloc state machine and its sm-state map within the
    context; bail out if they're unavailable. */
2636 sm_state_map *smap;
2637 const state_machine *sm;
2638 unsigned sm_idx;
2639 if (!ctxt->get_malloc_map (out_smap: &smap, out_sm: &sm, out_sm_idx: &sm_idx))
2640 return;
2641
2642 gcc_assert (smap);
2643 gcc_assert (sm);
2644
 /* Delegate the actual state transition to the state machine. */
2645 const malloc_state_machine &malloc_sm = (const malloc_state_machine &)*sm;
2646
2647 malloc_sm.transition_ptr_sval_non_null (model: this, smap, new_ptr_sval: ptr_sval, ext_state: *ext_state);
2648}
2649
2650} // namespace ana
2651
2652#endif /* #if ENABLE_ANALYZER */
2653

source code of gcc/analyzer/sm-malloc.cc