/* Classes for modeling the state of memory.
   Copyright (C) 2020-2024 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_ANALYZER_STORE_H
#define GCC_ANALYZER_STORE_H

/* Implementation of the region-based ternary model described in:
     "A Memory Model for Static Analysis of C Programs"
     (Zhongxing Xu, Ted Kremenek, and Jian Zhang)
     http://lcs.ios.ac.cn/~xuzb/canalyze/memmodel.pdf  */

/* The store models memory as a collection of "clusters", where regions
   are partitioned into clusters via their base region.

   For example, given:
     int a, b, c;
     struct coord { double x; double y; } verts[3];
   then "verts[0].y" and "verts[1].x" both have "verts" as their base region.
   Each of a, b, c, and verts will have their own clusters, so that we
   know that writes to e.g. "verts[1].x" don't affect e.g. "a".

   Within each cluster we store a map of bindings to values, where the
   binding keys can be either concrete or symbolic.

   Concrete bindings affect a specific range of bits relative to the start
   of the base region of the cluster, whereas symbolic bindings affect
   a specific subregion within the cluster.

   Consider (from the symbolic-1.c testcase):

     char arr[1024];
     arr[2] = a;  (1)
     arr[3] = b;  (2)
   After (1) and (2), the cluster for "arr" has concrete bindings
   for bits 16-23 and for bits 24-31, with svalues "INIT_VAL(a)"
   and "INIT_VAL(b)" respectively:
     cluster: {bits 16-23: "INIT_VAL(a)",
	       bits 24-31: "INIT_VAL(b)";
	       flags: {}}
   Attempting to query unbound subregions, e.g. arr[4], will
   return "UNINITIALIZED".
   "a" and "b" are each in their own clusters, with no explicit
   bindings, and thus implicitly have the values INIT_VAL(a) and
   INIT_VAL(b).

     arr[3] = c;  (3)
   After (3), the concrete binding for bits 24-31 is replaced with the
   svalue "INIT_VAL(c)":
     cluster: {bits 16-23: "INIT_VAL(a)",  (from before)
	       bits 24-31: "INIT_VAL(c)";  (updated)
	       flags: {}}

     arr[i] = d;  (4)
   After (4), we lose the concrete bindings and replace them with a
   symbolic binding for "arr[i]", with svalue "INIT_VAL(d)".  We also
   mark the cluster as having been "symbolically touched": future
   attempts to query the values of subregions other than "arr[i]",
   such as "arr[3]", are "UNKNOWN", since we don't know if the write
   to arr[i] affected them.
     cluster: {symbolic_key(arr[i]): "INIT_VAL(d)";
	       flags: {TOUCHED}}

     arr[j] = e;  (5)
   After (5), we lose the symbolic binding for "arr[i]" since we could
   have overwritten it, and add a symbolic binding for "arr[j]", with
   svalue "INIT_VAL(e)".
     cluster: {symbolic_key(arr[j]): "INIT_VAL(e)";  (different symbolic
	       flags: {TOUCHED}}                      binding)

     arr[3] = f;  (6)
   After (6), we lose the symbolic binding for "arr[j]" since we could
   have overwritten it, and gain a concrete binding for bits 24-31
   again, this time with svalue "INIT_VAL(f)":
     cluster: {bits 24-31: "INIT_VAL(f)";
	       flags: {TOUCHED}}
   The cluster is still flagged as touched, so that we know that
   accesses to other elements are "UNKNOWN" rather than
   "UNINITIALIZED".

   Handling symbolic regions requires us to handle aliasing.

   In the first example above, each of a, b, c and verts are non-symbolic
   base regions and so their clusters are "concrete clusters", whereas given:
     struct coord *p, *q;
   then "*p" and "*q" are symbolic base regions, and thus "*p" and "*q"
   have "symbolic clusters".

   In the above, "verts[i].x" will have a symbolic *binding* within a
   concrete cluster for "verts", whereas "*p" is a symbolic *cluster*.

   Writes to concrete clusters can't affect other concrete clusters,
   but can affect symbolic clusters; e.g. after:
     verts[0].x = 42;
   we bind 42 in the cluster for "verts", but the clusters for "b" and "c"
   can't be affected.  Any symbolic clusters for *p and for *q can be
   affected, since *p and *q could alias verts.

   Writes to a symbolic cluster can affect other clusters, both
   concrete and symbolic; e.g. after:
     p->x = 17;
   we bind 17 within the cluster for "*p".  The concrete clusters for a, b,
   c, and verts could be affected, depending on whether *p aliases them.
   Similarly, the symbolic cluster for *q could be affected.  */

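/* As an illustrative sketch (hypothetical client code, not part of this
   header; the real entry points are store::set_value and
   store::get_any_binding, declared below), binding a value and reading
   it back might look like:

     void
     example_bind_and_query (store &s, store_manager *mgr,
			     const region *lhs_reg, const svalue *rhs_sval,
			     uncertainty_t *uncertainty)
     {
       s.set_value (mgr, lhs_reg, rhs_sval, uncertainty);
       const svalue *readback = s.get_any_binding (mgr, lhs_reg);
       // readback should be rhs_sval, unless a write to a possibly
       // aliasing region has clobbered the binding.
     }  */
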
namespace ana {

/* A class for keeping track of aspects of a program_state that we don't
   know about, to avoid false positives about leaks.

   Consider:

     p->field = malloc (1024);
     q->field = NULL;

   where we don't know whether or not p and q point to the same memory,
   and:

     p->field = malloc (1024);
     unknown_fn (p);

   In both cases, the svalue for the address of the allocated buffer
   goes from being bound to p->field to not having anything explicitly bound
   to it.

   Given that we conservatively discard bindings due to possible aliasing or
   calls to unknown functions, the store loses references to svalues,
   but these svalues could still be live.  We don't want to warn about
   them leaking - they're effectively in a "maybe live" state.

   This "maybe live" information is somewhat transient.

   We don't want to store this "maybe live" information in the program_state,
   region_model, or store, since we don't want to bloat these objects (and
   potentially bloat the exploded_graph with more nodes).
   However, we can't store it in the region_model_context, as these context
   objects sometimes don't last long enough to be around when comparing the
   old vs the new state.

   This class is a way to track a set of such svalues, so that we can
   temporarily capture that they are in a "maybe live" state whilst
   comparing old and new states.  */

class uncertainty_t
{
public:
  typedef hash_set<const svalue *>::iterator iterator;

  void on_maybe_bound_sval (const svalue *sval)
  {
    m_maybe_bound_svals.add (sval);
  }
  void on_mutable_sval_at_unknown_call (const svalue *sval)
  {
    m_mutable_at_unknown_call_svals.add (sval);
  }

  bool unknown_sm_state_p (const svalue *sval)
  {
    return (m_maybe_bound_svals.contains (sval)
	    || m_mutable_at_unknown_call_svals.contains (sval));
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const;
  void dump (bool simple) const;

  iterator begin_maybe_bound_svals () const
  {
    return m_maybe_bound_svals.begin ();
  }
  iterator end_maybe_bound_svals () const
  {
    return m_maybe_bound_svals.end ();
  }

private:

  /* svalues that might or might not still be bound.  */
  hash_set<const svalue *> m_maybe_bound_svals;

  /* svalues that have mutable sm-state at unknown calls.  */
  hash_set<const svalue *> m_mutable_at_unknown_call_svals;
};
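
/* For example (a minimal sketch of how a caller might use this class;
   in practice an uncertainty_t is threaded through the analyzer's
   state-comparison machinery rather than used standalone):

     uncertainty_t uncertainty;
     uncertainty.on_maybe_bound_sval (sval);
     // ...later, when comparing the old vs the new state:
     if (uncertainty.unknown_sm_state_p (sval))
       ;  // sval is "maybe live": don't report it as leaking.  */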

class byte_range;
class concrete_binding;
class symbolic_binding;

/* Abstract base class for describing ranges of bits within a binding_map
   that can have svalues bound to them.  */

class binding_key
{
public:
  virtual ~binding_key () {}
  virtual bool concrete_p () const = 0;
  bool symbolic_p () const { return !concrete_p (); }

  static const binding_key *make (store_manager *mgr, const region *r);

  virtual void dump_to_pp (pretty_printer *pp, bool simple) const = 0;
  void dump (bool simple) const;
  label_text get_desc (bool simple=true) const;

  static int cmp_ptrs (const void *, const void *);
  static int cmp (const binding_key *, const binding_key *);

  virtual const concrete_binding *dyn_cast_concrete_binding () const
  { return NULL; }
  virtual const symbolic_binding *dyn_cast_symbolic_binding () const
  { return NULL; }
};

/* A concrete range of bits.  */

struct bit_range
{
  bit_range (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
  : m_start_bit_offset (start_bit_offset),
    m_size_in_bits (size_in_bits)
  {}

  void dump_to_pp (pretty_printer *pp) const;
  void dump () const;

  json::object *to_json () const;

  bool empty_p () const
  {
    return m_size_in_bits == 0;
  }

  bit_offset_t get_start_bit_offset () const
  {
    return m_start_bit_offset;
  }
  bit_offset_t get_next_bit_offset () const
  {
    return m_start_bit_offset + m_size_in_bits;
  }
  bit_offset_t get_last_bit_offset () const
  {
    gcc_assert (!empty_p ());
    return get_next_bit_offset () - 1;
  }

  bool contains_p (bit_offset_t offset) const
  {
    return (offset >= get_start_bit_offset ()
	    && offset < get_next_bit_offset ());
  }

  bool contains_p (const bit_range &other, bit_range *out) const;

  bool operator== (const bit_range &other) const
  {
    return (m_start_bit_offset == other.m_start_bit_offset
	    && m_size_in_bits == other.m_size_in_bits);
  }

  bool intersects_p (const bit_range &other) const
  {
    return (get_start_bit_offset () < other.get_next_bit_offset ()
	    && other.get_start_bit_offset () < get_next_bit_offset ());
  }
  bool intersects_p (const bit_range &other,
		     bit_size_t *out_num_overlap_bits) const;
  bool intersects_p (const bit_range &other,
		     bit_range *out_this,
		     bit_range *out_other) const;

  bool exceeds_p (const bit_range &other,
		  bit_range *out_overhanging_bit_range) const;

  bool falls_short_of_p (bit_offset_t offset,
			 bit_range *out_fall_short_bits) const;

  static int cmp (const bit_range &br1, const bit_range &br2);

  bit_range operator- (bit_offset_t offset) const;

  static bool from_mask (unsigned HOST_WIDE_INT mask, bit_range *out);

  bool as_byte_range (byte_range *out) const;

  bit_offset_t m_start_bit_offset;
  bit_size_t m_size_in_bits;
};
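
/* For example (a sketch; the values follow directly from the accessors
   above):

     bit_range br (16, 8);		      // bits 16-23
     br.get_start_bit_offset ();	      // 16
     br.get_next_bit_offset ();		      // 24
     br.get_last_bit_offset ();		      // 23
     br.contains_p (23);		      // true
     br.contains_p (24);		      // false
     br.intersects_p (bit_range (20, 8));     // true: bits 20-23 overlap  */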

/* A concrete range of bytes.  */

struct byte_range
{
  byte_range (byte_offset_t start_byte_offset, byte_size_t size_in_bytes)
  : m_start_byte_offset (start_byte_offset),
    m_size_in_bytes (size_in_bytes)
  {}

  void dump_to_pp (pretty_printer *pp) const;
  void dump () const;

  json::object *to_json () const;

  bool empty_p () const
  {
    return m_size_in_bytes == 0;
  }

  bool contains_p (byte_offset_t offset) const
  {
    return (offset >= get_start_byte_offset ()
	    && offset < get_next_byte_offset ());
  }
  bool contains_p (const byte_range &other, byte_range *out) const;

  bool operator== (const byte_range &other) const
  {
    return (m_start_byte_offset == other.m_start_byte_offset
	    && m_size_in_bytes == other.m_size_in_bytes);
  }

  byte_offset_t get_start_byte_offset () const
  {
    return m_start_byte_offset;
  }
  byte_offset_t get_next_byte_offset () const
  {
    return m_start_byte_offset + m_size_in_bytes;
  }
  byte_offset_t get_last_byte_offset () const
  {
    gcc_assert (!empty_p ());
    return m_start_byte_offset + m_size_in_bytes - 1;
  }

  bit_range as_bit_range () const
  {
    return bit_range (m_start_byte_offset * BITS_PER_UNIT,
		      m_size_in_bytes * BITS_PER_UNIT);
  }

  bit_offset_t get_start_bit_offset () const
  {
    return m_start_byte_offset * BITS_PER_UNIT;
  }
  bit_offset_t get_next_bit_offset () const
  {
    return get_next_byte_offset () * BITS_PER_UNIT;
  }

  static int cmp (const byte_range &br1, const byte_range &br2);

  byte_offset_t m_start_byte_offset;
  byte_size_t m_size_in_bytes;
};
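
/* For example, assuming BITS_PER_UNIT is 8 (a sketch):

     byte_range bytes (2, 2);		 // bytes 2-3
     bytes.as_bit_range ();		 // bit_range (16, 16), i.e. bits 16-31
     bytes.get_last_byte_offset ();	 // 3  */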

/* Concrete subclass of binding_key, for describing a non-empty
   concrete range of bits within the binding_map (e.g. "bits 8-15").  */

class concrete_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef concrete_binding key_t;

  concrete_binding (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
  : m_bit_range (start_bit_offset, size_in_bits)
  {
    gcc_assert (m_bit_range.m_size_in_bits > 0);
  }
  bool concrete_p () const final override { return true; }

  hashval_t hash () const
  {
    inchash::hash hstate;
    hstate.add_wide_int (m_bit_range.m_start_bit_offset);
    hstate.add_wide_int (m_bit_range.m_size_in_bits);
    return hstate.end ();
  }
  bool operator== (const concrete_binding &other) const
  {
    return m_bit_range == other.m_bit_range;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const final override;

  const concrete_binding *dyn_cast_concrete_binding () const final override
  { return this; }

  const bit_range &get_bit_range () const { return m_bit_range; }
  bool get_byte_range (byte_range *out) const;

  bit_offset_t get_start_bit_offset () const
  {
    return m_bit_range.m_start_bit_offset;
  }
  bit_size_t get_size_in_bits () const
  {
    return m_bit_range.m_size_in_bits;
  }
  /* Return the next bit offset after the end of this binding.  */
  bit_offset_t get_next_bit_offset () const
  {
    return m_bit_range.get_next_bit_offset ();
  }

  bool overlaps_p (const concrete_binding &other) const;

  static int cmp_ptr_ptr (const void *, const void *);

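  /* Support for hash_map: a negative size in m_bit_range marks a slot
     as deleted or empty; real bindings always have a size greater than
     zero (as asserted in the constructor).  */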
  void mark_deleted () { m_bit_range.m_size_in_bits = -1; }
  void mark_empty () { m_bit_range.m_size_in_bits = -2; }
  bool is_deleted () const { return m_bit_range.m_size_in_bits == -1; }
  bool is_empty () const { return m_bit_range.m_size_in_bits == -2; }

private:
  bit_range m_bit_range;
};

} // namespace ana

template <>
template <>
inline bool
is_a_helper <const ana::concrete_binding *>::test (const ana::binding_key *key)
{
  return key->concrete_p ();
}

template <> struct default_hash_traits<ana::concrete_binding>
: public member_function_hash_traits<ana::concrete_binding>
{
  static const bool empty_zero_p = false;
};

namespace ana {

/* Concrete subclass of binding_key, for describing a symbolic set of
   bits within the binding_map in terms of a region (e.g. "arr[i]").  */

class symbolic_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef symbolic_binding key_t;

  symbolic_binding (const region *region) : m_region (region) {}
  bool concrete_p () const final override { return false; }

  hashval_t hash () const
  {
    return (intptr_t)m_region;
  }
  bool operator== (const symbolic_binding &other) const
  {
    return m_region == other.m_region;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const final override;

  const symbolic_binding *dyn_cast_symbolic_binding () const final override
  { return this; }

  const region *get_region () const { return m_region; }

  static int cmp_ptr_ptr (const void *, const void *);

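  /* Support for hash_map: a "deleted" slot is marked with a dummy
     non-null pointer, an "empty" slot with NULL; real bindings always
     have a non-null region.  */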
  void mark_deleted () { m_region = reinterpret_cast<const region *> (1); }
  void mark_empty () { m_region = NULL; }
  bool is_deleted () const
  { return m_region == reinterpret_cast<const region *> (1); }
  bool is_empty () const { return m_region == NULL; }

private:
  const region *m_region;
};

} // namespace ana

template <> struct default_hash_traits<ana::symbolic_binding>
: public member_function_hash_traits<ana::symbolic_binding>
{
  static const bool empty_zero_p = true;
};

namespace ana {

/* A mapping from binding_keys to svalues, for use by binding_cluster
   and compound_svalue.  */

class binding_map
{
public:
  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_map () : m_map () {}
  binding_map (const binding_map &other);
  binding_map& operator=(const binding_map &other);

  bool operator== (const binding_map &other) const;
  bool operator!= (const binding_map &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  const svalue *get (const binding_key *key) const
  {
    const svalue **slot = const_cast<map_t &> (m_map).get (key);
    if (slot)
      return *slot;
    else
      return NULL;
  }
  bool put (const binding_key *k, const svalue *v)
  {
    gcc_assert (v);
    return m_map.put (k, v);
  }

  void remove (const binding_key *k) { m_map.remove (k); }
  void empty () { m_map.empty (); }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }
  size_t elements () const { return m_map.elements (); }

  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  json::object *to_json () const;

  bool apply_ctor_to_region (const region *parent_reg, tree ctor,
			     region_model_manager *mgr);

  static int cmp (const binding_map &map1, const binding_map &map2);

  void remove_overlapping_bindings (store_manager *mgr,
				    const binding_key *drop_key,
				    uncertainty_t *uncertainty,
				    svalue_set *maybe_live_values,
				    bool always_overlap);

private:
  void get_overlapping_bindings (const binding_key *key,
				 auto_vec<const binding_key *> *out);
  bool apply_ctor_val_to_range (const region *parent_reg,
				region_model_manager *mgr,
				tree min_index, tree max_index,
				tree val);
  bool apply_ctor_pair_to_child_region (const region *parent_reg,
					region_model_manager *mgr,
					tree index, tree val);

  map_t m_map;
};
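
/* For example (a sketch; the key would come from a store_manager and the
   svalue from a region_model_manager):

     binding_map m;
     m.put (key, sval);
     const svalue *v = m.get (key);  // == sval
     m.remove (key);		     // now m.get (key) returns NULL  */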

/* Concept: BindingVisitor, for use by binding_cluster::for_each_binding
   and store::for_each_binding.

   Should implement:
     void on_binding (const binding_key *key, const svalue *&sval);
*/
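
/* For example, a minimal visitor (a sketch; any class with a compatible
   on_binding member function can be used):

     struct binding_counter
     {
       void on_binding (const binding_key *, const svalue *&)
       {
	 m_count++;
       }
       unsigned m_count = 0;
     };

   An instance can then be passed to binding_cluster::for_each_binding
   or store::for_each_binding.  */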

/* All of the bindings within a store for regions that share the same
   base region.  */

class binding_cluster
{
public:
  friend class store;

  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_cluster (const region *base_region);
  binding_cluster (const binding_cluster &other);
  binding_cluster& operator=(const binding_cluster &other);

  bool operator== (const binding_cluster &other) const;
  bool operator!= (const binding_cluster &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  bool symbolic_p () const;

  const region *get_base_region () const { return m_base_region; }

  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  void validate () const;

  json::object *to_json () const;

  void bind (store_manager *mgr, const region *, const svalue *);

  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  void mark_region_as_unknown (store_manager *mgr,
			       const region *reg_to_bind,
			       const region *reg_for_overlap,
			       uncertainty_t *uncertainty,
			       svalue_set *maybe_live_values);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  const svalue *get_binding (store_manager *mgr, const region *reg) const;
  const svalue *get_binding_recursive (store_manager *mgr,
				       const region *reg) const;
  const svalue *get_any_binding (store_manager *mgr,
				 const region *reg) const;
  const svalue *maybe_get_compound_binding (store_manager *mgr,
					    const region *reg) const;

  void remove_overlapping_bindings (store_manager *mgr, const region *reg,
				    uncertainty_t *uncertainty,
				    svalue_set *maybe_live_values);

  template <typename T>
  void for_each_value (void (*cb) (const svalue *sval, T user_data),
		       T user_data) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      cb ((*iter).second, user_data);
  }

  static bool can_merge_p (const binding_cluster *cluster_a,
			   const binding_cluster *cluster_b,
			   binding_cluster *out_cluster,
			   store *out_store,
			   store_manager *mgr,
			   model_merger *merger);
  void make_unknown_relative_to (const binding_cluster *other_cluster,
				 store *out_store,
				 store_manager *mgr);

  void mark_as_escaped ();
  void on_unknown_fncall (const gcall *call, store_manager *mgr,
			  const conjured_purge &p);
  void on_asm (const gasm *stmt, store_manager *mgr,
	       const conjured_purge &p);

  bool escaped_p () const;
  bool touched_p () const { return m_touched; }

  bool redundant_p () const;
  bool empty_p () const { return m_map.elements () == 0; }

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const region *base_reg,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  const svalue *maybe_get_simple_value (store_manager *mgr) const;

  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      {
	const binding_key *key = (*iter).first;
	const svalue *&sval = (*iter).second;
	v.on_binding (key, sval);
      }
  }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }

  const binding_map &get_map () const { return m_map; }

private:
  const svalue *get_any_value (const binding_key *key) const;
  void bind_compound_sval (store_manager *mgr,
			   const region *reg,
			   const compound_svalue *compound_sval);
  void bind_key (const binding_key *key, const svalue *sval);

  const region *m_base_region;

  binding_map m_map;

  /* Has a pointer to this cluster "escaped" into a part of the program
     we don't know about (via a call to a function with an unknown body,
     or by being passed in as a pointer param of a "top-level" function call)?
     Such regions could be overwritten when other such functions are called,
     even if the region is no longer reachable by pointers that we are
     tracking.  */
  bool m_escaped;

  /* Has this cluster been written to via a symbolic binding?
     If so, then we don't know anything about unbound subregions,
     so we can't use initial_svalue, treat them as uninitialized, or
     inherit values from a parent region.  */
  bool m_touched;
};

/* The mapping from regions to svalues.
   This is actually expressed by subdividing into clusters, to better
   handle aliasing.  */

class store
{
public:
  typedef hash_map <const region *, binding_cluster *> cluster_map_t;

  store ();
  store (const store &other);
  ~store ();

  store &operator= (const store &other);

  bool operator== (const store &other) const;
  bool operator!= (const store &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  void dump_to_pp (pretty_printer *pp, bool summarize, bool multiline,
		   store_manager *mgr) const;
  void dump (bool simple) const;
  void summarize_to_pp (pretty_printer *pp, bool simple) const;

  void validate () const;

  json::object *to_json () const;

  const svalue *get_any_binding (store_manager *mgr, const region *reg) const;

  bool called_unknown_fn_p () const { return m_called_unknown_fn; }

  void set_value (store_manager *mgr, const region *lhs_reg,
		  const svalue *rhs_sval,
		  uncertainty_t *uncertainty);
  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  void mark_region_as_unknown (store_manager *mgr, const region *reg,
			       uncertainty_t *uncertainty,
			       svalue_set *maybe_live_values);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  const binding_cluster *get_cluster (const region *base_reg) const;
  binding_cluster *get_cluster (const region *base_reg);
  binding_cluster *get_or_create_cluster (const region *base_reg);
  void purge_cluster (const region *base_reg);

  template <typename T>
  void for_each_cluster (void (*cb) (const region *base_reg, T user_data),
			 T user_data) const
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      cb ((*iter).first, user_data);
  }

  static bool can_merge_p (const store *store_a, const store *store_b,
			   store *out_store, store_manager *mgr,
			   model_merger *merger);

  void mark_as_escaped (const region *base_reg);
  void on_unknown_fncall (const gcall *call, store_manager *mgr,
			  const conjured_purge &p);
  bool escaped_p (const region *reg) const;

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  cluster_map_t::iterator begin () const { return m_cluster_map.begin (); }
  cluster_map_t::iterator end () const { return m_cluster_map.end (); }

  tristate eval_alias (const region *base_reg_a,
		       const region *base_reg_b) const;

  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v)
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      (*iter).second->for_each_binding (v);
  }

  void canonicalize (store_manager *mgr);
  void loop_replay_fixup (const store *other_store,
			  region_model_manager *mgr);

  void replay_call_summary (call_summary_replay &r,
			    const store &summary);
  void replay_call_summary_cluster (call_summary_replay &r,
				    const store &summary,
				    const region *base_reg);
  void on_maybe_live_values (const svalue_set &maybe_live_values);

private:
  void remove_overlapping_bindings (store_manager *mgr, const region *reg,
				    uncertainty_t *uncertainty);
  tristate eval_alias_1 (const region *base_reg_a,
			 const region *base_reg_b) const;

  cluster_map_t m_cluster_map;

  /* If this is true, then unknown code has been called, and so
     any global variable that isn't currently modelled by the store
     has unknown state, rather than being in an "initial state".
     This is to avoid having to mark (and thus explicitly track)
     every global when an unknown function is called; instead, they
     can be tracked implicitly.  */
  bool m_called_unknown_fn;
};
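
/* For example (a sketch of hypothetical user code; note that
   for_each_cluster takes a plain function pointer plus user_data,
   rather than a visitor object):

     static void
     count_cluster (const region *base_reg, int *count)
     {
       (*count)++;
     }
     // ...
     int num_clusters = 0;
     s.for_each_cluster (count_cluster, &num_clusters);  */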

/* A class responsible for owning and consolidating binding keys
   (both concrete and symbolic).
   Key instances are immutable as far as clients are concerned, so they
   are provided as "const" ptrs.  */

class store_manager
{
public:
  store_manager (region_model_manager *mgr) : m_mgr (mgr) {}

  logger *get_logger () const;

  /* binding consolidation.  */
  const concrete_binding *
  get_concrete_binding (bit_offset_t start_bit_offset,
			bit_offset_t size_in_bits);
  const concrete_binding *
  get_concrete_binding (const bit_range &bits)
  {
    return get_concrete_binding (bits.get_start_bit_offset (),
				 bits.m_size_in_bits);
  }
  const concrete_binding *
  get_concrete_binding (const byte_range &bytes)
  {
    bit_range bits = bytes.as_bit_range ();
    return get_concrete_binding (bits);
  }
  const symbolic_binding *
  get_symbolic_binding (const region *region);

  region_model_manager *get_svalue_manager () const
  {
    return m_mgr;
  }

  void log_stats (logger *logger, bool show_objs) const;

private:
  region_model_manager *m_mgr;
  consolidation_map<concrete_binding> m_concrete_binding_key_mgr;
  consolidation_map<symbolic_binding> m_symbolic_binding_key_mgr;
};
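
/* Because keys are consolidated, requesting the same range twice yields
   the same instance; e.g. (a sketch, given a store_manager *mgr):

     const concrete_binding *k1 = mgr->get_concrete_binding (16, 8);
     const concrete_binding *k2
       = mgr->get_concrete_binding (bit_range (16, 8));
     // k1 == k2: both describe bits 16-23  */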

} // namespace ana

#endif /* GCC_ANALYZER_STORE_H */