/* Regions of memory.
   Copyright (C) 2019-2024 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "bitmap.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "options.h"
#include "cgraph.h"
#include "cfg.h"
#include "digraph.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region.h"
#include "analyzer/region-model.h"
#include "analyzer/sm.h"
#include "analyzer/program-state.h"

#if ENABLE_ANALYZER

namespace ana {

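/* Return a region_offset for BASE_REGION offset by NUM_BYTES_SVAL:
   a concrete offset if NUM_BYTES_SVAL is an integer constant,
   otherwise a symbolic offset.  */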
region_offset
region_offset::make_byte_offset (const region *base_region,
				 const svalue *num_bytes_sval)
{
  if (tree num_bytes_cst = num_bytes_sval->maybe_get_constant ())
    {
      gcc_assert (TREE_CODE (num_bytes_cst) == INTEGER_CST);
      bit_offset_t num_bits = wi::to_offset (num_bytes_cst) * BITS_PER_UNIT;
      return make_concrete (base_region, num_bits);
    }
  else
    {
      return make_symbolic (base_region, num_bytes_sval);
    }
}

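/* Get this offset in bits, as an svalue created in MGR: for symbolic
   offsets, the symbolic byte offset multiplied by BITS_PER_UNIT;
   for concrete offsets, an integer constant.  */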
const svalue &
region_offset::calc_symbolic_bit_offset (region_model_manager *mgr) const
{
  if (symbolic_p ())
    {
      const svalue *bits_per_byte
	= mgr->get_or_create_int_cst (NULL_TREE, BITS_PER_UNIT);
      return *mgr->get_or_create_binop (NULL_TREE, MULT_EXPR,
					m_sym_offset, bits_per_byte);
    }
  else
    return *mgr->get_or_create_int_cst (NULL_TREE, m_offset);
}

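/* Get this offset in bytes, as an svalue created in MGR, returning
   an "unknown" svalue for concrete offsets that aren't a whole number
   of bytes.  */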
const svalue *
region_offset::calc_symbolic_byte_offset (region_model_manager *mgr) const
{
  if (symbolic_p ())
    return m_sym_offset;
  else
    {
      byte_offset_t concrete_byte_offset;
      if (get_concrete_byte_offset (&concrete_byte_offset))
	return mgr->get_or_create_int_cst (size_type_node,
					   concrete_byte_offset);
      else
	/* Can't handle bitfields; return UNKNOWN.  */
	return mgr->get_or_create_unknown_svalue (size_type_node);
    }
}

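/* Dump a description of this offset to PP.  */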
void
region_offset::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (symbolic_p ())
    {
      /* We don't bother showing the base region.  */
      pp_string (pp, "byte ");
      m_sym_offset->dump_to_pp (pp, simple);
    }
  else
    {
      if (m_offset % BITS_PER_UNIT == 0)
	{
	  pp_string (pp, "byte ");
	  pp_wide_int (pp, m_offset / BITS_PER_UNIT, SIGNED);
	}
      else
	{
	  pp_string (pp, "bit ");
	  pp_wide_int (pp, m_offset, SIGNED);
	}
    }
}

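/* Dump a description of this offset to stderr.  */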
DEBUG_FUNCTION void
region_offset::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* An svalue that matches the pattern (BASE * FACTOR) + OFFSET
   where FACTOR or OFFSET could be the identity (represented as NULL).  */
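
/* Illustrative example (not from the original sources): the svalue for
   "i * 4 + 12" decomposes as BASE = i, FACTOR = 4, OFFSET = 12;
   "i + 12" has FACTOR == NULL (an implicit factor of 1), and "i * 4"
   has OFFSET == NULL (an implicit offset of 0).  */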

struct linear_op
{
  linear_op (const svalue *base,
	     const svalue *factor,
	     const svalue *offset)
  : m_base (base), m_factor (factor), m_offset (offset)
  {
  }

  bool maybe_get_cst_factor (bit_offset_t *out) const
  {
    if (m_factor == nullptr)
      {
	*out = 1;
	return true;
      }
    if (tree cst_factor = m_factor->maybe_get_constant ())
      {
	*out = wi::to_offset (cst_factor);
	return true;
      }
    return false;
  }

  bool maybe_get_cst_offset (bit_offset_t *out) const
  {
    if (m_offset == nullptr)
      {
	*out = 0;
	return true;
      }
    if (tree cst_offset = m_offset->maybe_get_constant ())
      {
	*out = wi::to_offset (cst_offset);
	return true;
      }
    return false;
  }

  static tristate
  less (const linear_op &a, const linear_op &b)
  {
    /* Same base.  */
    if (a.m_base == b.m_base)
      {
	bit_offset_t a_wi_factor;
	bit_offset_t b_wi_factor;
	if (a.maybe_get_cst_factor (&a_wi_factor)
	    && b.maybe_get_cst_factor (&b_wi_factor))
	  {
	    if (a_wi_factor != b_wi_factor)
	      return tristate (a_wi_factor < b_wi_factor);
	    else
	      {
		bit_offset_t a_wi_offset;
		bit_offset_t b_wi_offset;
		if (a.maybe_get_cst_offset (&a_wi_offset)
		    && b.maybe_get_cst_offset (&b_wi_offset))
		  return tristate (a_wi_offset < b_wi_offset);
	      }
	  }
      }
    return tristate::unknown ();
  }

  static tristate
  le (const linear_op &a, const linear_op &b)
  {
    /* Same base.  */
    if (a.m_base == b.m_base)
      {
	bit_offset_t a_wi_factor;
	bit_offset_t b_wi_factor;
	if (a.maybe_get_cst_factor (&a_wi_factor)
	    && b.maybe_get_cst_factor (&b_wi_factor))
	  {
	    if (a_wi_factor != b_wi_factor)
	      return tristate (a_wi_factor <= b_wi_factor);
	    else
	      {
		bit_offset_t a_wi_offset;
		bit_offset_t b_wi_offset;
		if (a.maybe_get_cst_offset (&a_wi_offset)
		    && b.maybe_get_cst_offset (&b_wi_offset))
		  return tristate (a_wi_offset <= b_wi_offset);
	      }
	  }
      }
    return tristate::unknown ();
  }

  static bool
  from_svalue (const svalue &sval, linear_op *out)
  {
    switch (sval.get_kind ())
      {
      default:
	break;
      case SK_BINOP:
	{
	  const binop_svalue &binop_sval ((const binop_svalue &)sval);
	  if (binop_sval.get_op () == MULT_EXPR)
	    {
	      *out = linear_op (binop_sval.get_arg0 (),
				binop_sval.get_arg1 (),
				NULL);
	      return true;
	    }
	  else if (binop_sval.get_op () == PLUS_EXPR)
	    {
	      if (binop_sval.get_arg0 ()->get_kind () == SK_BINOP)
		{
		  const binop_svalue &inner_binop_sval
		    ((const binop_svalue &)*binop_sval.get_arg0 ());
		  if (inner_binop_sval.get_op () == MULT_EXPR)
		    {
		      *out = linear_op (inner_binop_sval.get_arg0 (),
					inner_binop_sval.get_arg1 (),
					binop_sval.get_arg1 ());
		      return true;
		    }
		}

	      *out = linear_op (binop_sval.get_arg0 (),
				NULL,
				binop_sval.get_arg1 ());
	      return true;
	    }
	}
	break;
      }
    return false;
  }

  const svalue *m_base;
  const svalue *m_factor;
  const svalue *m_offset;
};

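/* Ordering for region_offset: all concrete offsets sort before all
   symbolic ones; symbolic offsets are compared via linear_op where
   possible, otherwise via svalue's deterministic order.  */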
bool
operator< (const region_offset &a, const region_offset &b)
{
  if (a.symbolic_p ())
    {
      if (b.symbolic_p ())
	{
	  /* Symbolic vs symbolic.  */
	  const svalue &a_sval = *a.get_symbolic_byte_offset ();
	  const svalue &b_sval = *b.get_symbolic_byte_offset ();

	  linear_op op_a (NULL, NULL, NULL);
	  linear_op op_b (NULL, NULL, NULL);
	  if (linear_op::from_svalue (a_sval, &op_a)
	      && linear_op::from_svalue (b_sval, &op_b))
	    {
	      tristate ts = linear_op::less (op_a, op_b);
	      if (ts.is_true ())
		return true;
	      else if (ts.is_false ())
		return false;
	    }
	  /* Use svalue's deterministic order, for now.  */
	  return (svalue::cmp_ptr (a.get_symbolic_byte_offset (),
				   b.get_symbolic_byte_offset ())
		  < 0);
	}
      else
	/* Symbolic vs concrete: put all symbolic after all concrete.  */
	return false;
    }
  else
    {
      if (b.symbolic_p ())
	/* Concrete vs symbolic: put all concrete before all symbolic.  */
	return true;
      else
	/* Concrete vs concrete.  */
	return a.get_bit_offset () < b.get_bit_offset ();
    }
}

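/* Likewise for "<=".  */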
bool
operator<= (const region_offset &a, const region_offset &b)
{
  if (a.symbolic_p ())
    {
      if (b.symbolic_p ())
	{
	  /* Symbolic vs symbolic.  */
	  const svalue &a_sval = *a.get_symbolic_byte_offset ();
	  const svalue &b_sval = *b.get_symbolic_byte_offset ();

	  linear_op op_a (NULL, NULL, NULL);
	  linear_op op_b (NULL, NULL, NULL);
	  if (linear_op::from_svalue (a_sval, &op_a)
	      && linear_op::from_svalue (b_sval, &op_b))
	    {
	      tristate ts = linear_op::le (op_a, op_b);
	      if (ts.is_true ())
		return true;
	      else if (ts.is_false ())
		return false;
	    }
	  /* Use svalue's deterministic order, for now.  */
	  return (svalue::cmp_ptr (a.get_symbolic_byte_offset (),
				   b.get_symbolic_byte_offset ())
		  <= 0);
	}
      else
	/* Symbolic vs concrete: put all symbolic after all concrete.  */
	return false;
    }
  else
    {
      if (b.symbolic_p ())
	/* Concrete vs symbolic: put all concrete before all symbolic.  */
	return true;
      else
	/* Concrete vs concrete.  */
	return a.get_bit_offset () <= b.get_bit_offset ();
    }
}

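/* Likewise for ">", implemented via operator<.  */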
bool
operator> (const region_offset &a, const region_offset &b)
{
  return b < a;
}

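/* Likewise for ">=", implemented via operator<=.  */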
bool
operator>= (const region_offset &a, const region_offset &b)
{
  return b <= a;
}

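/* Return a copy of OFFSET with the types stripped from its symbolic
   byte offset; concrete offsets are returned unchanged.  */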
region_offset
strip_types (const region_offset &offset, region_model_manager &mgr)
{
  if (offset.symbolic_p ())
    return region_offset::make_symbolic
      (offset.get_base_region (),
       strip_types (offset.get_symbolic_byte_offset (),
		    mgr));
  else
    return offset;
}

/* class region and its various subclasses.  */

/* class region.  */

region::~region ()
{
  delete m_cached_offset;
}

/* Determine the base region for this region: when considering bindings
   for this region, the base region is the ancestor which identifies
   which cluster they should be partitioned into.
   Regions within the same struct/union/array are in the same cluster.
   Different decls are in different clusters.  */

const region *
region::get_base_region () const
{
  const region *iter = this;
  while (iter)
    {
      switch (iter->get_kind ())
	{
	case RK_FIELD:
	case RK_ELEMENT:
	case RK_OFFSET:
	case RK_SIZED:
	case RK_BIT_RANGE:
	  iter = iter->get_parent_region ();
	  continue;
	case RK_CAST:
	  iter = iter->dyn_cast_cast_region ()->get_original_region ();
	  continue;
	default:
	  return iter;
	}
    }
  return iter;
}

/* Return true if get_base_region() == this for this region.  */

bool
region::base_region_p () const
{
  switch (get_kind ())
    {
    /* Region kinds representing a descendent of a base region.  */
    case RK_FIELD:
    case RK_ELEMENT:
    case RK_OFFSET:
    case RK_SIZED:
    case RK_CAST:
    case RK_BIT_RANGE:
      return false;

    default:
      return true;
    }
}

/* Return true if this region is ELDER or one of its descendents.  */

bool
region::descendent_of_p (const region *elder) const
{
  const region *iter = this;
  while (iter)
    {
      if (iter == elder)
	return true;
      if (iter->get_kind () == RK_CAST)
	iter = iter->dyn_cast_cast_region ()->get_original_region ();
      else
	iter = iter->get_parent_region ();
    }
  return false;
}

/* If this region is a frame_region, or a descendent of one, return it.
   Otherwise return NULL.  */

const frame_region *
region::maybe_get_frame_region () const
{
  const region *iter = this;
  while (iter)
    {
      if (const frame_region *frame_reg = iter->dyn_cast_frame_region ())
	return frame_reg;
      if (iter->get_kind () == RK_CAST)
	iter = iter->dyn_cast_cast_region ()->get_original_region ();
      else
	iter = iter->get_parent_region ();
    }
  return NULL;
}

/* Get the memory space of this region.  */

enum memory_space
region::get_memory_space () const
{
  const region *iter = this;
  while (iter)
    {
      switch (iter->get_kind ())
	{
	default:
	  break;
	case RK_GLOBALS:
	  return MEMSPACE_GLOBALS;
	case RK_CODE:
	case RK_FUNCTION:
	case RK_LABEL:
	  return MEMSPACE_CODE;
	case RK_FRAME:
	case RK_STACK:
	case RK_ALLOCA:
	  return MEMSPACE_STACK;
	case RK_HEAP:
	case RK_HEAP_ALLOCATED:
	  return MEMSPACE_HEAP;
	case RK_STRING:
	  return MEMSPACE_READONLY_DATA;
	case RK_PRIVATE:
	  return MEMSPACE_PRIVATE;
	}
      if (iter->get_kind () == RK_CAST)
	iter = iter->dyn_cast_cast_region ()->get_original_region ();
      else
	iter = iter->get_parent_region ();
    }
  return MEMSPACE_UNKNOWN;
}

/* Subroutine for use by region_model_manager::get_or_create_initial_value.
   Return true if this region has an initial_svalue.
   Return false if attempting to use INIT_VAL(this_region) should give
   the "UNINITIALIZED" poison value.  */

bool
region::can_have_initial_svalue_p () const
{
  const region *base_reg = get_base_region ();

  /* Check for memory spaces that are uninitialized by default.  */
  enum memory_space mem_space = base_reg->get_memory_space ();
  switch (mem_space)
    {
    default:
      gcc_unreachable ();
    case MEMSPACE_UNKNOWN:
    case MEMSPACE_CODE:
    case MEMSPACE_GLOBALS:
    case MEMSPACE_READONLY_DATA:
    case MEMSPACE_PRIVATE:
      /* Such regions have initial_svalues.  */
      return true;

    case MEMSPACE_HEAP:
      /* Heap allocations are uninitialized by default.  */
      return false;

    case MEMSPACE_STACK:
      if (tree decl = base_reg->maybe_get_decl ())
	{
	  /* See the assertion in frame_region::get_region_for_local for the
	     tree codes we need to handle here.  */
	  switch (TREE_CODE (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case PARM_DECL:
	      /* Parameters have initial values.  */
	      return true;

	    case VAR_DECL:
	    case RESULT_DECL:
	      /* Function locals don't have initial values.  */
	      return false;

	    case SSA_NAME:
	      {
		tree ssa_name = decl;
		/* SSA names that are the default defn of a PARM_DECL
		   have initial_svalues; other SSA names don't.  */
		if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
		    && SSA_NAME_VAR (ssa_name)
		    && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
		  return true;
		else
		  return false;
	      }
	    }
	}

      /* If we have an on-stack region that isn't associated with a decl
	 or SSA name, then we have VLA/alloca, which is uninitialized.  */
      return false;
    }
}

/* For regions within a global decl, get the svalue for the initial
   value of this region when the program starts, caching the result.  */

const svalue *
region::get_initial_value_at_main (region_model_manager *mgr) const
{
  if (!m_cached_init_sval_at_main)
    m_cached_init_sval_at_main = calc_initial_value_at_main (mgr);
  return m_cached_init_sval_at_main;
}

/* Implementation of region::get_initial_value_at_main.  */

const svalue *
region::calc_initial_value_at_main (region_model_manager *mgr) const
{
  const decl_region *base_reg = get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);

  /* Attempt to get the initializer value for base_reg.  */
  if (const svalue *base_reg_init
	= base_reg->get_svalue_for_initializer (mgr))
    {
      if (this == base_reg)
	return base_reg_init;
      else
	{
	  /* Get the value for REG within base_reg_init.  */
	  binding_cluster c (base_reg);
	  c.bind (mgr->get_store_manager (), base_reg, base_reg_init);
	  const svalue *sval
	    = c.get_any_binding (mgr->get_store_manager (), this);
	  if (sval)
	    {
	      if (get_type ())
		sval = mgr->get_or_create_cast (get_type (), sval);
	      return sval;
	    }
	}
    }

  /* Otherwise, return INIT_VAL(REG).  */
  return mgr->get_or_create_initial_value (this);
}

/* If this region is a decl_region, return the decl.
   Otherwise return NULL.  */

tree
region::maybe_get_decl () const
{
  if (const decl_region *decl_reg = dyn_cast_decl_region ())
    return decl_reg->get_decl ();
  return NULL_TREE;
}

/* Get the region_offset for this region (calculating it on the
   first call and caching it internally).  */

region_offset
region::get_offset (region_model_manager *mgr) const
{
  if (!m_cached_offset)
    m_cached_offset = new region_offset (calc_offset (mgr));
  return *m_cached_offset;
}

/* Get the region_offset for immediately beyond this region.  */

region_offset
region::get_next_offset (region_model_manager *mgr) const
{
  region_offset start = get_offset (mgr);

  bit_size_t bit_size;
  if (get_bit_size (&bit_size))
    {
      if (start.concrete_p ())
	{
	  bit_offset_t next_bit_offset = start.get_bit_offset () + bit_size;
	  return region_offset::make_concrete (start.get_base_region (),
					       next_bit_offset);
	}
    }

  const svalue *start_byte_offset_sval = start.calc_symbolic_byte_offset (mgr);
  const svalue *byte_size_sval = get_byte_size_sval (mgr);
  const svalue *sum_sval
    = mgr->get_or_create_binop (size_type_node,
				PLUS_EXPR,
				start_byte_offset_sval,
				byte_size_sval);
  return region_offset::make_symbolic (start.get_base_region (),
				       sum_sval);
}

/* Base class implementation of region::get_byte_size vfunc.
   If the size of this region (in bytes) is known statically, write it to *OUT
   and return true.
   Otherwise return false.  */

bool
region::get_byte_size (byte_size_t *out) const
{
  tree type = get_type ();

  /* Bail out e.g. for heap-allocated regions.  */
  if (!type)
    return false;

  HOST_WIDE_INT bytes = int_size_in_bytes (type);
  if (bytes == -1)
    return false;
  *out = bytes;
  return true;
}

/* Base implementation of region::get_byte_size_sval vfunc.  */

const svalue *
region::get_byte_size_sval (region_model_manager *mgr) const
{
  tree type = get_type ();

  /* Bail out e.g. for heap-allocated regions.  */
  if (!type)
    return mgr->get_or_create_unknown_svalue (size_type_node);

  HOST_WIDE_INT bytes = int_size_in_bytes (type);
  if (bytes == -1)
    return mgr->get_or_create_unknown_svalue (size_type_node);

  tree byte_size = size_in_bytes (type);
  if (TREE_TYPE (byte_size) != size_type_node)
    byte_size = fold_build1 (NOP_EXPR, size_type_node, byte_size);
  return mgr->get_or_create_constant_svalue (byte_size);
}

/* Attempt to get the size of TYPE in bits.
   If successful, return true and write the size to *OUT.
   Otherwise return false.  */

bool
int_size_in_bits (const_tree type, bit_size_t *out)
{
  if (INTEGRAL_TYPE_P (type))
    {
      *out = TYPE_PRECISION (type);
      return true;
    }

  tree sz = TYPE_SIZE (type);
  if (sz
      && tree_fits_uhwi_p (sz)
      /* If the size is zero, then we may have a zero-sized
	 array; handle such cases by returning false.  */
      && !integer_zerop (sz))
    {
      *out = TREE_INT_CST_LOW (sz);
      return true;
    }
  else
    return false;
}

/* Base implementation of region::get_bit_size_sval vfunc.  */

const svalue *
region::get_bit_size_sval (region_model_manager *mgr) const
{
  tree type = get_type ();

  /* Bail out e.g. for heap-allocated regions.  */
  if (!type)
    return mgr->get_or_create_unknown_svalue (size_type_node);

  bit_size_t bits;
  if (!int_size_in_bits (type, &bits))
    return mgr->get_or_create_unknown_svalue (size_type_node);

  return mgr->get_or_create_int_cst (size_type_node, bits);
}

/* If the size of this region (in bits) is known statically, write it to *OUT
   and return true.
   Otherwise return false.  */

bool
region::get_bit_size (bit_size_t *out) const
{
  tree type = get_type ();

  /* Bail out e.g. for heap-allocated regions.  */
  if (!type)
    return false;

  return int_size_in_bits (type, out);
}

/* Get the field within RECORD_TYPE at BIT_OFFSET.  */

tree
get_field_at_bit_offset (tree record_type, bit_offset_t bit_offset)
{
  gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
  if (bit_offset < 0)
    return NULL;

  /* Find the first field that has an offset > BIT_OFFSET,
     then return the one preceding it.
     Skip other trees within the chain, such as FUNCTION_DECLs.  */
  tree last_field = NULL_TREE;
  for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
       iter = DECL_CHAIN (iter))
    {
      if (TREE_CODE (iter) == FIELD_DECL)
	{
	  int iter_field_offset = int_bit_position (iter);
	  if (bit_offset < iter_field_offset)
	    return last_field;
	  last_field = iter;
	}
    }
  return last_field;
}

/* Populate *OUT with descendent regions of type TYPE that match
   RELATIVE_BIT_OFFSET and SIZE_IN_BITS within this region.  */

void
region::get_subregions_for_binding (region_model_manager *mgr,
				    bit_offset_t relative_bit_offset,
				    bit_size_t size_in_bits,
				    tree type,
				    auto_vec <const region *> *out) const
{
  if (get_type () == NULL_TREE || type == NULL_TREE)
    return;
  if (relative_bit_offset == 0
      && types_compatible_p (get_type (), type))
    {
      out->safe_push (this);
      return;
    }
  switch (TREE_CODE (get_type ()))
    {
    case ARRAY_TYPE:
      {
	tree element_type = TREE_TYPE (get_type ());
	HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (element_type);
	if (hwi_byte_size > 0)
	  {
	    HOST_WIDE_INT bits_per_element
	      = hwi_byte_size << LOG2_BITS_PER_UNIT;
	    HOST_WIDE_INT element_index
	      = (relative_bit_offset.to_shwi () / bits_per_element);
	    tree element_index_cst
	      = build_int_cst (integer_type_node, element_index);
	    HOST_WIDE_INT inner_bit_offset
	      = relative_bit_offset.to_shwi () % bits_per_element;
	    const region *subregion = mgr->get_element_region
	      (this, element_type,
	       mgr->get_or_create_constant_svalue (element_index_cst));
	    subregion->get_subregions_for_binding (mgr, inner_bit_offset,
						   size_in_bits, type, out);
	  }
      }
      break;
    case RECORD_TYPE:
      {
	/* The bit offset might be *within* one of the fields (such as
	   with nested structs).
	   So we want to find the enclosing field, adjust the offset,
	   and repeat.  */
	if (tree field = get_field_at_bit_offset (get_type (),
						  relative_bit_offset))
	  {
	    int field_bit_offset = int_bit_position (field);
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding
	      (mgr, relative_bit_offset - field_bit_offset,
	       size_in_bits, type, out);
	  }
      }
      break;
    case UNION_TYPE:
      {
	for (tree field = TYPE_FIELDS (get_type ()); field != NULL_TREE;
	     field = DECL_CHAIN (field))
	  {
	    if (TREE_CODE (field) != FIELD_DECL)
	      continue;
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding (mgr,
						   relative_bit_offset,
						   size_in_bits,
						   type,
						   out);
	  }
      }
      break;
    default:
      /* Do nothing.  */
      break;
    }
}

/* Walk from this region up to the base region within its cluster, calculating
   the offset relative to the base region, either as an offset in bits,
   or a symbolic offset.  */
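
/* Illustrative example (not from the original sources): for a region
   such as "arr[3].f" within decl "arr", each step contributes a concrete
   relative offset (3 * the element size in bits, plus the bit position
   of field "f"), giving a concrete offset from the base region; if the
   index were instead a symbolic "i", the walk switches to accumulating
   a symbolic byte offset.  */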

region_offset
region::calc_offset (region_model_manager *mgr) const
{
  const region *iter_region = this;
  bit_offset_t accum_bit_offset = 0;
  const svalue *accum_byte_sval = NULL;

  while (iter_region)
    {
      switch (iter_region->get_kind ())
	{
	case RK_FIELD:
	case RK_ELEMENT:
	case RK_OFFSET:
	case RK_BIT_RANGE:
	  if (accum_byte_sval)
	    {
	      const svalue *sval
		= iter_region->get_relative_symbolic_offset (mgr);
	      accum_byte_sval
		= mgr->get_or_create_binop (ptrdiff_type_node, PLUS_EXPR,
					    accum_byte_sval, sval);
	      iter_region = iter_region->get_parent_region ();
	    }
	  else
	    {
	      bit_offset_t rel_bit_offset;
	      if (iter_region->get_relative_concrete_offset (&rel_bit_offset))
		{
		  accum_bit_offset += rel_bit_offset;
		  iter_region = iter_region->get_parent_region ();
		}
	      else
		{
		  /* If the iter_region is not concrete anymore, convert the
		     accumulated bits to a svalue in bytes and revisit the
		     iter_region collecting the symbolic value.  */
		  byte_offset_t byte_offset = accum_bit_offset / BITS_PER_UNIT;
		  tree offset_tree = wide_int_to_tree (ptrdiff_type_node,
						       byte_offset);
		  accum_byte_sval
		    = mgr->get_or_create_constant_svalue (offset_tree);
		}
	    }
	  continue;
	case RK_SIZED:
	  iter_region = iter_region->get_parent_region ();
	  continue;

	case RK_CAST:
	  {
	    const cast_region *cast_reg
	      = as_a <const cast_region *> (iter_region);
	    iter_region = cast_reg->get_original_region ();
	  }
	  continue;

	default:
	  return accum_byte_sval
		 ? region_offset::make_symbolic (iter_region,
						 accum_byte_sval)
		 : region_offset::make_concrete (iter_region,
						 accum_bit_offset);
	}
    }

  return accum_byte_sval ? region_offset::make_symbolic (iter_region,
							  accum_byte_sval)
			 : region_offset::make_concrete (iter_region,
							 accum_bit_offset);
}

/* Base implementation of region::get_relative_concrete_offset vfunc.  */

bool
region::get_relative_concrete_offset (bit_offset_t *) const
{
  return false;
}

/* Base implementation of region::get_relative_symbolic_offset vfunc.  */

const svalue *
region::get_relative_symbolic_offset (region_model_manager *mgr) const
{
  return mgr->get_or_create_unknown_svalue (ptrdiff_type_node);
}

/* Attempt to get the position and size of this region expressed as a
   concrete range of bytes relative to its parent.
   If successful, return true and write to *OUT.
   Otherwise return false.  */

bool
region::get_relative_concrete_byte_range (byte_range *out) const
{
  /* We must have a concrete offset relative to the parent.  */
  bit_offset_t rel_bit_offset;
  if (!get_relative_concrete_offset (&rel_bit_offset))
    return false;
  /* ...which must be a whole number of bytes.  */
  if (rel_bit_offset % BITS_PER_UNIT != 0)
    return false;
  byte_offset_t start_byte_offset = rel_bit_offset / BITS_PER_UNIT;

  /* We must have a concrete size, which must be a whole number
     of bytes.  */
  byte_size_t num_bytes;
  if (!get_byte_size (&num_bytes))
    return false;

  /* Success.  */
  *out = byte_range (start_byte_offset, num_bytes);
  return true;
}

/* Dump a description of this region to stderr.  */

DEBUG_FUNCTION void
region::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Return a new json::string describing the region.  */

json::value *
region::to_json () const
{
  label_text desc = get_desc (true);
  json::value *reg_js = new json::string (desc.get ());
  return reg_js;
}

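/* Attempt to print a user-facing description of this region to PP,
   returning true if a description was printed.
   Currently only implemented for decl_regions.  */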
bool
region::maybe_print_for_user (pretty_printer *pp,
			      const region_model &) const
{
  switch (get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *reg = (const decl_region *)this;
	tree decl = reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  decl = SSA_NAME_VAR (decl);
	print_expr_for_user (pp, decl);
	return true;
      }
    }

  return false;
}

/* Generate a description of this region.  */

DEBUG_FUNCTION label_text
region::get_desc (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_to_pp (&pp, simple);
  return label_text::take (xstrdup (pp_formatted_text (&pp)));
}

/* Base implementation of region::accept vfunc.
   Subclass implementations should chain up to this.  */

void
region::accept (visitor *v) const
{
  v->visit_region (this);
  if (m_parent)
    m_parent->accept (v);
}

/* Return true if this is a symbolic region for dereferencing an
   unknown ptr.
   We shouldn't attempt to bind values for this region (but
   can unbind values for other regions).  */

bool
region::symbolic_for_unknown_ptr_p () const
{
  if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
    if (sym_reg->get_pointer ()->get_kind () == SK_UNKNOWN)
      return true;
  return false;
}

/* Return true if this is a symbolic region.  */

bool
region::symbolic_p () const
{
  return get_kind () == RK_SYMBOLIC;
}

/* Return true if this region is known to be zero bits in size.  */

bool
region::empty_p () const
{
  bit_size_t num_bits;
  if (get_bit_size (&num_bits))
    if (num_bits == 0)
      return true;
  return false;
}

/* Return true if this is a region for a decl with name DECL_NAME.
   Intended for use when debugging (for assertions and conditional
   breakpoints).  */

DEBUG_FUNCTION bool
region::is_named_decl_p (const char *decl_name) const
{
  if (tree decl = maybe_get_decl ())
    if (DECL_NAME (decl)
	&& !strcmp (IDENTIFIER_POINTER (DECL_NAME (decl)), decl_name))
      return true;
  return false;
}

/* region's ctor.  */

region::region (complexity c, symbol::id_t id, const region *parent, tree type)
: symbol (c, id),
  m_parent (parent), m_type (type),
  m_cached_offset (NULL), m_cached_init_sval_at_main (NULL)
{
  gcc_assert (type == NULL_TREE || TYPE_P (type));
}

/* Comparator for use by vec<const region *>::qsort,
   using their IDs to order them.  */

int
region::cmp_ptr_ptr (const void *p1, const void *p2)
{
  const region * const *reg1 = (const region * const *)p1;
  const region * const *reg2 = (const region * const *)p2;

  return cmp_ids (*reg1, *reg2);
}

/* Determine if a pointer to this region must be non-NULL.

   Generally, pointers to regions must be non-NULL, but pointers
   to symbolic_regions might, in fact, be NULL.

   This allows us to simulate functions like malloc and calloc with:
   - only one "outcome" from each statement,
   - the idea that the pointer is on the heap if non-NULL
   - the possibility that the pointer could be NULL
   - the idea that successive values returned from malloc are non-equal
   - to be able to zero-fill for calloc.  */

bool
region::non_null_p () const
{
  switch (get_kind ())
    {
    default:
      return true;
    case RK_SYMBOLIC:
      /* Are we within a symbolic_region?  If so, it could be NULL, and we
	 have to fall back on the constraints.  */
      return false;
    case RK_HEAP_ALLOCATED:
      return false;
    }
}

/* Return true iff this region is defined in terms of SVAL.  */

bool
region::involves_p (const svalue *sval) const
{
  if (const symbolic_region *symbolic_reg = dyn_cast_symbolic_region ())
    {
      if (symbolic_reg->get_pointer ()->involves_p (sval))
	return true;
    }

  return false;
}

/* Comparator for trees to impose a deterministic ordering on
   T1 and T2.  */

static int
tree_cmp (const_tree t1, const_tree t2)
{
  gcc_assert (t1);
  gcc_assert (t2);

  /* Test tree codes first.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return TREE_CODE (t1) - TREE_CODE (t2);

  /* From this point on, we know T1 and T2 have the same tree code.  */

  if (DECL_P (t1))
    {
      if (DECL_NAME (t1) && DECL_NAME (t2))
	return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
		       IDENTIFIER_POINTER (DECL_NAME (t2)));
      else
	{
	  if (DECL_NAME (t1))
	    return -1;
	  else if (DECL_NAME (t2))
	    return 1;
	  else
	    return DECL_UID (t1) - DECL_UID (t2);
	}
    }

  switch (TREE_CODE (t1))
    {
    case SSA_NAME:
      {
	if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
	  {
	    int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
	    if (var_cmp)
	      return var_cmp;
	    return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
	else
	  {
	    if (SSA_NAME_VAR (t1))
	      return -1;
	    else if (SSA_NAME_VAR (t2))
	      return 1;
	    else
	      return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
      }
      break;

    case INTEGER_CST:
      return tree_int_cst_compare (t1, t2);

    case REAL_CST:
      {
	const real_value *rv1 = TREE_REAL_CST_PTR (t1);
	const real_value *rv2 = TREE_REAL_CST_PTR (t2);
	if (real_compare (UNORDERED_EXPR, rv1, rv2))
	  {
	    /* Impose an arbitrary order on NaNs relative to other NaNs
	       and to non-NaNs.  */
	    if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
	      return cmp_isnan;
	    if (int cmp_issignaling_nan
		  = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
	      return cmp_issignaling_nan;
	    return real_isneg (rv1) - real_isneg (rv2);
	  }
	if (real_compare (LT_EXPR, rv1, rv2))
	  return -1;
	if (real_compare (GT_EXPR, rv1, rv2))
	  return 1;
	return 0;
      }

    case STRING_CST:
      return strcmp (TREE_STRING_POINTER (t1),
		     TREE_STRING_POINTER (t2));

    default:
      gcc_unreachable ();
      break;
    }

  gcc_unreachable ();

  return 0;
}

/* qsort comparator for trees to impose a deterministic ordering on
   P1 and P2.  */

int
tree_cmp (const void *p1, const void *p2)
{
  const_tree t1 = *(const_tree const *)p1;
  const_tree t2 = *(const_tree const *)p2;

  return tree_cmp (t1, t2);
}

/* class frame_region : public space_region.  */

frame_region::~frame_region ()
{
  for (map_t::iterator iter = m_locals.begin ();
       iter != m_locals.end ();
       ++iter)
    delete (*iter).second;
}

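/* Implementation of region::accept vfunc for frame_region.  */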
void
frame_region::accept (visitor *v) const
{
  region::accept (v);
  if (m_calling_frame)
    m_calling_frame->accept (v);
}

/* Implementation of region::dump_to_pp vfunc for frame_region.  */

void
frame_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_printf (pp, "frame: %qs@%i", function_name (&m_fun), get_stack_depth ());
  else
    pp_printf (pp, "frame_region(%qs, index: %i, depth: %i)",
	       function_name (&m_fun), m_index, get_stack_depth ());
}

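/* Get the decl_region for EXPR (a PARM_DECL, VAR_DECL, RESULT_DECL, or
   SSA name within this frame), creating it if necessary.  */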
const decl_region *
frame_region::get_region_for_local (region_model_manager *mgr,
				    tree expr,
				    const region_model_context *ctxt) const
{
  if (CHECKING_P)
    {
      /* Verify that EXPR is a local or SSA name, and that it's for the
	 correct function for this stack frame.  */
      gcc_assert (TREE_CODE (expr) == PARM_DECL
		  || TREE_CODE (expr) == VAR_DECL
		  || TREE_CODE (expr) == SSA_NAME
		  || TREE_CODE (expr) == RESULT_DECL);
      switch (TREE_CODE (expr))
	{
	default:
	  gcc_unreachable ();
	case VAR_DECL:
	  gcc_assert (!is_global_var (expr));
	  /* Fall through.  */
	case PARM_DECL:
	case RESULT_DECL:
	  gcc_assert (DECL_CONTEXT (expr) == m_fun.decl);
	  break;
	case SSA_NAME:
	  {
	    if (tree var = SSA_NAME_VAR (expr))
	      {
		if (DECL_P (var))
		  gcc_assert (DECL_CONTEXT (var) == m_fun.decl);
	      }
	    else if (ctxt)
	      if (const extrinsic_state *ext_state = ctxt->get_ext_state ())
		if (const supergraph *sg
		      = ext_state->get_engine ()->get_supergraph ())
		  {
		    const gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
		    const supernode *snode
		      = sg->get_supernode_for_stmt (def_stmt);
		    gcc_assert (snode->get_function () == &m_fun);
		  }
	  }
	  break;
	}
    }

  /* Ideally we'd use mutable here.  */
  map_t &mutable_locals = const_cast <map_t &> (m_locals);

  if (decl_region **slot = mutable_locals.get (expr))
    return *slot;
  decl_region *reg
    = new decl_region (mgr->alloc_symbol_id (), this, expr);
  mutable_locals.put (expr, reg);
  return reg;
}

/* class globals_region : public space_region.  */

/* Implementation of region::dump_to_pp vfunc for globals_region.  */

void
globals_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_string (pp, "::");
  else
    pp_string (pp, "globals");
}

/* class code_region : public map_region.  */

/* Implementation of region::dump_to_pp vfunc for code_region.  */

void
code_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_string (pp, "code region");
  else
    pp_string (pp, "code_region()");
}

/* class function_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for function_region.  */

void
function_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      dump_quoted_tree (pp, m_fndecl);
    }
  else
    {
      pp_string (pp, "function_region(");
      dump_quoted_tree (pp, m_fndecl);
      pp_string (pp, ")");
    }
}

/* class label_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for label_region.  */

void
label_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      dump_quoted_tree (pp, m_label);
    }
  else
    {
      pp_string (pp, "label_region(");
      dump_quoted_tree (pp, m_label);
      pp_string (pp, ")");
    }
}

/* class stack_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for stack_region.  */

void
stack_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_string (pp, "stack region");
  else
    pp_string (pp, "stack_region()");
}

/* class heap_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for heap_region.  */

void
heap_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_string (pp, "heap region");
  else
    pp_string (pp, "heap_region()");
}

/* class root_region : public region.  */

/* root_region's ctor.  */

root_region::root_region (symbol::id_t id)
: region (complexity (1, 1), id, NULL, NULL_TREE)
{
}

/* Implementation of region::dump_to_pp vfunc for root_region.  */

void
root_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_string (pp, "root region");
  else
    pp_string (pp, "root_region()");
}

/* class thread_local_region : public space_region.  */

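/* Implementation of region::dump_to_pp vfunc for thread_local_region.  */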
void
thread_local_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_string (pp, "thread_local_region");
  else
    pp_string (pp, "thread_local_region()");
}

/* class symbolic_region : public map_region.  */

/* symbolic_region's ctor.  */

symbolic_region::symbolic_region (symbol::id_t id, region *parent,
				  const svalue *sval_ptr)
: region (complexity::from_pair (parent, sval_ptr), id, parent,
	  (sval_ptr->get_type ()
	   ? TREE_TYPE (sval_ptr->get_type ())
	   : NULL_TREE)),
  m_sval_ptr (sval_ptr)
{
}

/* Implementation of region::accept vfunc for symbolic_region.  */

void
symbolic_region::accept (visitor *v) const
{
  region::accept (v);
  m_sval_ptr->accept (v);
}

/* Implementation of region::dump_to_pp vfunc for symbolic_region.  */

void
symbolic_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      pp_string (pp, "(*");
      m_sval_ptr->dump_to_pp (pp, simple);
      pp_string (pp, ")");
    }
  else
    {
      pp_string (pp, "symbolic_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      if (get_type ())
	{
	  pp_string (pp, ", ");
	  print_quoted_type (pp, get_type ());
	}
      pp_string (pp, ", ");
      m_sval_ptr->dump_to_pp (pp, simple);
      pp_string (pp, ")");
    }
}

/* class decl_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for decl_region.  */

void
decl_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_printf (pp, "%E", m_decl);
  else
    {
      pp_string (pp, "decl_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ", ");
      print_quoted_type (pp, get_type ());
      pp_printf (pp, ", %qE)", m_decl);
    }
}

/* Get the stack depth for the frame containing this decl, or 0
   for a global.  */

int
decl_region::get_stack_depth () const
{
  if (get_parent_region () == NULL)
    return 0;
  if (const frame_region *frame_reg
	= get_parent_region ()->dyn_cast_frame_region ())
    return frame_reg->get_stack_depth ();
  return 0;
}

/* If the underlying decl is in the global constant pool,
   return an svalue representing the constant value.
   Otherwise return NULL.  */

const svalue *
decl_region::maybe_get_constant_value (region_model_manager *mgr) const
{
  if (VAR_P (m_decl)
      && DECL_IN_CONSTANT_POOL (m_decl)
      && DECL_INITIAL (m_decl)
      && TREE_CODE (DECL_INITIAL (m_decl)) == CONSTRUCTOR)
    return get_svalue_for_constructor (DECL_INITIAL (m_decl), mgr);
  return NULL;
}

/* Implementation of decl_region::get_svalue_for_constructor
   for when the cached value hasn't yet been calculated.  */

const svalue *
decl_region::calc_svalue_for_constructor (tree ctor,
					  region_model_manager *mgr) const
{
  /* Create a binding map, applying ctor to it, using this
     decl_region as the base region when building child regions
     for offset calculations.  */
  binding_map map;
  if (!map.apply_ctor_to_region (this, ctor, mgr))
    return mgr->get_or_create_unknown_svalue (get_type ());

  /* Return a compound svalue for the map we built.  */
  return mgr->get_or_create_compound_svalue (get_type (), map);
}

/* Get an svalue for CTOR, a CONSTRUCTOR for this region's decl.  */

const svalue *
decl_region::get_svalue_for_constructor (tree ctor,
					 region_model_manager *mgr) const
{
  gcc_assert (!TREE_CLOBBER_P (ctor));
  gcc_assert (ctor == DECL_INITIAL (m_decl));

  if (!m_ctor_svalue)
    m_ctor_svalue = calc_svalue_for_constructor (ctor, mgr);

  return m_ctor_svalue;
}

/* For use on decl_regions for global variables.

   Get an svalue for the initial value of this region at entry to
   "main" (either based on DECL_INITIAL, or implicit initialization to
   zero).

   Return NULL if there is a problem.  */

const svalue *
decl_region::get_svalue_for_initializer (region_model_manager *mgr) const
{
  tree init = DECL_INITIAL (m_decl);
  if (!init)
    {
      /* If we have an "extern" decl then there may be an initializer in
	 another TU.  */
      if (DECL_EXTERNAL (m_decl))
	return NULL;

      if (empty_p ())
	return NULL;

      /* Implicit initialization to zero; use a compound_svalue for it.
	 Doing so requires that we have a concrete binding for this region,
	 which can fail if we have a region with unknown size
	 (e.g. "extern const char arr[];").  */
      const binding_key *binding
	= binding_key::make (mgr->get_store_manager (), this);
      if (binding->symbolic_p ())
	return NULL;

      /* If we don't care about tracking the content of this region, then
	 it's unused, and the value doesn't matter.  */
      if (!tracked_p ())
	return NULL;

      binding_cluster c (this);
      c.zero_fill_region (mgr->get_store_manager (), this);
      return mgr->get_or_create_compound_svalue (TREE_TYPE (m_decl),
						 c.get_map ());
    }

  /* LTO can write out error_mark_node as the DECL_INITIAL for simple scalar
     values (to avoid writing out an extra section).  */
  if (init == error_mark_node)
    return NULL;

  if (TREE_CODE (init) == CONSTRUCTOR)
    return get_svalue_for_constructor (init, mgr);

  /* Reuse the get_rvalue logic from region_model.  */
  region_model m (mgr);
  return m.get_rvalue (path_var (init, 0), NULL);
}

/* Subroutine of symnode_requires_tracking_p; return true if REF
   might imply that we should be tracking the value of its decl.  */

static bool
ipa_ref_requires_tracking (ipa_ref *ref)
{
  /* If we have a load/store/alias of the symbol, then we'll track
     the decl's value.  */
  if (ref->use != IPA_REF_ADDR)
    return true;

  if (ref->stmt == NULL)
    return true;

  switch (ref->stmt->code)
    {
    default:
      return true;
    case GIMPLE_CALL:
      {
	cgraph_node *caller_cnode = dyn_cast <cgraph_node *> (ref->referring);
	if (caller_cnode == NULL)
	  return true;
	cgraph_edge *edge = caller_cnode->get_edge (ref->stmt);
	if (!edge)
	  return true;
	if (edge->callee == NULL)
	  return true; /* e.g. call through function ptr.  */
	if (edge->callee->definition)
	  return true;
	/* If we get here, then this ref is a pointer passed to
	   a function we don't have the definition for.  */
	return false;
      }
      break;
    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (ref->stmt);
	if (gimple_asm_noutputs (asm_stmt) > 0)
	  return true;
	if (gimple_asm_nclobbers (asm_stmt) > 0)
	  return true;
	/* If we get here, then this ref is the decl being passed
	   by pointer to asm with no outputs.  */
	return false;
      }
      break;
    }
}

/* Determine if the decl for SYMNODE should have binding_clusters
   in our state objects; return false to optimize away tracking
   certain decls in our state objects, as an optimization.  */

static bool
symnode_requires_tracking_p (symtab_node *symnode)
{
  gcc_assert (symnode);
  if (symnode->externally_visible)
    return true;
  tree context_fndecl = DECL_CONTEXT (symnode->decl);
  if (context_fndecl == NULL)
    return true;
  if (TREE_CODE (context_fndecl) != FUNCTION_DECL)
    return true;
  for (auto ref : symnode->ref_list.referring)
    if (ipa_ref_requires_tracking (ref))
      return true;

  /* If we get here, then we don't have uses of this decl that require
     tracking; we never read from it or write to it explicitly.  */
  return false;
}

/* Subroutine of decl_region ctor: determine whether this decl_region
   can have binding_clusters; return false to optimize away tracking
   of certain decls in our state objects, as an optimization.  */

bool
decl_region::calc_tracked_p (tree decl)
{
  /* Precondition of symtab_node::get.  */
  if (TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl) || in_lto_p))
    if (symtab_node *symnode = symtab_node::get (decl))
      return symnode_requires_tracking_p (symnode);
  return true;
}

/* class field_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for field_region.  */

void
field_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ".");
      pp_printf (pp, "%E", m_field);
    }
  else
    {
      pp_string (pp, "field_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ", ");
      print_quoted_type (pp, get_type ());
      pp_printf (pp, ", %qE)", m_field);
    }
}

/* Implementation of region::get_relative_concrete_offset vfunc
   for field_region.  */

bool
field_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  /* Compare with e.g. gimple-fold.cc's
     fold_nonarray_ctor_reference.  */
  tree byte_offset = DECL_FIELD_OFFSET (m_field);
  if (TREE_CODE (byte_offset) != INTEGER_CST)
    return false;
  tree field_offset = DECL_FIELD_BIT_OFFSET (m_field);
  /* Compute bit offset of the field.  */
  offset_int bitoffset
    = (wi::to_offset (field_offset)
       + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
  *out = bitoffset;
  return true;
}


/* Implementation of region::get_relative_symbolic_offset vfunc
   for field_region.
   If known, the returned svalue is equal to the offset converted to bytes and
   rounded off.  */

const svalue *
field_region::get_relative_symbolic_offset (region_model_manager *mgr) const
{
  bit_offset_t out;
  if (get_relative_concrete_offset (&out))
    {
      tree cst_tree
	= wide_int_to_tree (ptrdiff_type_node, out / BITS_PER_UNIT);
      return mgr->get_or_create_constant_svalue (cst_tree);
    }
  return mgr->get_or_create_unknown_svalue (ptrdiff_type_node);
}

/* class element_region : public region.  */

/* Implementation of region::accept vfunc for element_region.  */

void
element_region::accept (visitor *v) const
{
  region::accept (v);
  m_index->accept (v);
}

/* Implementation of region::dump_to_pp vfunc for element_region.  */

void
element_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      //pp_string (pp, "(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, "[");
      m_index->dump_to_pp (pp, simple);
      pp_string (pp, "]");
      //pp_string (pp, ")");
    }
  else
    {
      pp_string (pp, "element_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ", ");
      print_quoted_type (pp, get_type ());
      pp_string (pp, ", ");
      m_index->dump_to_pp (pp, simple);
      pp_printf (pp, ")");
    }
}

/* Implementation of region::get_relative_concrete_offset vfunc
   for element_region.  */

bool
element_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  if (tree idx_cst = m_index->maybe_get_constant ())
    {
      gcc_assert (TREE_CODE (idx_cst) == INTEGER_CST);

      tree elem_type = get_type ();
      offset_int element_idx = wi::to_offset (idx_cst);

      /* First, use int_size_in_bytes, to reject the case where we
	 have an incomplete type, or a non-constant value.  */
      HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
      if (hwi_byte_size > 0)
	{
	  offset_int element_bit_size
	    = hwi_byte_size << LOG2_BITS_PER_UNIT;
	  offset_int element_bit_offset
	    = element_idx * element_bit_size;
	  *out = element_bit_offset;
	  return true;
	}
    }
  return false;
}

/* Implementation of region::get_relative_symbolic_offset vfunc
   for element_region.  */

const svalue *
element_region::get_relative_symbolic_offset (region_model_manager *mgr) const
{
  tree elem_type = get_type ();

  /* First, use int_size_in_bytes, to reject the case where we
     have an incomplete type, or a non-constant value.  */
  HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
  if (hwi_byte_size > 0)
    {
      tree byte_size_tree = wide_int_to_tree (ptrdiff_type_node,
					      hwi_byte_size);
      const svalue *byte_size_sval
	= mgr->get_or_create_constant_svalue (byte_size_tree);
      return mgr->get_or_create_binop (NULL_TREE, MULT_EXPR,
				       m_index, byte_size_sval);
    }
  return mgr->get_or_create_unknown_svalue (ptrdiff_type_node);
}

/* class offset_region : public region.  */

/* Implementation of region::accept vfunc for offset_region.  */

void
offset_region::accept (visitor *v) const
{
  region::accept (v);
  m_byte_offset->accept (v);
}

/* Implementation of region::dump_to_pp vfunc for offset_region.  */

void
offset_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      //pp_string (pp, "(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, "+");
      m_byte_offset->dump_to_pp (pp, simple);
      //pp_string (pp, ")");
    }
  else
    {
      pp_string (pp, "offset_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ", ");
      print_quoted_type (pp, get_type ());
      pp_string (pp, ", ");
      m_byte_offset->dump_to_pp (pp, simple);
      pp_printf (pp, ")");
    }
}

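/* Get the offset of this region in bits, as an svalue: the byte offset
   multiplied by BITS_PER_UNIT.  */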
const svalue *
offset_region::get_bit_offset (region_model_manager *mgr) const
{
  const svalue *bits_per_byte_sval
    = mgr->get_or_create_int_cst (NULL_TREE, BITS_PER_UNIT);
  return mgr->get_or_create_binop (NULL_TREE, MULT_EXPR,
				   m_byte_offset, bits_per_byte_sval);
}

/* Implementation of region::get_relative_concrete_offset vfunc
   for offset_region.  */

bool
offset_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  if (tree byte_offset_cst = m_byte_offset->maybe_get_constant ())
    {
      gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
      /* Use a signed value for the byte offset, to handle
	 negative offsets.  */
      HOST_WIDE_INT byte_offset
	= wi::to_offset (byte_offset_cst).to_shwi ();
      HOST_WIDE_INT bit_offset = byte_offset * BITS_PER_UNIT;
      *out = bit_offset;
      return true;
    }
  return false;
}

/* Implementation of region::get_relative_symbolic_offset vfunc
   for offset_region.  */

const svalue *
offset_region::get_relative_symbolic_offset (region_model_manager *mgr
					      ATTRIBUTE_UNUSED) const
{
  return get_byte_offset ();
}

/* class sized_region : public region.  */

/* Implementation of region::accept vfunc for sized_region.  */

void
sized_region::accept (visitor *v) const
{
  region::accept (v);
  m_byte_size_sval->accept (v);
}

/* Implementation of region::dump_to_pp vfunc for sized_region.  */

void
sized_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      pp_string (pp, "SIZED_REG(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ", ");
      m_byte_size_sval->dump_to_pp (pp, simple);
      pp_string (pp, ")");
    }
  else
    {
      pp_string (pp, "sized_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ", ");
      m_byte_size_sval->dump_to_pp (pp, simple);
      pp_printf (pp, ")");
    }
}

/* Implementation of region::get_byte_size vfunc for sized_region.  */

bool
sized_region::get_byte_size (byte_size_t *out) const
{
  if (tree cst = m_byte_size_sval->maybe_get_constant ())
    {
      gcc_assert (TREE_CODE (cst) == INTEGER_CST);
      *out = tree_to_uhwi (cst);
      return true;
    }
  return false;
}

/* Implementation of region::get_bit_size vfunc for sized_region.  */

bool
sized_region::get_bit_size (bit_size_t *out) const
{
  byte_size_t byte_size;
  if (!get_byte_size (&byte_size))
    return false;
  *out = byte_size * BITS_PER_UNIT;
  return true;
}

/* Implementation of region::get_bit_size_sval vfunc for sized_region.  */

const svalue *
sized_region::get_bit_size_sval (region_model_manager *mgr) const
{
  const svalue *bits_per_byte_sval
    = mgr->get_or_create_int_cst (NULL_TREE, BITS_PER_UNIT);
  return mgr->get_or_create_binop (NULL_TREE, MULT_EXPR,
				   m_byte_size_sval, bits_per_byte_sval);
}

2093/* class cast_region : public region. */
2094
2095/* Implementation of region::accept vfunc for cast_region. */
2096
2097void
2098cast_region::accept (visitor *v) const
2099{
2100 region::accept (v);
2101 m_original_region->accept (v);
2102}
2103
2104/* Implementation of region::dump_to_pp vfunc for cast_region. */
2105
2106void
2107cast_region::dump_to_pp (pretty_printer *pp, bool simple) const
2108{
2109 if (simple)
2110 {
2111 pp_string (pp, "CAST_REG(");
2112 print_quoted_type (pp, t: get_type ());
2113 pp_string (pp, ", ");
2114 m_original_region->dump_to_pp (pp, simple);
2115 pp_string (pp, ")");
2116 }
2117 else
2118 {
2119 pp_string (pp, "cast_region(");
2120 m_original_region->dump_to_pp (pp, simple);
2121 pp_string (pp, ", ");
2122 print_quoted_type (pp, t: get_type ());
2123 pp_printf (pp, ")");
2124 }
2125}
2126
2127/* Implementation of region::get_relative_concrete_offset vfunc
2128 for cast_region. */
2129
2130bool
2131cast_region::get_relative_concrete_offset (bit_offset_t *out) const
2132{
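  /* A cast_region occupies the same location as its original region,
     so its offset relative to that region is always zero.  */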
  *out = (int) 0;
  return true;
}

/* class heap_allocated_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for heap_allocated_region.  */

void
heap_allocated_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_printf (pp, "HEAP_ALLOCATED_REGION(%i)", get_id ());
  else
    pp_printf (pp, "heap_allocated_region(%i)", get_id ());
}

/* class alloca_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for alloca_region.  */

void
alloca_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_printf (pp, "ALLOCA_REGION(%i)", get_id ());
  else
    pp_printf (pp, "alloca_region(%i)", get_id ());
}

/* class string_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for string_region.  */

void
string_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    dump_tree (pp, m_string_cst);
  else
    {
      pp_string (pp, "string_region(");
      dump_tree (pp, m_string_cst);
      if (!flag_dump_noaddr)
	{
	  pp_string (pp, " (");
	  pp_pointer (pp, m_string_cst);
	  pp_string (pp, "))");
	}
    }
}

/* class bit_range_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for bit_range_region.  */

void
bit_range_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      pp_string (pp, "BIT_RANGE_REG(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ", ");
      m_bits.dump_to_pp (pp);
      pp_string (pp, ")");
    }
  else
    {
      pp_string (pp, "bit_range_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ", ");
      m_bits.dump_to_pp (pp);
      pp_printf (pp, ")");
    }
}

/* Implementation of region::get_byte_size vfunc for bit_range_region.  */

bool
bit_range_region::get_byte_size (byte_size_t *out) const
{
  if (m_bits.m_size_in_bits % BITS_PER_UNIT == 0)
    {
      *out = m_bits.m_size_in_bits / BITS_PER_UNIT;
      return true;
    }
  return false;
}

/* Implementation of region::get_bit_size vfunc for bit_range_region.  */

bool
bit_range_region::get_bit_size (bit_size_t *out) const
{
  *out = m_bits.m_size_in_bits;
  return true;
}

/* Implementation of region::get_byte_size_sval vfunc for bit_range_region.  */

const svalue *
bit_range_region::get_byte_size_sval (region_model_manager *mgr) const
{
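  /* If the size is not a whole number of bytes, fall back to an
     unknown value.  */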
  if (m_bits.m_size_in_bits % BITS_PER_UNIT != 0)
    return mgr->get_or_create_unknown_svalue (size_type_node);

  HOST_WIDE_INT num_bytes = m_bits.m_size_in_bits.to_shwi () / BITS_PER_UNIT;
  return mgr->get_or_create_int_cst (size_type_node, num_bytes);
}

/* Implementation of region::get_bit_size_sval vfunc for bit_range_region.  */

const svalue *
bit_range_region::get_bit_size_sval (region_model_manager *mgr) const
{
  return mgr->get_or_create_int_cst (size_type_node,
				     m_bits.m_size_in_bits);
}

/* Implementation of region::get_relative_concrete_offset vfunc for
   bit_range_region.  */

bool
bit_range_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  *out = m_bits.get_start_bit_offset ();
  return true;
}

/* Implementation of region::get_relative_symbolic_offset vfunc for
   bit_range_region.
   The returned svalue is the range's start offset converted to bytes,
   rounded down to a whole number of bytes.  */

const svalue *
bit_range_region::get_relative_symbolic_offset (region_model_manager *mgr)
  const
{
  byte_offset_t start_byte = m_bits.get_start_bit_offset () / BITS_PER_UNIT;
  tree start_byte_tree = wide_int_to_tree (ptrdiff_type_node, start_byte);
  return mgr->get_or_create_constant_svalue (start_byte_tree);
}

/* class var_arg_region : public region.  */

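/* Implementation of region::dump_to_pp vfunc for var_arg_region.  */
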
void
var_arg_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      pp_string (pp, "VAR_ARG_REG(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_printf (pp, ", arg_idx: %d)", m_idx);
    }
  else
    {
      pp_string (pp, "var_arg_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_printf (pp, ", arg_idx: %d)", m_idx);
    }
}

/* Get the frame_region for this var_arg_region.  */

const frame_region *
var_arg_region::get_frame_region () const
{
  gcc_assert (get_parent_region ());
  return as_a <const frame_region *> (get_parent_region ());
}

/* class errno_region : public region.  */

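/* Implementation of region::dump_to_pp vfunc for errno_region.  */
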
void
errno_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_string (pp, "errno_region");
  else
    pp_string (pp, "errno_region()");
}

/* class private_region : public region.  */

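/* Implementation of region::dump_to_pp vfunc for private_region.  */
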
void
private_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_printf (pp, "PRIVATE_REG(%qs)", m_desc);
  else
    pp_printf (pp, "private_region(%qs)", m_desc);
}

/* class unknown_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for unknown_region.  */

void
unknown_region::dump_to_pp (pretty_printer *pp, bool /*simple*/) const
{
  pp_string (pp, "UNKNOWN_REGION");
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */