1/* Bounds-checking of reads and writes to memory regions.
2 Copyright (C) 2019-2024 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 3, or (at your option)
9any later version.
10
11GCC is distributed in the hope that it will be useful, but
12WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#include "config.h"
21#define INCLUDE_MEMORY
22#define INCLUDE_VECTOR
23#include "system.h"
24#include "coretypes.h"
25#include "make-unique.h"
26#include "tree.h"
27#include "function.h"
28#include "basic-block.h"
29#include "intl.h"
30#include "gimple.h"
31#include "gimple-iterator.h"
32#include "diagnostic-core.h"
33#include "diagnostic-diagram.h"
34#include "diagnostic-format-sarif.h"
35#include "analyzer/analyzer.h"
36#include "analyzer/analyzer-logging.h"
37#include "analyzer/region-model.h"
38#include "analyzer/checker-event.h"
39#include "analyzer/checker-path.h"
40#include "analyzer/access-diagram.h"
41
42#if ENABLE_ANALYZER
43
44namespace ana {
45
46/* Abstract base class for all out-of-bounds warnings. */
47
class out_of_bounds : public pending_diagnostic
{
public:
  /* Subclass of region_creation_event_capacity that, when the event is
     assigned an id during emission, records that id in the owning
     out_of_bounds diagnostic (m_region_creation_event_id), so that
     later output (SARIF properties, the access diagram) can refer back
     to the capacity event.  */
  class oob_region_creation_event_capacity : public region_creation_event_capacity
  {
  public:
    oob_region_creation_event_capacity (tree byte_capacity,
					const event_loc_info &loc_info,
					out_of_bounds &oob)
    : region_creation_event_capacity (byte_capacity,
				      loc_info),
      m_oob (oob)
    {
    }
    void prepare_for_emission (checker_path *path,
			       pending_diagnostic *pd,
			       diagnostic_event_id_t emission_id) override
    {
      region_creation_event_capacity::prepare_for_emission (path,
							    pd,
							    emission_id);
      /* Stash the id of this event on the diagnostic itself.  */
      m_oob.m_region_creation_event_id = emission_id;
    }
  private:
    /* The diagnostic that owns this event.  */
    out_of_bounds &m_oob;
  };

  /* MODEL: model at the point of the bad access.
     REG: the region accessed out-of-bounds.
     DIAG_ARG: tree for use in messages (can be NULL).
     SVAL_HINT: optional svalue to highlight in the access diagram
     (can be NULL).  */
  out_of_bounds (const region_model &model,
		 const region *reg,
		 tree diag_arg,
		 const svalue *sval_hint)
  : m_model (model), m_reg (reg), m_diag_arg (diag_arg), m_sval_hint (sval_hint)
  {}

  /* Two out-of-bounds diagnostics are deduplicated if they refer to
     the same region and the same diagnostic argument.  */
  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    const out_of_bounds &other
      (static_cast <const out_of_bounds &>(base_other));
    return (m_reg == other.m_reg
	    && pending_diagnostic::same_tree_p (t1: m_diag_arg, t2: other.m_diag_arg));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (reg: m_reg->get_base_region ());
  }

  /* Add an event describing the capacity of the region (if known),
     keeping a handle on its emission id via
     oob_region_creation_event_capacity.  */
  void add_region_creation_events (const region *,
				   tree byte_capacity,
				   const event_loc_info &loc_info,
				   checker_path &emission_path) override
  {
    /* The memory space is described in the diagnostic message itself,
       so we don't need an event for that.  */
    if (byte_capacity)
      emission_path.add_event
	(event: make_unique<oob_region_creation_event_capacity> (args&: byte_capacity,
							     args: loc_info,
							     args&: *this));
  }

  /* Record the internal details of this diagnostic in the SARIF
     "result" object's property bag, under
     "gcc/analyzer/out_of_bounds/".  */
  void maybe_add_sarif_properties (sarif_object &result_obj)
    const override
  {
    sarif_property_bag &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/out_of_bounds/"
    props.set_string (PROPERTY_PREFIX "dir",
		      utf8_value: get_dir () == DIR_READ ? "read" : "write");
    props.set (PROPERTY_PREFIX "model", v: m_model.to_json ());
    props.set (PROPERTY_PREFIX "region", v: m_reg->to_json ());
    props.set (PROPERTY_PREFIX "diag_arg", v: tree_to_json (node: m_diag_arg));
    if (m_sval_hint)
      props.set (PROPERTY_PREFIX "sval_hint", v: m_sval_hint->to_json ());
    props.set (PROPERTY_PREFIX "region_creation_event_id",
	       v: diagnostic_event_id_to_json (m_region_creation_event_id));
#undef PROPERTY_PREFIX
  }

  /* Whether the bad access is a read or a write.  */
  virtual enum access_direction get_dir () const = 0;

protected:
  enum memory_space get_memory_space () const
  {
    return m_reg->get_memory_space ();
  }

  /* Add the followup notes to the warning: valid array bounds (if
     determinable) and the text-art access diagram (if enabled).  */
  void
  maybe_show_notes (diagnostic_emission_context &ctxt) const
  {
    maybe_describe_array_bounds (loc: ctxt.get_location ());
    maybe_show_diagram (logger: ctxt.get_logger ());
  }

  /* Potentially add a note about valid ways to index this array, such
     as (given "int arr[10];"):
       note: valid subscripts for 'arr' are '[0]' to '[9]'
     We print the '[' and ']' characters so as to express the valid
     subscripts using C syntax, rather than just as byte ranges,
     which hopefully is more clear to the user.  */
  void
  maybe_describe_array_bounds (location_t loc) const
  {
    if (!m_diag_arg)
      return;
    tree t = TREE_TYPE (m_diag_arg);
    if (!t)
      return;
    if (TREE_CODE (t) != ARRAY_TYPE)
      return;
    tree domain = TYPE_DOMAIN (t);
    if (!domain)
      return;
    tree max_idx = TYPE_MAX_VALUE (domain);
    if (!max_idx)
      return;
    tree min_idx = TYPE_MIN_VALUE (domain);
    inform (loc,
	    "valid subscripts for %qE are %<[%E]%> to %<[%E]%>",
	    m_diag_arg, min_idx, max_idx);
  }

  /* If the diagram theme is enabled, emit a text-art diagram
     visualizing the access.  */
  void
  maybe_show_diagram (logger *logger) const
  {
    access_operation op (m_model, get_dir (), *m_reg, m_sval_hint);

    /* Don't attempt to make a diagram if there's no valid way of
       accessing the base region (e.g. a 0-element array).  */
    if (op.get_valid_bits ().empty_p ())
      return;

    if (const text_art::theme *theme = global_dc->get_diagram_theme ())
      {
	text_art::style_manager sm;
	text_art::canvas canvas (make_access_diagram (op, sm, theme: *theme, logger));
	if (canvas.get_size ().w == 0 && canvas.get_size ().h == 0)
	  {
	    /* In lieu of exceptions, return a zero-sized diagram if there's
	       a problem.  Give up if that's happened.  */
	    return;
	  }
	diagnostic_diagram diagram
	  (canvas,
	   /* Alt text.  */
	   _("Diagram visualizing the predicted out-of-bounds access"));
	global_dc->emit_diagram (diagram);
      }
  }

  text_art::canvas
  make_access_diagram (const access_operation &op,
		       text_art::style_manager &sm,
		       const text_art::theme &theme,
		       logger *logger) const
  {
    access_diagram d (op, m_region_creation_event_id, sm, theme, logger);
    return d.to_canvas (style_mgr: sm);
  }

  region_model m_model;	      /* Model at the point of the access.  */
  const region *m_reg;	      /* The region accessed out-of-bounds.  */
  tree m_diag_arg;	      /* Tree for messages; can be NULL.  */
  const svalue *m_sval_hint;  /* Optional value hint; can be NULL.  */
  /* Set by oob_region_creation_event_capacity::prepare_for_emission.  */
  diagnostic_event_id_t m_region_creation_event_id;
};
218
219/* Abstract base class for all out-of-bounds warnings where the
220 out-of-bounds range is concrete. */
221
222class concrete_out_of_bounds : public out_of_bounds
223{
224public:
225 concrete_out_of_bounds (const region_model &model,
226 const region *reg, tree diag_arg,
227 bit_range out_of_bounds_bits,
228 const svalue *sval_hint)
229 : out_of_bounds (model, reg, diag_arg, sval_hint),
230 m_out_of_bounds_bits (out_of_bounds_bits)
231 {}
232
233 bool subclass_equal_p (const pending_diagnostic &base_other) const override
234 {
235 const concrete_out_of_bounds &other
236 (static_cast <const concrete_out_of_bounds &>(base_other));
237 return (out_of_bounds::subclass_equal_p (base_other: other)
238 && m_out_of_bounds_bits == other.m_out_of_bounds_bits);
239 }
240
241 void maybe_add_sarif_properties (sarif_object &result_obj)
242 const override
243 {
244 out_of_bounds::maybe_add_sarif_properties (result_obj);
245 sarif_property_bag &props = result_obj.get_or_create_properties ();
246#define PROPERTY_PREFIX "gcc/analyzer/concrete_out_of_bounds/"
247 props.set (PROPERTY_PREFIX "out_of_bounds_bits",
248 v: m_out_of_bounds_bits.to_json ());
249 byte_range out_of_bounds_bytes (0, 0);
250 if (get_out_of_bounds_bytes (out: &out_of_bounds_bytes))
251 props.set (PROPERTY_PREFIX "out_of_bounds_bytes",
252 v: out_of_bounds_bytes.to_json ());
253#undef PROPERTY_PREFIX
254 }
255
256 bool get_out_of_bounds_bytes (byte_range *out) const
257 {
258 return m_out_of_bounds_bits.as_byte_range (out);
259 }
260
261protected:
262 bit_range m_out_of_bounds_bits;
263};
264
/* Abstract subclass to complain about concrete out-of-bounds
   accesses past the end of the buffer. */
267
class concrete_past_the_end : public concrete_out_of_bounds
{
public:
  /* BIT_BOUND: the bit offset at which the valid part of the buffer
     ends (can be NULL).  */
  concrete_past_the_end (const region_model &model,
			 const region *reg, tree diag_arg, bit_range range,
			 tree bit_bound,
			 const svalue *sval_hint)
  : concrete_out_of_bounds (model, reg, diag_arg, range, sval_hint),
    m_bit_bound (bit_bound),
    m_byte_bound (NULL_TREE)
  {
    /* If the bit-level bound is a compile-time constant, precompute
       the equivalent byte-level bound, for use in events and in
       byte-based descriptions of the access.  */
    if (m_bit_bound && TREE_CODE (m_bit_bound) == INTEGER_CST)
      m_byte_bound
	= wide_int_to_tree (size_type_node,
			    cst: wi::to_offset (t: m_bit_bound) >> LOG2_BITS_PER_UNIT);
  }

  /* As per concrete_out_of_bounds, but the bound must also match.  */
  bool
  subclass_equal_p (const pending_diagnostic &base_other) const final override
  {
    const concrete_past_the_end &other
      (static_cast <const concrete_past_the_end &>(base_other));
    return (concrete_out_of_bounds::subclass_equal_p (base_other: other)
	    && pending_diagnostic::same_tree_p (t1: m_bit_bound,
						t2: other.m_bit_bound));
  }

  /* Use the precomputed byte bound as the region's capacity, rather
     than the capacity supplied by the caller.  */
  void add_region_creation_events (const region *,
				   tree,
				   const event_loc_info &loc_info,
				   checker_path &emission_path) final override
  {
    if (m_byte_bound && TREE_CODE (m_byte_bound) == INTEGER_CST)
      emission_path.add_event
	(event: make_unique<oob_region_creation_event_capacity> (args&: m_byte_bound,
							     args: loc_info,
							     args&: *this));
  }

  /* Record the bounds in the SARIF property bag.  */
  void maybe_add_sarif_properties (sarif_object &result_obj)
    const final override
  {
    concrete_out_of_bounds::maybe_add_sarif_properties (result_obj);
    sarif_property_bag &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/concrete_past_the_end/"
    props.set (PROPERTY_PREFIX "bit_bound",
	       v: tree_to_json (node: m_bit_bound));
    props.set (PROPERTY_PREFIX "byte_bound",
	       v: tree_to_json (node: m_byte_bound));
#undef PROPERTY_PREFIX
  }

protected:
  tree m_bit_bound;   /* End of the valid data, in bits; can be NULL.  */
  tree m_byte_bound;  /* m_bit_bound in bytes, if constant, else NULL.  */
};
324
325/* Concrete subclass to complain about buffer overflows. */
326
class concrete_buffer_overflow : public concrete_past_the_end
{
public:
  concrete_buffer_overflow (const region_model &model,
			    const region *reg, tree diag_arg,
			    bit_range range, tree bit_bound,
			    const svalue *sval_hint)
  : concrete_past_the_end (model, reg, diag_arg, range, bit_bound, sval_hint)
  {}

  const char *get_kind () const final override
  {
    return "concrete_buffer_overflow";
  }

  /* Emit the warning, choosing the CWE and wording based on the
     memory space of the accessed region, then add a note giving the
     size of the bad write (in bytes where whole, otherwise in bits)
     when that size is known.  Returns true if a warning was emitted.  */
  bool emit (diagnostic_emission_context &ctxt) final override
  {
    bool warned;
    switch (get_memory_space ())
      {
      default:
	/* CWE-787: Out-of-bounds Write.  */
	ctxt.add_cwe (cwe: 787);
	warned = ctxt.warn ("buffer overflow");
	break;
      case MEMSPACE_STACK:
	/* CWE-121: Stack-based Buffer Overflow.  */
	ctxt.add_cwe (cwe: 121);
	warned = ctxt.warn ("stack-based buffer overflow");
	break;
      case MEMSPACE_HEAP:
	/* CWE-122: Heap-based Buffer Overflow.  */
	ctxt.add_cwe (cwe: 122);
	warned = ctxt.warn ("heap-based buffer overflow");
	break;
      }

    if (warned)
      {
	if (wi::fits_uhwi_p (x: m_out_of_bounds_bits.m_size_in_bits))
	  {
	    unsigned HOST_WIDE_INT num_bad_bits
	      = m_out_of_bounds_bits.m_size_in_bits.to_uhwi ();
	    if (num_bad_bits % BITS_PER_UNIT == 0)
	      {
		/* Whole number of bytes; describe in bytes.  */
		unsigned HOST_WIDE_INT num_bad_bytes
		  = num_bad_bits / BITS_PER_UNIT;
		if (m_diag_arg)
		  inform_n (ctxt.get_location (),
			    num_bad_bytes,
			    "write of %wu byte to beyond the end of %qE",
			    "write of %wu bytes to beyond the end of %qE",
			    num_bad_bytes,
			    m_diag_arg);
		else
		  inform_n (ctxt.get_location (),
			    num_bad_bytes,
			    "write of %wu byte to beyond the end of the region",
			    "write of %wu bytes to beyond the end of the region",
			    num_bad_bytes);
	      }
	    else
	      {
		/* Sub-byte access; describe in bits.  */
		if (m_diag_arg)
		  inform_n (ctxt.get_location (),
			    num_bad_bits,
			    "write of %wu bit to beyond the end of %qE",
			    "write of %wu bits to beyond the end of %qE",
			    num_bad_bits,
			    m_diag_arg);
		else
		  inform_n (ctxt.get_location (),
			    num_bad_bits,
			    "write of %wu bit to beyond the end of the region",
			    "write of %wu bits to beyond the end of the region",
			    num_bad_bits);
	      }
	  }
	else if (m_diag_arg)
	  /* Size of the bad write not expressible as a host int.  */
	  inform (ctxt.get_location (),
		  "write to beyond the end of %qE",
		  m_diag_arg);

	maybe_show_notes (ctxt);
      }

    return warned;
  }

  /* Prefer describing the final event in bytes when we have a
     byte-level bound (or no bound at all) and the bad bit range is
     expressible as bytes; otherwise fall back to bits.  */
  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    if (m_byte_bound || !m_bit_bound)
      {
	byte_range out_of_bounds_bytes (0, 0);
	if (get_out_of_bounds_bytes (out: &out_of_bounds_bytes))
	  return describe_final_event_as_bytes (ev, out_of_bounds_bytes);
      }
    return describe_final_event_as_bits (ev);
  }

  /* Byte-based wording for the final event.  */
  label_text
  describe_final_event_as_bytes (const evdesc::final_event &ev,
				 const byte_range &out_of_bounds_bytes)
  {
    byte_size_t start = out_of_bounds_bytes.get_start_byte_offset ();
    byte_size_t end = out_of_bounds_bytes.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (wi: start, buf: start_buf, sgn: SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (wi: end, buf: end_buf, sgn: SIGNED);

    if (start == end)
      {
	/* Single byte.  */
	if (m_diag_arg)
	  return ev.formatted_print (fmt: "out-of-bounds write at byte %s but %qE"
				     " ends at byte %E", start_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print (fmt: "out-of-bounds write at byte %s but region"
				   " ends at byte %E", start_buf,
				   m_byte_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print (fmt: "out-of-bounds write from byte %s till"
				     " byte %s but %qE ends at byte %E",
				     start_buf, end_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print (fmt: "out-of-bounds write from byte %s till"
				   " byte %s but region ends at byte %E",
				   start_buf, end_buf, m_byte_bound);
      }
  }

  /* Bit-based wording for the final event.  */
  label_text describe_final_event_as_bits (const evdesc::final_event &ev)
  {
    bit_size_t start = m_out_of_bounds_bits.get_start_bit_offset ();
    bit_size_t end = m_out_of_bounds_bits.get_last_bit_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (wi: start, buf: start_buf, sgn: SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (wi: end, buf: end_buf, sgn: SIGNED);

    if (start == end)
      {
	/* Single bit.  */
	if (m_diag_arg)
	  return ev.formatted_print (fmt: "out-of-bounds write at bit %s but %qE"
				     " ends at bit %E", start_buf, m_diag_arg,
				     m_bit_bound);
	return ev.formatted_print (fmt: "out-of-bounds write at bit %s but region"
				   " ends at bit %E", start_buf,
				   m_bit_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print (fmt: "out-of-bounds write from bit %s till"
				     " bit %s but %qE ends at bit %E",
				     start_buf, end_buf, m_diag_arg,
				     m_bit_bound);
	return ev.formatted_print (fmt: "out-of-bounds write from bit %s till"
				   " bit %s but region ends at bit %E",
				   start_buf, end_buf, m_bit_bound);
      }
  }

  enum access_direction get_dir () const final override { return DIR_WRITE; }
};
493
494/* Concrete subclass to complain about buffer over-reads. */
495
class concrete_buffer_over_read : public concrete_past_the_end
{
public:
  /* No sval_hint for reads (nothing is being written).  */
  concrete_buffer_over_read (const region_model &model,
			     const region *reg, tree diag_arg,
			     bit_range range, tree bit_bound)
  : concrete_past_the_end (model, reg, diag_arg, range, bit_bound, NULL)
  {}

  const char *get_kind () const final override
  {
    return "concrete_buffer_over_read";
  }

  /* Emit the warning, with wording based on the memory space of the
     accessed region, then add a note giving the size of the bad read
     (in bytes where whole, otherwise in bits) when that size is known.
     Returns true if a warning was emitted.  */
  bool emit (diagnostic_emission_context &ctxt) final override
  {
    bool warned;
    /* CWE-126: Buffer Over-read.  */
    ctxt.add_cwe (cwe: 126);
    switch (get_memory_space ())
      {
      default:
	warned = ctxt.warn ("buffer over-read");
	break;
      case MEMSPACE_STACK:
	warned = ctxt.warn ("stack-based buffer over-read");
	break;
      case MEMSPACE_HEAP:
	warned = ctxt.warn ("heap-based buffer over-read");
	break;
      }

    if (warned)
      {
	if (wi::fits_uhwi_p (x: m_out_of_bounds_bits.m_size_in_bits))
	  {
	    unsigned HOST_WIDE_INT num_bad_bits
	      = m_out_of_bounds_bits.m_size_in_bits.to_uhwi ();
	    if (num_bad_bits % BITS_PER_UNIT == 0)
	      {
		/* Whole number of bytes; describe in bytes.  */
		unsigned HOST_WIDE_INT num_bad_bytes
		  = num_bad_bits / BITS_PER_UNIT;
		if (m_diag_arg)
		  inform_n (ctxt.get_location (),
			    num_bad_bytes,
			    "read of %wu byte from after the end of %qE",
			    "read of %wu bytes from after the end of %qE",
			    num_bad_bytes,
			    m_diag_arg);
		else
		  inform_n (ctxt.get_location (),
			    num_bad_bytes,
			    "read of %wu byte from after the end of the region",
			    "read of %wu bytes from after the end of the region",
			    num_bad_bytes);
	      }
	    else
	      {
		/* Sub-byte access; describe in bits.  */
		if (m_diag_arg)
		  inform_n (ctxt.get_location (),
			    num_bad_bits,
			    "read of %wu bit from after the end of %qE",
			    "read of %wu bits from after the end of %qE",
			    num_bad_bits,
			    m_diag_arg);
		else
		  inform_n (ctxt.get_location (),
			    num_bad_bits,
			    "read of %wu bit from after the end of the region",
			    "read of %wu bits from after the end of the region",
			    num_bad_bits);
	      }
	  }
	else if (m_diag_arg)
	  /* Size of the bad read not expressible as a host int.  */
	  inform (ctxt.get_location (),
		  "read from after the end of %qE",
		  m_diag_arg);

	maybe_show_notes (ctxt);
      }

    return warned;
  }

  /* Prefer describing the final event in bytes when we have a
     byte-level bound (or no bound at all) and the bad bit range is
     expressible as bytes; otherwise fall back to bits.  */
  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    if (m_byte_bound || !m_bit_bound)
      {
	byte_range out_of_bounds_bytes (0, 0);
	if (get_out_of_bounds_bytes (out: &out_of_bounds_bytes))
	  return describe_final_event_as_bytes (ev, out_of_bounds_bytes);
      }
    return describe_final_event_as_bits (ev);
  }

  /* Byte-based wording for the final event.  */
  label_text
  describe_final_event_as_bytes (const evdesc::final_event &ev,
				 const byte_range &out_of_bounds_bytes)
  {
    byte_size_t start = out_of_bounds_bytes.get_start_byte_offset ();
    byte_size_t end = out_of_bounds_bytes.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (wi: start, buf: start_buf, sgn: SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (wi: end, buf: end_buf, sgn: SIGNED);

    if (start == end)
      {
	/* Single byte.  */
	if (m_diag_arg)
	  return ev.formatted_print (fmt: "out-of-bounds read at byte %s but %qE"
				     " ends at byte %E", start_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print (fmt: "out-of-bounds read at byte %s but region"
				   " ends at byte %E", start_buf,
				   m_byte_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print (fmt: "out-of-bounds read from byte %s till"
				     " byte %s but %qE ends at byte %E",
				     start_buf, end_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print (fmt: "out-of-bounds read from byte %s till"
				   " byte %s but region ends at byte %E",
				   start_buf, end_buf, m_byte_bound);
      }
  }

  /* Bit-based wording for the final event.  */
  label_text describe_final_event_as_bits (const evdesc::final_event &ev)
  {
    bit_size_t start = m_out_of_bounds_bits.get_start_bit_offset ();
    bit_size_t end = m_out_of_bounds_bits.get_last_bit_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (wi: start, buf: start_buf, sgn: SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (wi: end, buf: end_buf, sgn: SIGNED);

    if (start == end)
      {
	/* Single bit.  */
	if (m_diag_arg)
	  return ev.formatted_print (fmt: "out-of-bounds read at bit %s but %qE"
				     " ends at bit %E", start_buf, m_diag_arg,
				     m_bit_bound);
	return ev.formatted_print (fmt: "out-of-bounds read at bit %s but region"
				   " ends at bit %E", start_buf,
				   m_bit_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print (fmt: "out-of-bounds read from bit %s till"
				     " bit %s but %qE ends at bit %E",
				     start_buf, end_buf, m_diag_arg,
				     m_bit_bound);
	return ev.formatted_print (fmt: "out-of-bounds read from bit %s till"
				   " bit %s but region ends at bit %E",
				   start_buf, end_buf, m_bit_bound);
      }
  }

  enum access_direction get_dir () const final override { return DIR_READ; }
};
659
660/* Concrete subclass to complain about buffer underwrites. */
661
662class concrete_buffer_underwrite : public concrete_out_of_bounds
663{
664public:
665 concrete_buffer_underwrite (const region_model &model,
666 const region *reg, tree diag_arg,
667 bit_range range,
668 const svalue *sval_hint)
669 : concrete_out_of_bounds (model, reg, diag_arg, range, sval_hint)
670 {}
671
672 const char *get_kind () const final override
673 {
674 return "concrete_buffer_underwrite";
675 }
676
677 bool emit (diagnostic_emission_context &ctxt) final override
678 {
679 bool warned;
680 ctxt.add_cwe (cwe: 124);
681 switch (get_memory_space ())
682 {
683 default:
684 warned = ctxt.warn ("buffer underwrite");
685 break;
686 case MEMSPACE_STACK:
687 warned = ctxt.warn ("stack-based buffer underwrite");
688 break;
689 case MEMSPACE_HEAP:
690 warned = ctxt.warn ("heap-based buffer underwrite");
691 break;
692 }
693 if (warned)
694 maybe_show_notes (ctxt);
695 return warned;
696 }
697
698 label_text describe_final_event (const evdesc::final_event &ev)
699 final override
700 {
701 byte_range out_of_bounds_bytes (0, 0);
702 if (get_out_of_bounds_bytes (out: &out_of_bounds_bytes))
703 return describe_final_event_as_bytes (ev, out_of_bounds_bytes);
704 return describe_final_event_as_bits (ev);
705 }
706
707 label_text
708 describe_final_event_as_bytes (const evdesc::final_event &ev,
709 const byte_range &out_of_bounds_bytes)
710 {
711 byte_size_t start = out_of_bounds_bytes.get_start_byte_offset ();
712 byte_size_t end = out_of_bounds_bytes.get_last_byte_offset ();
713 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
714 print_dec (wi: start, buf: start_buf, sgn: SIGNED);
715 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
716 print_dec (wi: end, buf: end_buf, sgn: SIGNED);
717
718 if (start == end)
719 {
720 if (m_diag_arg)
721 return ev.formatted_print (fmt: "out-of-bounds write at byte %s but %qE"
722 " starts at byte 0",
723 start_buf, m_diag_arg);
724 return ev.formatted_print (fmt: "out-of-bounds write at byte %s but region"
725 " starts at byte 0", start_buf);
726 }
727 else
728 {
729 if (m_diag_arg)
730 return ev.formatted_print (fmt: "out-of-bounds write from byte %s till"
731 " byte %s but %qE starts at byte 0",
732 start_buf, end_buf, m_diag_arg);
733 return ev.formatted_print (fmt: "out-of-bounds write from byte %s till"
734 " byte %s but region starts at byte 0",
735 start_buf, end_buf);;
736 }
737 }
738
739 label_text
740 describe_final_event_as_bits (const evdesc::final_event &ev)
741 {
742 bit_size_t start = m_out_of_bounds_bits.get_start_bit_offset ();
743 bit_size_t end = m_out_of_bounds_bits.get_last_bit_offset ();
744 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
745 print_dec (wi: start, buf: start_buf, sgn: SIGNED);
746 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
747 print_dec (wi: end, buf: end_buf, sgn: SIGNED);
748
749 if (start == end)
750 {
751 if (m_diag_arg)
752 return ev.formatted_print (fmt: "out-of-bounds write at bit %s but %qE"
753 " starts at bit 0",
754 start_buf, m_diag_arg);
755 return ev.formatted_print (fmt: "out-of-bounds write at bit %s but region"
756 " starts at bit 0", start_buf);
757 }
758 else
759 {
760 if (m_diag_arg)
761 return ev.formatted_print (fmt: "out-of-bounds write from bit %s till"
762 " bit %s but %qE starts at bit 0",
763 start_buf, end_buf, m_diag_arg);
764 return ev.formatted_print (fmt: "out-of-bounds write from bit %s till"
765 " bit %s but region starts at bit 0",
766 start_buf, end_buf);;
767 }
768 }
769
770 enum access_direction get_dir () const final override { return DIR_WRITE; }
771};
772
773/* Concrete subclass to complain about buffer under-reads. */
774
775class concrete_buffer_under_read : public concrete_out_of_bounds
776{
777public:
778 concrete_buffer_under_read (const region_model &model,
779 const region *reg, tree diag_arg,
780 bit_range range)
781 : concrete_out_of_bounds (model, reg, diag_arg, range, NULL)
782 {}
783
784 const char *get_kind () const final override
785 {
786 return "concrete_buffer_under_read";
787 }
788
789 bool emit (diagnostic_emission_context &ctxt) final override
790 {
791 bool warned;
792 ctxt.add_cwe (cwe: 127);
793 switch (get_memory_space ())
794 {
795 default:
796 warned = ctxt.warn ("buffer under-read");
797 break;
798 case MEMSPACE_STACK:
799 warned = ctxt.warn ("stack-based buffer under-read");
800 break;
801 case MEMSPACE_HEAP:
802 warned = ctxt.warn ("heap-based buffer under-read");
803 break;
804 }
805 if (warned)
806 maybe_show_notes (ctxt);
807 return warned;
808 }
809
810 label_text describe_final_event (const evdesc::final_event &ev)
811 final override
812 {
813 byte_range out_of_bounds_bytes (0, 0);
814 if (get_out_of_bounds_bytes (out: &out_of_bounds_bytes))
815 return describe_final_event_as_bytes (ev, out_of_bounds_bytes);
816 return describe_final_event_as_bits (ev);
817 }
818
819 label_text
820 describe_final_event_as_bytes (const evdesc::final_event &ev,
821 const byte_range &out_of_bounds_bytes)
822 {
823 byte_size_t start = out_of_bounds_bytes.get_start_byte_offset ();
824 byte_size_t end = out_of_bounds_bytes.get_last_byte_offset ();
825 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
826 print_dec (wi: start, buf: start_buf, sgn: SIGNED);
827 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
828 print_dec (wi: end, buf: end_buf, sgn: SIGNED);
829
830 if (start == end)
831 {
832 if (m_diag_arg)
833 return ev.formatted_print (fmt: "out-of-bounds read at byte %s but %qE"
834 " starts at byte 0", start_buf,
835 m_diag_arg);
836 return ev.formatted_print (fmt: "out-of-bounds read at byte %s but region"
837 " starts at byte 0", start_buf);
838 }
839 else
840 {
841 if (m_diag_arg)
842 return ev.formatted_print (fmt: "out-of-bounds read from byte %s till"
843 " byte %s but %qE starts at byte 0",
844 start_buf, end_buf, m_diag_arg);
845 return ev.formatted_print (fmt: "out-of-bounds read from byte %s till"
846 " byte %s but region starts at byte 0",
847 start_buf, end_buf);;
848 }
849 }
850
851 label_text describe_final_event_as_bits (const evdesc::final_event &ev)
852 {
853 bit_size_t start = m_out_of_bounds_bits.get_start_bit_offset ();
854 bit_size_t end = m_out_of_bounds_bits.get_last_bit_offset ();
855 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
856 print_dec (wi: start, buf: start_buf, sgn: SIGNED);
857 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
858 print_dec (wi: end, buf: end_buf, sgn: SIGNED);
859
860 if (start == end)
861 {
862 if (m_diag_arg)
863 return ev.formatted_print (fmt: "out-of-bounds read at bit %s but %qE"
864 " starts at bit 0", start_buf,
865 m_diag_arg);
866 return ev.formatted_print (fmt: "out-of-bounds read at bit %s but region"
867 " starts at bit 0", start_buf);
868 }
869 else
870 {
871 if (m_diag_arg)
872 return ev.formatted_print (fmt: "out-of-bounds read from bit %s till"
873 " bit %s but %qE starts at bit 0",
874 start_buf, end_buf, m_diag_arg);
875 return ev.formatted_print (fmt: "out-of-bounds read from bit %s till"
876 " bit %s but region starts at bit 0",
877 start_buf, end_buf);;
878 }
879 }
880
881 enum access_direction get_dir () const final override { return DIR_READ; }
882};
883
884/* Abstract class to complain about out-of-bounds read/writes where
885 the values are symbolic. */
886
887class symbolic_past_the_end : public out_of_bounds
888{
889public:
890 symbolic_past_the_end (const region_model &model,
891 const region *reg, tree diag_arg, tree offset,
892 tree num_bytes, tree capacity,
893 const svalue *sval_hint)
894 : out_of_bounds (model, reg, diag_arg, sval_hint),
895 m_offset (offset),
896 m_num_bytes (num_bytes),
897 m_capacity (capacity)
898 {}
899
900 bool
901 subclass_equal_p (const pending_diagnostic &base_other) const final override
902 {
903 const symbolic_past_the_end &other
904 (static_cast <const symbolic_past_the_end &>(base_other));
905 return (out_of_bounds::subclass_equal_p (base_other: other)
906 && pending_diagnostic::same_tree_p (t1: m_offset, t2: other.m_offset)
907 && pending_diagnostic::same_tree_p (t1: m_num_bytes, t2: other.m_num_bytes)
908 && pending_diagnostic::same_tree_p (t1: m_capacity, t2: other.m_capacity));
909 }
910
911 void maybe_add_sarif_properties (sarif_object &result_obj)
912 const final override
913 {
914 out_of_bounds::maybe_add_sarif_properties (result_obj);
915 sarif_property_bag &props = result_obj.get_or_create_properties ();
916#define PROPERTY_PREFIX "gcc/analyzer/symbolic_past_the_end/"
917 props.set (PROPERTY_PREFIX "offset", v: tree_to_json (node: m_offset));
918 props.set (PROPERTY_PREFIX "num_bytes", v: tree_to_json (node: m_num_bytes));
919 props.set (PROPERTY_PREFIX "capacity", v: tree_to_json (node: m_capacity));
920#undef PROPERTY_PREFIX
921 }
922
923protected:
924 tree m_offset;
925 tree m_num_bytes;
926 tree m_capacity;
927};
928
929/* Concrete subclass to complain about overflows with symbolic values. */
930
931class symbolic_buffer_overflow : public symbolic_past_the_end
932{
933public:
934 symbolic_buffer_overflow (const region_model &model,
935 const region *reg, tree diag_arg, tree offset,
936 tree num_bytes, tree capacity,
937 const svalue *sval_hint)
938 : symbolic_past_the_end (model, reg, diag_arg, offset, num_bytes, capacity,
939 sval_hint)
940 {
941 }
942
943 const char *get_kind () const final override
944 {
945 return "symbolic_buffer_overflow";
946 }
947
948 bool emit (diagnostic_emission_context &ctxt) final override
949 {
950 bool warned;
951 switch (get_memory_space ())
952 {
953 default:
954 ctxt.add_cwe (cwe: 787);
955 warned = ctxt.warn ("buffer overflow");
956 break;
957 case MEMSPACE_STACK:
958 ctxt.add_cwe (cwe: 121);
959 warned = ctxt.warn ("stack-based buffer overflow");
960 break;
961 case MEMSPACE_HEAP:
962 ctxt.add_cwe (cwe: 122);
963 warned = ctxt.warn ("heap-based buffer overflow");
964 break;
965 }
966 if (warned)
967 maybe_show_notes (ctxt);
968 return warned;
969 }
970
971 label_text
972 describe_final_event (const evdesc::final_event &ev) final override
973 {
974 if (m_offset)
975 {
976 /* Known offset. */
977 if (m_num_bytes)
978 {
979 /* Known offset, known size. */
980 if (TREE_CODE (m_num_bytes) == INTEGER_CST)
981 {
982 /* Known offset, known constant size. */
983 if (pending_diagnostic::same_tree_p (t1: m_num_bytes,
984 integer_one_node))
985 {
986 /* Singular m_num_bytes. */
987 if (m_diag_arg)
988 return ev.formatted_print
989 (fmt: "write of %E byte at offset %qE exceeds %qE",
990 m_num_bytes, m_offset, m_diag_arg);
991 else
992 return ev.formatted_print
993 (fmt: "write of %E byte at offset %qE exceeds the buffer",
994 m_num_bytes, m_offset);
995 }
996 else
997 {
998 /* Plural m_num_bytes. */
999 if (m_diag_arg)
1000 return ev.formatted_print
1001 (fmt: "write of %E bytes at offset %qE exceeds %qE",
1002 m_num_bytes, m_offset, m_diag_arg);
1003 else
1004 return ev.formatted_print
1005 (fmt: "write of %E bytes at offset %qE exceeds the buffer",
1006 m_num_bytes, m_offset);
1007 }
1008 }
1009 else
1010 {
1011 /* Known offset, known symbolic size. */
1012 if (m_diag_arg)
1013 return ev.formatted_print
1014 (fmt: "write of %qE bytes at offset %qE exceeds %qE",
1015 m_num_bytes, m_offset, m_diag_arg);
1016 else
1017 return ev.formatted_print
1018 (fmt: "write of %qE bytes at offset %qE exceeds the buffer",
1019 m_num_bytes, m_offset);
1020 }
1021 }
1022 else
1023 {
1024 /* Known offset, unknown size. */
1025 if (m_diag_arg)
1026 return ev.formatted_print (fmt: "write at offset %qE exceeds %qE",
1027 m_offset, m_diag_arg);
1028 else
1029 return ev.formatted_print (fmt: "write at offset %qE exceeds the"
1030 " buffer", m_offset);
1031 }
1032 }
1033 /* Unknown offset. */
1034 if (m_diag_arg)
1035 return ev.formatted_print (fmt: "out-of-bounds write on %qE",
1036 m_diag_arg);
1037 return ev.formatted_print (fmt: "out-of-bounds write");
1038 }
1039
  /* This diagnostic concerns a write access.  */
  enum access_direction get_dir () const final override { return DIR_WRITE; }
1041};
1042
1043/* Concrete subclass to complain about over-reads with symbolic values. */
1044
1045class symbolic_buffer_over_read : public symbolic_past_the_end
1046{
1047public:
1048 symbolic_buffer_over_read (const region_model &model,
1049 const region *reg, tree diag_arg, tree offset,
1050 tree num_bytes, tree capacity)
1051 : symbolic_past_the_end (model, reg, diag_arg, offset, num_bytes, capacity,
1052 NULL)
1053 {
1054 }
1055
1056 const char *get_kind () const final override
1057 {
1058 return "symbolic_buffer_over_read";
1059 }
1060
1061 bool emit (diagnostic_emission_context &ctxt) final override
1062 {
1063 ctxt.add_cwe (cwe: 126);
1064 bool warned;
1065 switch (get_memory_space ())
1066 {
1067 default:
1068 ctxt.add_cwe (cwe: 787);
1069 warned = ctxt.warn ("buffer over-read");
1070 break;
1071 case MEMSPACE_STACK:
1072 ctxt.add_cwe (cwe: 121);
1073 warned = ctxt.warn ("stack-based buffer over-read");
1074 break;
1075 case MEMSPACE_HEAP:
1076 ctxt.add_cwe (cwe: 122);
1077 warned = ctxt.warn ("heap-based buffer over-read");
1078 break;
1079 }
1080 if (warned)
1081 maybe_show_notes (ctxt);
1082 return warned;
1083 }
1084
1085 label_text
1086 describe_final_event (const evdesc::final_event &ev) final override
1087 {
1088 if (m_offset)
1089 {
1090 /* Known offset. */
1091 if (m_num_bytes)
1092 {
1093 /* Known offset, known size. */
1094 if (TREE_CODE (m_num_bytes) == INTEGER_CST)
1095 {
1096 /* Known offset, known constant size. */
1097 if (pending_diagnostic::same_tree_p (t1: m_num_bytes,
1098 integer_one_node))
1099 {
1100 /* Singular m_num_bytes. */
1101 if (m_diag_arg)
1102 return ev.formatted_print
1103 (fmt: "read of %E byte at offset %qE exceeds %qE",
1104 m_num_bytes, m_offset, m_diag_arg);
1105 else
1106 return ev.formatted_print
1107 (fmt: "read of %E byte at offset %qE exceeds the buffer",
1108 m_num_bytes, m_offset);
1109 }
1110 else
1111 {
1112 /* Plural m_num_bytes. */
1113 if (m_diag_arg)
1114 return ev.formatted_print
1115 (fmt: "read of %E bytes at offset %qE exceeds %qE",
1116 m_num_bytes, m_offset, m_diag_arg);
1117 else
1118 return ev.formatted_print
1119 (fmt: "read of %E bytes at offset %qE exceeds the buffer",
1120 m_num_bytes, m_offset);
1121 }
1122 }
1123 else
1124 {
1125 /* Known offset, known symbolic size. */
1126 if (m_diag_arg)
1127 return ev.formatted_print
1128 (fmt: "read of %qE bytes at offset %qE exceeds %qE",
1129 m_num_bytes, m_offset, m_diag_arg);
1130 else
1131 return ev.formatted_print
1132 (fmt: "read of %qE bytes at offset %qE exceeds the buffer",
1133 m_num_bytes, m_offset);
1134 }
1135 }
1136 else
1137 {
1138 /* Known offset, unknown size. */
1139 if (m_diag_arg)
1140 return ev.formatted_print (fmt: "read at offset %qE exceeds %qE",
1141 m_offset, m_diag_arg);
1142 else
1143 return ev.formatted_print (fmt: "read at offset %qE exceeds the"
1144 " buffer", m_offset);
1145 }
1146 }
1147 /* Unknown offset. */
1148 if (m_diag_arg)
1149 return ev.formatted_print (fmt: "out-of-bounds read on %qE",
1150 m_diag_arg);
1151 return ev.formatted_print (fmt: "out-of-bounds read");
1152 }
1153
1154 enum access_direction get_dir () const final override { return DIR_READ; }
1155};
1156
1157const svalue *
1158strip_types (const svalue *sval,
1159 region_model_manager &mgr)
1160{
1161 switch (sval->get_kind ())
1162 {
1163 default:
1164 gcc_unreachable ();
1165 case SK_REGION:
1166 {
1167 const region_svalue *region_sval = (const region_svalue *)sval;
1168 return mgr.get_ptr_svalue (NULL_TREE, pointee: region_sval->get_pointee ());
1169 }
1170 case SK_CONSTANT:
1171 return sval;
1172 case SK_UNKNOWN:
1173 return mgr.get_or_create_unknown_svalue (NULL_TREE);
1174 case SK_POISONED:
1175 {
1176 const poisoned_svalue *poisoned_sval = (const poisoned_svalue *)sval;
1177 return mgr.get_or_create_poisoned_svalue
1178 (kind: poisoned_sval->get_poison_kind (),
1179 NULL_TREE);
1180 }
1181 case SK_SETJMP:
1182 return sval;
1183 case SK_INITIAL:
1184 return sval;
1185 case SK_UNARYOP:
1186 {
1187 const unaryop_svalue *unaryop_sval = (const unaryop_svalue *)sval;
1188 const enum tree_code op = unaryop_sval->get_op ();
1189 if (op == VIEW_CONVERT_EXPR || op == NOP_EXPR)
1190 return strip_types (sval: unaryop_sval->get_arg (), mgr);
1191 return mgr.get_or_create_unaryop
1192 (NULL_TREE,
1193 op,
1194 arg: strip_types (sval: unaryop_sval->get_arg (), mgr));
1195 }
1196 case SK_BINOP:
1197 {
1198 const binop_svalue *binop_sval = (const binop_svalue *)sval;
1199 const enum tree_code op = binop_sval->get_op ();
1200 return mgr.get_or_create_binop
1201 (NULL_TREE,
1202 op,
1203 arg0: strip_types (sval: binop_sval->get_arg0 (), mgr),
1204 arg1: strip_types (sval: binop_sval->get_arg1 (), mgr));
1205 }
1206 case SK_SUB:
1207 {
1208 const sub_svalue *sub_sval = (const sub_svalue *)sval;
1209 return mgr.get_or_create_sub_svalue
1210 (NULL_TREE,
1211 parent_svalue: strip_types (sval: sub_sval->get_parent (), mgr),
1212 subregion: sub_sval->get_subregion ());
1213 }
1214 case SK_REPEATED:
1215 {
1216 const repeated_svalue *repeated_sval = (const repeated_svalue *)sval;
1217 return mgr.get_or_create_repeated_svalue
1218 (NULL_TREE,
1219 outer_size: strip_types (sval: repeated_sval->get_outer_size (), mgr),
1220 inner_svalue: strip_types (sval: repeated_sval->get_inner_svalue (), mgr));
1221 }
1222 case SK_BITS_WITHIN:
1223 {
1224 const bits_within_svalue *bits_within_sval
1225 = (const bits_within_svalue *)sval;
1226 return mgr.get_or_create_bits_within
1227 (NULL_TREE,
1228 bits: bits_within_sval->get_bits (),
1229 inner_svalue: strip_types (sval: bits_within_sval->get_inner_svalue (), mgr));
1230 }
1231 case SK_UNMERGEABLE:
1232 {
1233 const unmergeable_svalue *unmergeable_sval
1234 = (const unmergeable_svalue *)sval;
1235 return mgr.get_or_create_unmergeable
1236 (arg: strip_types (sval: unmergeable_sval->get_arg (), mgr));
1237 }
1238 case SK_PLACEHOLDER:
1239 return sval;
1240 case SK_WIDENING:
1241 {
1242 const widening_svalue *widening_sval = (const widening_svalue *)sval;
1243 return mgr.get_or_create_widening_svalue
1244 (NULL_TREE,
1245 point: widening_sval->get_point (),
1246 base_svalue: strip_types (sval: widening_sval->get_base_svalue (), mgr),
1247 iter_svalue: strip_types (sval: widening_sval->get_iter_svalue (), mgr));
1248 }
1249 case SK_COMPOUND:
1250 {
1251 const compound_svalue *compound_sval = (const compound_svalue *)sval;
1252 binding_map typeless_map;
1253 for (auto iter : compound_sval->get_map ())
1254 {
1255 const binding_key *key = iter.first;
1256 const svalue *bound_sval = iter.second;
1257 typeless_map.put (k: key, v: strip_types (sval: bound_sval, mgr));
1258 }
1259 return mgr.get_or_create_compound_svalue (NULL_TREE, map: typeless_map);
1260 }
1261 case SK_CONJURED:
1262 return sval;
1263 case SK_ASM_OUTPUT:
1264 {
1265 const asm_output_svalue *asm_output_sval
1266 = (const asm_output_svalue *)sval;
1267 auto_vec<const svalue *> typeless_inputs
1268 (asm_output_sval->get_num_inputs ());
1269 for (unsigned idx = 0; idx < asm_output_sval->get_num_inputs (); idx++)
1270 typeless_inputs.quick_push
1271 (obj: strip_types (sval: asm_output_sval->get_input (idx),
1272 mgr));
1273 return mgr.get_or_create_asm_output_svalue
1274 (NULL_TREE,
1275 asm_string: asm_output_sval->get_asm_string (),
1276 output_idx: asm_output_sval->get_output_idx (),
1277 num_outputs: asm_output_sval->get_num_outputs (),
1278 inputs: typeless_inputs);
1279 }
1280 case SK_CONST_FN_RESULT:
1281 {
1282 const const_fn_result_svalue *const_fn_result_sval
1283 = (const const_fn_result_svalue *)sval;
1284 auto_vec<const svalue *> typeless_inputs
1285 (const_fn_result_sval->get_num_inputs ());
1286 for (unsigned idx = 0;
1287 idx < const_fn_result_sval->get_num_inputs ();
1288 idx++)
1289 typeless_inputs.quick_push
1290 (obj: strip_types (sval: const_fn_result_sval->get_input (idx),
1291 mgr));
1292 return mgr.get_or_create_const_fn_result_svalue
1293 (NULL_TREE,
1294 fndecl: const_fn_result_sval->get_fndecl (),
1295 inputs: typeless_inputs);
1296 }
1297 }
1298}
1299
1300/* Check whether an access is past the end of the BASE_REG.
1301 Return TRUE if the access was valid, FALSE otherwise. */
1302
1303bool
1304region_model::check_symbolic_bounds (const region *base_reg,
1305 const svalue *sym_byte_offset,
1306 const svalue *num_bytes_sval,
1307 const svalue *capacity,
1308 enum access_direction dir,
1309 const svalue *sval_hint,
1310 region_model_context *ctxt) const
1311{
1312 gcc_assert (ctxt);
1313
1314 const svalue *next_byte
1315 = m_mgr->get_or_create_binop (NULL_TREE, op: PLUS_EXPR,
1316 arg0: sym_byte_offset, arg1: num_bytes_sval);
1317
1318 next_byte = strip_types (sval: next_byte, mgr&: *m_mgr);
1319 capacity = strip_types (sval: capacity, mgr&: *m_mgr);
1320
1321 if (eval_condition (lhs: next_byte, op: GT_EXPR, rhs: capacity).is_true ())
1322 {
1323 tree diag_arg = get_representative_tree (reg: base_reg);
1324 tree offset_tree = get_representative_tree (sval: sym_byte_offset);
1325 tree num_bytes_tree = get_representative_tree (sval: num_bytes_sval);
1326 tree capacity_tree = get_representative_tree (sval: capacity);
1327 const region *offset_reg = m_mgr->get_offset_region (parent: base_reg,
1328 NULL_TREE,
1329 byte_offset: sym_byte_offset);
1330 const region *sized_offset_reg = m_mgr->get_sized_region (parent: offset_reg,
1331 NULL_TREE,
1332 byte_size_sval: num_bytes_sval);
1333 switch (dir)
1334 {
1335 default:
1336 gcc_unreachable ();
1337 break;
1338 case DIR_READ:
1339 gcc_assert (sval_hint == nullptr);
1340 ctxt->warn (d: make_unique<symbolic_buffer_over_read> (args: *this,
1341 args&: sized_offset_reg,
1342 args&: diag_arg,
1343 args&: offset_tree,
1344 args&: num_bytes_tree,
1345 args&: capacity_tree));
1346 return false;
1347 break;
1348 case DIR_WRITE:
1349 ctxt->warn (d: make_unique<symbolic_buffer_overflow> (args: *this,
1350 args&: sized_offset_reg,
1351 args&: diag_arg,
1352 args&: offset_tree,
1353 args&: num_bytes_tree,
1354 args&: capacity_tree,
1355 args&: sval_hint));
1356 return false;
1357 break;
1358 }
1359 }
1360 return true;
1361}
1362
1363static tree
1364maybe_get_integer_cst_tree (const svalue *sval)
1365{
1366 tree cst_tree = sval->maybe_get_constant ();
1367 if (cst_tree && TREE_CODE (cst_tree) == INTEGER_CST)
1368 return cst_tree;
1369
1370 return NULL_TREE;
1371}
1372
/* May complain when the access on REG is out-of-bounds.
   DIR says whether the access is a read or a write; SVAL_HINT, if
   non-null, is the value being written (passed through to the
   diagnostics); CTXT must be non-null.
   Underwrites/under-reads (negative offsets) and overflows/over-reads
   (past the capacity) are checked independently, so both may be
   reported for the same access.
   Return TRUE if the access was valid, FALSE otherwise.  */

bool
region_model::check_region_bounds (const region *reg,
				   enum access_direction dir,
				   const svalue *sval_hint,
				   region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  /* Get the offset.  */
  region_offset reg_offset = reg->get_offset (m_mgr);
  const region *base_reg = reg_offset.get_base_region ();

  /* Find out how many bits were accessed.  */
  const svalue *num_bits_sval = reg->get_bit_size_sval (m_mgr);
  tree num_bits_tree = maybe_get_integer_cst_tree (num_bits_sval);
  /* Bail out if 0 bits are accessed.  */
  if (num_bits_tree && zerop (num_bits_tree))
    return true;

  /* Get the capacity of the buffer (in bytes).  */
  const svalue *byte_capacity = get_capacity (base_reg);
  tree cst_byte_capacity_tree = maybe_get_integer_cst_tree (byte_capacity);

  /* The constant offset from a pointer is represented internally as a sizetype
     but should be interpreted as a signed value here.  The statement below
     converts the offset from bits to bytes and then to a signed integer with
     the same precision the sizetype has on the target system.

     For example, this is needed for out-of-bounds-3.c test1 to pass when
     compiled with a 64-bit gcc build targeting 32-bit systems.  */
  bit_offset_t bit_offset;
  if (!reg_offset.symbolic_p ())
    bit_offset = wi::sext (reg_offset.get_bit_offset (),
			   TYPE_PRECISION (size_type_node));

  /* If any of the base region, the offset, or the number of bytes accessed
     are symbolic, we have to reason about symbolic values.  */
  if (base_reg->symbolic_p () || reg_offset.symbolic_p () || !num_bits_tree)
    {
      /* Convert a concrete bit offset, if any, to a constant svalue in
	 bytes; otherwise use the symbolic byte offset directly.  */
      const svalue* byte_offset_sval;
      if (!reg_offset.symbolic_p ())
	{
	  tree byte_offset_tree
	    = wide_int_to_tree (integer_type_node,
				bit_offset >> LOG2_BITS_PER_UNIT);
	  byte_offset_sval
	    = m_mgr->get_or_create_constant_svalue (byte_offset_tree);
	}
      else
	byte_offset_sval = reg_offset.get_symbolic_byte_offset ();
      const svalue *num_bytes_sval = reg->get_byte_size_sval (m_mgr);
      return check_symbolic_bounds (base_reg, byte_offset_sval, num_bytes_sval,
				    byte_capacity, dir, sval_hint, ctxt);
    }

  /* Otherwise continue to check with concrete values.
     Note that BIT_OFFSET is initialized on every path reaching here,
     since the symbolic-offset case returned above.  */
  bit_range bits_outside (0, 0);
  bool oob_safe = true;
  /* NUM_BITS_TREE should always be interpreted as unsigned.  */
  bit_offset_t num_bits_unsigned = wi::to_offset (num_bits_tree);
  bit_range read_bits (bit_offset, num_bits_unsigned);
  /* If read_bits has a subset < 0, we do have an underwrite.  */
  if (read_bits.falls_short_of_p (0, &bits_outside))
    {
      tree diag_arg = get_representative_tree (base_reg);
      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  gcc_assert (sval_hint == nullptr);
	  ctxt->warn (make_unique<concrete_buffer_under_read> (*this, reg,
							       diag_arg,
							       bits_outside));
	  oob_safe = false;
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<concrete_buffer_underwrite> (*this,
							       reg, diag_arg,
							       bits_outside,
							       sval_hint));
	  oob_safe = false;
	  break;
	}
    }

  /* For accesses past the end, we do need a concrete capacity.  No need to
     do a symbolic check here because the inequality check does not reason
     whether constants are greater than symbolic values.  */
  if (!cst_byte_capacity_tree)
    return oob_safe;

  bit_range buffer (0, wi::to_offset (cst_byte_capacity_tree) * BITS_PER_UNIT);
  /* If READ_BITS exceeds BUFFER, we do have an overflow.  */
  if (read_bits.exceeds_p (buffer, &bits_outside))
    {
      /* BIT_BOUND is the first bit past the end of the buffer, reported
	 to the user as the valid upper bound.  */
      tree bit_bound = wide_int_to_tree (size_type_node,
					 buffer.get_next_bit_offset ());
      tree diag_arg = get_representative_tree (base_reg);

      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  gcc_assert (sval_hint == nullptr);
	  ctxt->warn (make_unique<concrete_buffer_over_read> (*this,
							      reg, diag_arg,
							      bits_outside,
							      bit_bound));
	  oob_safe = false;
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<concrete_buffer_overflow> (*this,
							     reg, diag_arg,
							     bits_outside,
							     bit_bound,
							     sval_hint));
	  oob_safe = false;
	  break;
	}
    }
  return oob_safe;
}
1502
1503} // namespace ana
1504
1505#endif /* #if ENABLE_ANALYZER */
1506

/* source code of gcc/analyzer/bounds-checking.cc */