1 | /* Pass to detect and issue warnings for invalid accesses, including |
2 | invalid or mismatched allocation/deallocation calls. |
3 | |
4 | Copyright (C) 2020-2023 Free Software Foundation, Inc. |
5 | Contributed by Martin Sebor <msebor@redhat.com>. |
6 | |
7 | This file is part of GCC. |
8 | |
9 | GCC is free software; you can redistribute it and/or modify it under |
10 | the terms of the GNU General Public License as published by the Free |
11 | Software Foundation; either version 3, or (at your option) any later |
12 | version. |
13 | |
14 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
15 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
17 | for more details. |
18 | |
19 | You should have received a copy of the GNU General Public License |
20 | along with GCC; see the file COPYING3. If not see |
21 | <http://www.gnu.org/licenses/>. */ |
22 | |
23 | #define INCLUDE_STRING |
24 | #include "config.h" |
25 | #include "system.h" |
26 | #include "coretypes.h" |
27 | #include "backend.h" |
28 | #include "tree.h" |
29 | #include "gimple.h" |
30 | #include "tree-pass.h" |
31 | #include "builtins.h" |
32 | #include "diagnostic.h" |
33 | #include "ssa.h" |
34 | #include "gimple-pretty-print.h" |
35 | #include "gimple-ssa-warn-access.h" |
36 | #include "gimple-ssa-warn-restrict.h" |
37 | #include "diagnostic-core.h" |
38 | #include "fold-const.h" |
39 | #include "gimple-iterator.h" |
40 | #include "gimple-fold.h" |
41 | #include "langhooks.h" |
42 | #include "memmodel.h" |
43 | #include "target.h" |
44 | #include "tree-dfa.h" |
45 | #include "tree-ssa.h" |
46 | #include "tree-cfg.h" |
47 | #include "tree-object-size.h" |
48 | #include "tree-ssa-strlen.h" |
49 | #include "calls.h" |
50 | #include "cfganal.h" |
51 | #include "intl.h" |
52 | #include "gimple-range.h" |
53 | #include "stringpool.h" |
54 | #include "attribs.h" |
55 | #include "demangle.h" |
56 | #include "attr-fnspec.h" |
57 | #include "pointer-query.h" |
58 | |
59 | /* Return true if tree node X has an associated location. */ |
60 | |
61 | static inline location_t |
62 | has_location (const_tree x) |
63 | { |
64 | if (DECL_P (x)) |
65 | return DECL_SOURCE_LOCATION (x) != UNKNOWN_LOCATION; |
66 | |
67 | if (EXPR_P (x)) |
68 | return EXPR_HAS_LOCATION (x); |
69 | |
70 | return false; |
71 | } |
72 | |
73 | /* Return the associated location of STMT. */ |
74 | |
75 | static inline location_t |
76 | get_location (const gimple *stmt) |
77 | { |
78 | return gimple_location (g: stmt); |
79 | } |
80 | |
81 | /* Return the associated location of tree node X. */ |
82 | |
83 | static inline location_t |
84 | get_location (tree x) |
85 | { |
86 | if (DECL_P (x)) |
87 | return DECL_SOURCE_LOCATION (x); |
88 | |
89 | if (EXPR_P (x)) |
90 | return EXPR_LOCATION (x); |
91 | |
92 | return UNKNOWN_LOCATION; |
93 | } |
94 | |
95 | /* Overload of the nascent tree function for GIMPLE STMT. */ |
96 | |
97 | static inline tree |
98 | get_callee_fndecl (const gimple *stmt) |
99 | { |
100 | return gimple_call_fndecl (gs: stmt); |
101 | } |
102 | |
103 | static inline unsigned |
104 | call_nargs (const gimple *stmt) |
105 | { |
106 | return gimple_call_num_args (gs: stmt); |
107 | } |
108 | |
109 | static inline unsigned |
110 | call_nargs (const_tree expr) |
111 | { |
112 | return call_expr_nargs (expr); |
113 | } |
114 | |
115 | |
116 | static inline tree |
117 | call_arg (const gimple *stmt, unsigned argno) |
118 | { |
119 | return gimple_call_arg (gs: stmt, index: argno); |
120 | } |
121 | |
122 | static inline tree |
123 | call_arg (tree expr, unsigned argno) |
124 | { |
125 | return CALL_EXPR_ARG (expr, argno); |
126 | } |
127 | |
/* For a call EXPR at LOC to a function FNAME that expects a string
   in the argument ARG, issue a diagnostic due to it being called
   with an argument that is a character array with no terminating
   NUL.  SIZE is the EXACT size of the array, and BNDRNG the number
   of characters in which the NUL is expected.  Either EXPR or FNAME
   may be null but not both.  SIZE may be null when BNDRNG is null.  */
134 | |
template <class GimpleOrTree>
static void
warn_string_no_nul (location_t loc, GimpleOrTree expr, const char *fname,
		    tree arg, tree decl, tree size, bool exact,
		    const wide_int bndrng[2] /* = NULL */)
{
  /* Nothing to do when the warning is suppressed for either the call
     or the offending argument.  */
  const opt_code opt = OPT_Wstringop_overread;
  if ((expr && warning_suppressed_p (expr, opt))
      || warning_suppressed_p (arg, opt))
    return;

  loc = expansion_point_location_if_in_system_header (loc);
  /* Set on every path below before it's read at the end.  */
  bool warned;

  /* Format the bound range as a string to keep the number of messages
     from exploding.  */
  char bndstr[80];
  *bndstr = 0;
  if (bndrng)
    {
      if (bndrng[0] == bndrng[1])
	sprintf (s: bndstr, format: "%llu" , (unsigned long long) bndrng[0].to_uhwi ());
      else
	sprintf (s: bndstr, format: "[%llu, %llu]" ,
		 (unsigned long long) bndrng[0].to_uhwi (),
		 (unsigned long long) bndrng[1].to_uhwi ());
    }

  auto_diagnostic_group d;

  const tree maxobjsize = max_object_size ();
  const wide_int maxsiz = wi::to_wide (t: maxobjsize);
  if (expr)
    {
      /* A call expression is available: refer to the callee with %qD.  */
      tree func = get_callee_fndecl (expr);
      if (bndrng)
	{
	  /* A bound in excess of the maximum object size gets its own
	     message; otherwise relate the bound to the array size.  */
	  if (wi::ltu_p (x: maxsiz, y: bndrng[0]))
	    warned = warning_at (loc, opt,
				 "%qD specified bound %s exceeds "
				 "maximum object size %E" ,
				 func, bndstr, maxobjsize);
	  else
	    {
	      /* Use the "may exceed" form when the bound's lower end only
		 just matches the (possibly inexact) array size.  */
	      bool maybe = wi::to_wide (t: size) == bndrng[0];
	      warned = warning_at (loc, opt,
				   exact
				   ? G_("%qD specified bound %s exceeds "
					"the size %E of unterminated array" )
				   : (maybe
				      ? G_("%qD specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array" )
				      : G_("%qD specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array" )),
				   func, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, opt,
			     "%qD argument missing terminating nul" ,
			     func);
    }
  else
    {
      /* No call expression: refer to the function by name (FNAME).  */
      if (bndrng)
	{
	  if (wi::ltu_p (x: maxsiz, y: bndrng[0]))
	    warned = warning_at (loc, opt,
				 "%qs specified bound %s exceeds "
				 "maximum object size %E" ,
				 fname, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (t: size) == bndrng[0];
	      warned = warning_at (loc, opt,
				   exact
				   ? G_("%qs specified bound %s exceeds "
					"the size %E of unterminated array" )
				   : (maybe
				      ? G_("%qs specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array" )
				      : G_("%qs specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array" )),
				   fname, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, opt,
			     "%qs argument missing terminating nul" ,
			     fname);
    }

  if (warned)
    {
      /* Point at the array's declaration and suppress duplicates for
	 the same argument and call.  */
      inform (get_location (x: decl),
	      "referenced argument declared here" );
      suppress_warning (arg, opt);
      if (expr)
	suppress_warning (expr, opt);
    }
}
240 | |
241 | void |
242 | warn_string_no_nul (location_t loc, gimple *stmt, const char *fname, |
243 | tree arg, tree decl, tree size /* = NULL_TREE */, |
244 | bool exact /* = false */, |
245 | const wide_int bndrng[2] /* = NULL */) |
246 | { |
247 | return warn_string_no_nul<gimple *> (loc, expr: stmt, fname, |
248 | arg, decl, size, exact, bndrng); |
249 | } |
250 | |
251 | void |
252 | warn_string_no_nul (location_t loc, tree expr, const char *fname, |
253 | tree arg, tree decl, tree size /* = NULL_TREE */, |
254 | bool exact /* = false */, |
255 | const wide_int bndrng[2] /* = NULL */) |
256 | { |
257 | return warn_string_no_nul<tree> (loc, expr, fname, |
258 | arg, decl, size, exact, bndrng); |
259 | } |
260 | |
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  /* A nonnull LEN means EXP is a nul-terminated string; without both
     MINLEN and DECL there is no unterminated array to report.  */
  if (len || !lendata.minlen || !lendata.decl)
    return NULL_TREE;

  /* Callers that don't care about the size get just the declaration.  */
  if (!size)
    return lendata.decl;

  len = lendata.minlen;
  if (lendata.off)
    {
      /* Constant offsets are already accounted for in LENDATA.MINLEN,
	 but not in a SSA_NAME + CST expression.  */
      if (TREE_CODE (lendata.off) == INTEGER_CST)
	*exact = true;
      else if (TREE_CODE (lendata.off) == PLUS_EXPR
	       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
	{
	  /* Subtract the offset from the size of the array.  */
	  *exact = false;
	  tree temp = TREE_OPERAND (lendata.off, 1);
	  temp = fold_convert (ssizetype, temp);
	  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
	}
      else
	/* A fully variable offset makes the size an upper bound only.  */
	*exact = false;
    }
  else
    *exact = true;

  *size = len;
  return lendata.decl;
}
305 | |
/* For a call EXPR (which may be null) that expects a string argument
   SRC as an argument, returns false if SRC is a character array with
   no terminating NUL.  When nonnull, BOUND is the number of characters
   in which to expect the terminating NUL.  When EXPR is nonnull also
   issues a warning.  */

template <class GimpleOrTree>
static bool
check_nul_terminated_array (GimpleOrTree expr, tree src, tree bound)
{
  /* The constant size of the array SRC points to.  The actual size
     may be less if EXACT is true, but not more.  */
  tree size;
  /* True when SIZE is the exact size of the array, i.e., when SRC
     involves no variable offset into it.  */
  bool exact;
  /* The unterminated constant array SRC points to.  */
  tree nonstr = unterminated_array (exp: src, size: &size, exact: &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  wide_int bndrng[2];
  if (bound)
    {
      Value_Range r (TREE_TYPE (bound));

      get_range_query (cfun)->range_of_expr (r, expr: bound);

      /* A fully unknown bound cannot be diagnosed reliably.  */
      if (r.undefined_p () || r.varying_p ())
	return true;

      bndrng[0] = r.lower_bound ();
      bndrng[1] = r.upper_bound ();

      /* The access is fine if even the least bound stays within
	 the array.  */
      if (exact)
	{
	  if (wi::leu_p (x: bndrng[0], y: wi::to_wide (t: size)))
	    return true;
	}
      else if (wi::lt_p (x: bndrng[0], y: wi::to_wide (t: size), sgn: UNSIGNED))
	return true;
    }

  if (expr)
    warn_string_no_nul (get_location (expr), expr, NULL, src, nonstr,
			size, exact, bound ? bndrng : NULL);

  return false;
}
358 | |
359 | bool |
360 | check_nul_terminated_array (gimple *stmt, tree src, tree bound /* = NULL_TREE */) |
361 | { |
362 | return check_nul_terminated_array<gimple *>(expr: stmt, src, bound); |
363 | } |
364 | |
365 | bool |
366 | check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */) |
367 | { |
368 | return check_nul_terminated_array<tree>(expr, src, bound); |
369 | } |
370 | |
/* Warn about passing a non-string array/pointer to a built-in function
   that expects a nul-terminated string argument.  Returns true if
   a warning has been issued.  */

template <class GimpleOrTree>
static bool
maybe_warn_nonstring_arg (tree fndecl, GimpleOrTree exp)
{
  /* Only normal built-ins have known string semantics.  */
  if (!fndecl || !fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL))
    return false;

  if (!warn_stringop_overread
      || warning_suppressed_p (exp, OPT_Wstringop_overread))
    return false;

  /* Avoid clearly invalid calls (more checking done below).  */
  unsigned nargs = call_nargs (exp);
  if (!nargs)
    return false;

  /* The bound argument to a bounded string function like strncpy.  */
  tree bound = NULL_TREE;

  /* The longest known or possible string argument to one of the comparison
     functions.  If the length is less than the bound it is used instead.
     Since the length is only used for warning and not for code generation
     disable strict mode in the calls to get_range_strlen below.  */
  tree maxlen = NULL_TREE;

  /* It's safe to call "bounded" string functions with a non-string
     argument since the functions provide an explicit bound for this
     purpose.  The exception is strncat where the bound may refer to
     either the destination or the source.  */
  int fncode = DECL_FUNCTION_CODE (decl: fndecl);
  switch (fncode)
    {
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      {
	/* For these, if one argument refers to one or more of a set
	   of string constants or arrays of known size, determine
	   the range of their known or possible lengths and use it
	   conservatively as the bound for the unbounded function,
	   and to adjust the range of the bound of the bounded ones.  */
	for (unsigned argno = 0;
	     argno < MIN (nargs, 2)
	       && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
	  {
	    tree arg = call_arg (exp, argno);
	    if (!get_attr_nonstring_decl (arg))
	      {
		c_strlen_data lendata = { };
		/* Set MAXBOUND to an arbitrary non-null non-integer
		   node as a request to have it set to the length of
		   the longest string in a PHI.  */
		lendata.maxbound = arg;
		get_range_strlen (arg, &lendata, /* eltsize = */ 1);
		maxlen = lendata.maxbound;
	      }
	  }
      }
      /* Fall through.  */

    case BUILT_IN_STRNCAT:
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STRNCPY:
      /* The bound, when present, is the third argument.  */
      if (nargs > 2)
	bound = call_arg (exp, 2);
      break;

    case BUILT_IN_STRNDUP:
      if (nargs < 2)
	return false;
      bound = call_arg (exp, 1);
      break;

    case BUILT_IN_STRNLEN:
      {
	tree arg = call_arg (exp, 0);
	if (!get_attr_nonstring_decl (arg))
	  {
	    c_strlen_data lendata = { };
	    /* Set MAXBOUND to an arbitrary non-null non-integer
	       node as a request to have it set to the length of
	       the longest string in a PHI.  */
	    lendata.maxbound = arg;
	    get_range_strlen (arg, &lendata, /* eltsize = */ 1);
	    maxlen = lendata.maxbound;
	  }
	if (nargs > 1)
	  bound = call_arg (exp, 1);
	break;
      }

    default:
      break;
    }

  /* Determine the range of the bound argument (if specified).  */
  tree bndrng[2] = { NULL_TREE, NULL_TREE };
  if (bound)
    {
      STRIP_NOPS (bound);
      get_size_range (bound, bndrng);
    }

  location_t loc = get_location (exp);

  if (bndrng[0])
    {
      /* Diagnose excessive bound prior to the adjustment below and
	 regardless of attribute nonstring.  */
      tree maxobjsize = max_object_size ();
      if (tree_int_cst_lt (t1: maxobjsize, t2: bndrng[0]))
	{
	  bool warned = false;
	  if (tree_int_cst_equal (bndrng[0], bndrng[1]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qD specified bound %E "
				 "exceeds maximum object size %E" ,
				 fndecl, bndrng[0], maxobjsize);
	  else
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qD specified bound [%E, %E] "
				 "exceeds maximum object size %E" ,
				 fndecl, bndrng[0], bndrng[1],
				 maxobjsize);
	  if (warned)
	    suppress_warning (exp, OPT_Wstringop_overread);

	  return warned;
	}
    }

  /* An all-ones MAXLEN means the length couldn't be determined.  */
  if (maxlen && !integer_all_onesp (maxlen))
    {
      /* Add one for the nul.  */
      maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
			    size_one_node);

      if (!bndrng[0])
	{
	  /* Conservatively use the upper bound of the lengths for
	     both the lower and the upper bound of the operation.  */
	  bndrng[0] = maxlen;
	  bndrng[1] = maxlen;
	  /* VOID_TYPE_NODE marks the bound as having been synthesized
	     from the string length rather than given by the call.  */
	  bound = void_type_node;
	}
      else if (maxlen)
	{
	  /* Replace the bound on the operation with the upper bound
	     of the length of the string if the latter is smaller.  */
	  if (tree_int_cst_lt (t1: maxlen, t2: bndrng[0]))
	    bndrng[0] = maxlen;
	  else if (tree_int_cst_lt (t1: maxlen, t2: bndrng[1]))
	    bndrng[1] = maxlen;
	}
    }

  bool any_arg_warned = false;
  /* Iterate over the built-in function's formal arguments and check
     each const char* against the actual argument.  If the actual
     argument is declared attribute non-string issue a warning unless
     the argument's maximum length is bounded.  */
  function_args_iterator it;
  function_args_iter_init (&it, TREE_TYPE (fndecl));

  for (unsigned argno = 0; ; ++argno, function_args_iter_next (i: &it))
    {
      /* Avoid iterating past the declared argument in a call
	 to function declared without a prototype.  */
      if (argno >= nargs)
	break;

      tree argtype = function_args_iter_cond (i: &it);
      if (!argtype)
	break;

      /* Only pointer-to-const-char parameters are of interest.  */
      if (TREE_CODE (argtype) != POINTER_TYPE)
	continue;

      argtype = TREE_TYPE (argtype);

      if (TREE_CODE (argtype) != INTEGER_TYPE
	  || !TYPE_READONLY (argtype))
	continue;

      argtype = TYPE_MAIN_VARIANT (argtype);
      if (argtype != char_type_node)
	continue;

      tree callarg = call_arg (exp, argno);
      if (TREE_CODE (callarg) == ADDR_EXPR)
	callarg = TREE_OPERAND (callarg, 0);

      /* See if the destination is declared with attribute "nonstring".  */
      tree decl = get_attr_nonstring_decl (callarg);
      if (!decl)
	continue;

      /* The maximum number of array elements accessed.  */
      offset_int wibnd = 0;

      if (argno && fncode == BUILT_IN_STRNCAT)
	{
	  /* See if the bound in strncat is derived from the length
	     of the strlen of the destination (as it's expected to be).
	     If so, reset BOUND and FNCODE to trigger a warning.  */
	  tree dstarg = call_arg (exp, 0);
	  if (is_strlen_related_p (dstarg, bound))
	    {
	      /* The bound applies to the destination, not to the source,
		 so reset these to trigger a warning without mentioning
		 the bound.  */
	      bound = NULL;
	      fncode = 0;
	    }
	  else if (bndrng[1])
	    /* Use the upper bound of the range for strncat.  */
	    wibnd = wi::to_offset (t: bndrng[1]);
	}
      else if (bndrng[0])
	/* Use the lower bound of the range for functions other than
	   strncat.  */
	wibnd = wi::to_offset (t: bndrng[0]);

      /* Determine the size of the argument array if it is one.  */
      offset_int asize = wibnd;
      bool known_size = false;
      tree type = TREE_TYPE (decl);

      /* Determine the array size.  For arrays of unknown bound and
	 pointers reset BOUND to trigger the appropriate warning.  */
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  if (tree arrbnd = TYPE_DOMAIN (type))
	    {
	      if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
		{
		  /* The size is the upper bound plus one element.  */
		  asize = wi::to_offset (t: arrbnd) + 1;
		  known_size = true;
		}
	    }
	  else if (bound == void_type_node)
	    bound = NULL_TREE;
	}
      else if (bound == void_type_node)
	bound = NULL_TREE;

      /* In a call to strncat with a bound in a range whose lower but
	 not upper bound is less than the array size, reset ASIZE to
	 be the same as the bound and the other variable to trigger
	 the appropriate warning below.  */
      if (fncode == BUILT_IN_STRNCAT
	  && bndrng[0] != bndrng[1]
	  && wi::ltu_p (x: wi::to_offset (t: bndrng[0]), y: asize)
	  && (!known_size
	      || wi::ltu_p (x: asize, y: wibnd)))
	{
	  asize = wibnd;
	  bound = NULL_TREE;
	  fncode = 0;
	}

      bool warned = false;

      auto_diagnostic_group d;
      if (wi::ltu_p (x: asize, y: wibnd))
	{
	  /* The array is smaller than the (maximum) bound.  */
	  if (bndrng[0] == bndrng[1])
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qD argument %i declared attribute "
				 "%<nonstring%> is smaller than the specified "
				 "bound %wu" ,
				 fndecl, argno + 1, wibnd.to_uhwi ());
	  else if (wi::ltu_p (x: asize, y: wi::to_offset (t: bndrng[0])))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qD argument %i declared attribute "
				 "%<nonstring%> is smaller than "
				 "the specified bound [%E, %E]" ,
				 fndecl, argno + 1, bndrng[0], bndrng[1]);
	  else
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qD argument %i declared attribute "
				 "%<nonstring%> may be smaller than "
				 "the specified bound [%E, %E]" ,
				 fndecl, argno + 1, bndrng[0], bndrng[1]);
	}
      else if (fncode == BUILT_IN_STRNCAT)
	; /* Avoid warning for calls to strncat() when the bound
	     is equal to the size of the non-string argument.  */
      else if (!bound)
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%qD argument %i declared attribute %<nonstring%>" ,
			     fndecl, argno + 1);

      if (warned)
	{
	  inform (DECL_SOURCE_LOCATION (decl),
		  "argument %qD declared here" , decl);
	  any_arg_warned = true;
	}
    }

  if (any_arg_warned)
    suppress_warning (exp, OPT_Wstringop_overread);

  return any_arg_warned;
}
681 | |
682 | bool |
683 | maybe_warn_nonstring_arg (tree fndecl, gimple *stmt) |
684 | { |
685 | return maybe_warn_nonstring_arg<gimple *>(fndecl, exp: stmt); |
686 | } |
687 | |
688 | |
689 | bool |
690 | maybe_warn_nonstring_arg (tree fndecl, tree expr) |
691 | { |
692 | return maybe_warn_nonstring_arg<tree>(fndecl, exp: expr); |
693 | } |
694 | |
695 | /* Issue a warning OPT for a bounded call EXP with a bound in RANGE |
696 | accessing an object with SIZE. */ |
697 | |
698 | template <class GimpleOrTree> |
699 | static bool |
700 | maybe_warn_for_bound (opt_code opt, location_t loc, GimpleOrTree exp, tree func, |
701 | tree bndrng[2], tree size, const access_data *pad) |
702 | { |
703 | if (!bndrng[0] || warning_suppressed_p (exp, opt)) |
704 | return false; |
705 | |
706 | tree maxobjsize = max_object_size (); |
707 | |
708 | bool warned = false; |
709 | |
710 | if (opt == OPT_Wstringop_overread) |
711 | { |
712 | bool maybe = pad && pad->src.phi (); |
713 | if (maybe) |
714 | { |
715 | /* Issue a "maybe" warning only if the PHI refers to objects |
716 | at least one of which has more space remaining than the bound. |
717 | Otherwise, if the bound is greater, use the definitive form. */ |
718 | offset_int remmax = pad->src.size_remaining (); |
719 | if (remmax < wi::to_offset (t: bndrng[0])) |
720 | maybe = false; |
721 | } |
722 | |
723 | auto_diagnostic_group d; |
724 | if (tree_int_cst_lt (t1: maxobjsize, t2: bndrng[0])) |
725 | { |
726 | if (bndrng[0] == bndrng[1]) |
727 | warned = (func |
728 | ? warning_at (loc, opt, |
729 | (maybe |
730 | ? G_("%qD specified bound %E may " |
731 | "exceed maximum object size %E" ) |
732 | : G_("%qD specified bound %E " |
733 | "exceeds maximum object size %E" )), |
734 | func, bndrng[0], maxobjsize) |
735 | : warning_at (loc, opt, |
736 | (maybe |
737 | ? G_("specified bound %E may " |
738 | "exceed maximum object size %E" ) |
739 | : G_("specified bound %E " |
740 | "exceeds maximum object size %E" )), |
741 | bndrng[0], maxobjsize)); |
742 | else |
743 | warned = (func |
744 | ? warning_at (loc, opt, |
745 | (maybe |
746 | ? G_("%qD specified bound [%E, %E] may " |
747 | "exceed maximum object size %E" ) |
748 | : G_("%qD specified bound [%E, %E] " |
749 | "exceeds maximum object size %E" )), |
750 | func, |
751 | bndrng[0], bndrng[1], maxobjsize) |
752 | : warning_at (loc, opt, |
753 | (maybe |
754 | ? G_("specified bound [%E, %E] may " |
755 | "exceed maximum object size %E" ) |
756 | : G_("specified bound [%E, %E] " |
757 | "exceeds maximum object size %E" )), |
758 | bndrng[0], bndrng[1], maxobjsize)); |
759 | } |
760 | else if (!size || tree_int_cst_le (t1: bndrng[0], t2: size)) |
761 | return false; |
762 | else if (tree_int_cst_equal (bndrng[0], bndrng[1])) |
763 | warned = (func |
764 | ? warning_at (loc, opt, |
765 | (maybe |
766 | ? G_("%qD specified bound %E may exceed " |
767 | "source size %E" ) |
768 | : G_("%qD specified bound %E exceeds " |
769 | "source size %E" )), |
770 | func, bndrng[0], size) |
771 | : warning_at (loc, opt, |
772 | (maybe |
773 | ? G_("specified bound %E may exceed " |
774 | "source size %E" ) |
775 | : G_("specified bound %E exceeds " |
776 | "source size %E" )), |
777 | bndrng[0], size)); |
778 | else |
779 | warned = (func |
780 | ? warning_at (loc, opt, |
781 | (maybe |
782 | ? G_("%qD specified bound [%E, %E] may " |
783 | "exceed source size %E" ) |
784 | : G_("%qD specified bound [%E, %E] exceeds " |
785 | "source size %E" )), |
786 | func, bndrng[0], bndrng[1], size) |
787 | : warning_at (loc, opt, |
788 | (maybe |
789 | ? G_("specified bound [%E, %E] may exceed " |
790 | "source size %E" ) |
791 | : G_("specified bound [%E, %E] exceeds " |
792 | "source size %E" )), |
793 | bndrng[0], bndrng[1], size)); |
794 | if (warned) |
795 | { |
796 | if (pad && pad->src.ref |
797 | && has_location (x: pad->src.ref)) |
798 | inform (get_location (x: pad->src.ref), |
799 | "source object allocated here" ); |
800 | suppress_warning (exp, opt); |
801 | } |
802 | |
803 | return warned; |
804 | } |
805 | |
806 | bool maybe = pad && pad->dst.phi (); |
807 | if (maybe) |
808 | { |
809 | /* Issue a "maybe" warning only if the PHI refers to objects |
810 | at least one of which has more space remaining than the bound. |
811 | Otherwise, if the bound is greater, use the definitive form. */ |
812 | offset_int remmax = pad->dst.size_remaining (); |
813 | if (remmax < wi::to_offset (t: bndrng[0])) |
814 | maybe = false; |
815 | } |
816 | if (tree_int_cst_lt (t1: maxobjsize, t2: bndrng[0])) |
817 | { |
818 | if (bndrng[0] == bndrng[1]) |
819 | warned = (func |
820 | ? warning_at (loc, opt, |
821 | (maybe |
822 | ? G_("%qD specified size %E may " |
823 | "exceed maximum object size %E" ) |
824 | : G_("%qD specified size %E " |
825 | "exceeds maximum object size %E" )), |
826 | func, bndrng[0], maxobjsize) |
827 | : warning_at (loc, opt, |
828 | (maybe |
829 | ? G_("specified size %E may exceed " |
830 | "maximum object size %E" ) |
831 | : G_("specified size %E exceeds " |
832 | "maximum object size %E" )), |
833 | bndrng[0], maxobjsize)); |
834 | else |
835 | warned = (func |
836 | ? warning_at (loc, opt, |
837 | (maybe |
838 | ? G_("%qD specified size between %E and %E " |
839 | "may exceed maximum object size %E" ) |
840 | : G_("%qD specified size between %E and %E " |
841 | "exceeds maximum object size %E" )), |
842 | func, bndrng[0], bndrng[1], maxobjsize) |
843 | : warning_at (loc, opt, |
844 | (maybe |
845 | ? G_("specified size between %E and %E " |
846 | "may exceed maximum object size %E" ) |
847 | : G_("specified size between %E and %E " |
848 | "exceeds maximum object size %E" )), |
849 | bndrng[0], bndrng[1], maxobjsize)); |
850 | } |
851 | else if (!size || tree_int_cst_le (t1: bndrng[0], t2: size)) |
852 | return false; |
853 | else if (tree_int_cst_equal (bndrng[0], bndrng[1])) |
854 | warned = (func |
855 | ? warning_at (loc, opt, |
856 | (maybe |
857 | ? G_("%qD specified bound %E may exceed " |
858 | "destination size %E" ) |
859 | : G_("%qD specified bound %E exceeds " |
860 | "destination size %E" )), |
861 | func, bndrng[0], size) |
862 | : warning_at (loc, opt, |
863 | (maybe |
864 | ? G_("specified bound %E may exceed " |
865 | "destination size %E" ) |
866 | : G_("specified bound %E exceeds " |
867 | "destination size %E" )), |
868 | bndrng[0], size)); |
869 | else |
870 | warned = (func |
871 | ? warning_at (loc, opt, |
872 | (maybe |
873 | ? G_("%qD specified bound [%E, %E] may exceed " |
874 | "destination size %E" ) |
875 | : G_("%qD specified bound [%E, %E] exceeds " |
876 | "destination size %E" )), |
877 | func, bndrng[0], bndrng[1], size) |
878 | : warning_at (loc, opt, |
879 | (maybe |
880 | ? G_("specified bound [%E, %E] exceeds " |
881 | "destination size %E" ) |
882 | : G_("specified bound [%E, %E] exceeds " |
883 | "destination size %E" )), |
884 | bndrng[0], bndrng[1], size)); |
885 | |
886 | if (warned) |
887 | { |
888 | if (pad && pad->dst.ref |
889 | && has_location (x: pad->dst.ref)) |
890 | inform (get_location (x: pad->dst.ref), |
891 | "destination object allocated here" ); |
892 | suppress_warning (exp, opt); |
893 | } |
894 | |
895 | return warned; |
896 | } |
897 | |
898 | bool |
899 | maybe_warn_for_bound (opt_code opt, location_t loc, gimple *stmt, tree func, |
900 | tree bndrng[2], tree size, |
901 | const access_data *pad /* = NULL */) |
902 | { |
903 | return maybe_warn_for_bound<gimple *> (opt, loc, exp: stmt, func, bndrng, size, |
904 | pad); |
905 | } |
906 | |
907 | bool |
908 | maybe_warn_for_bound (opt_code opt, location_t loc, tree expr, tree func, |
909 | tree bndrng[2], tree size, |
910 | const access_data *pad /* = NULL */) |
911 | { |
912 | return maybe_warn_for_bound<tree> (opt, loc, exp: expr, func, bndrng, size, pad); |
913 | } |
914 | |
915 | /* For an expression EXP issue an access warning controlled by option OPT |
916 | with access to a region SIZE bytes in size in the RANGE of sizes. |
917 | WRITE is true for a write access, READ for a read access, neither for |
918 | call that may or may not perform an access but for which the range |
919 | is expected to valid. |
920 | Returns true when a warning has been issued. */ |
921 | |
922 | template <class GimpleOrTree> |
923 | static bool |
924 | warn_for_access (location_t loc, tree func, GimpleOrTree exp, int opt, |
925 | tree range[2], tree size, bool write, bool read, bool maybe) |
926 | { |
927 | bool warned = false; |
928 | |
929 | if (write && read) |
930 | { |
931 | if (tree_int_cst_equal (range[0], range[1])) |
932 | warned = (func |
933 | ? warning_n (loc, opt, tree_to_uhwi (range[0]), |
934 | (maybe |
935 | ? G_("%qD may access %E byte in a region " |
936 | "of size %E" ) |
937 | : G_("%qD accessing %E byte in a region " |
938 | "of size %E" )), |
939 | (maybe |
940 | ? G_ ("%qD may access %E bytes in a region " |
941 | "of size %E" ) |
942 | : G_ ("%qD accessing %E bytes in a region " |
943 | "of size %E" )), |
944 | func, range[0], size) |
945 | : warning_n (loc, opt, tree_to_uhwi (range[0]), |
946 | (maybe |
947 | ? G_("may access %E byte in a region " |
948 | "of size %E" ) |
949 | : G_("accessing %E byte in a region " |
950 | "of size %E" )), |
951 | (maybe |
952 | ? G_("may access %E bytes in a region " |
953 | "of size %E" ) |
954 | : G_("accessing %E bytes in a region " |
955 | "of size %E" )), |
956 | range[0], size)); |
957 | else if (tree_int_cst_sign_bit (range[1])) |
958 | { |
959 | /* Avoid printing the upper bound if it's invalid. */ |
960 | warned = (func |
961 | ? warning_at (loc, opt, |
962 | (maybe |
963 | ? G_("%qD may access %E or more bytes " |
964 | "in a region of size %E" ) |
965 | : G_("%qD accessing %E or more bytes " |
966 | "in a region of size %E" )), |
967 | func, range[0], size) |
968 | : warning_at (loc, opt, |
969 | (maybe |
970 | ? G_("may access %E or more bytes " |
971 | "in a region of size %E" ) |
972 | : G_("accessing %E or more bytes " |
973 | "in a region of size %E" )), |
974 | range[0], size)); |
975 | } |
976 | else |
977 | warned = (func |
978 | ? warning_at (loc, opt, |
979 | (maybe |
980 | ? G_("%qD may access between %E and %E " |
981 | "bytes in a region of size %E" ) |
982 | : G_("%qD accessing between %E and %E " |
983 | "bytes in a region of size %E" )), |
984 | func, range[0], range[1], size) |
985 | : warning_at (loc, opt, |
986 | (maybe |
987 | ? G_("may access between %E and %E bytes " |
988 | "in a region of size %E" ) |
989 | : G_("accessing between %E and %E bytes " |
990 | "in a region of size %E" )), |
991 | range[0], range[1], size)); |
992 | return warned; |
993 | } |
994 | |
995 | if (write) |
996 | { |
997 | if (tree_int_cst_equal (range[0], range[1])) |
998 | warned = (func |
999 | ? warning_n (loc, opt, tree_to_uhwi (range[0]), |
1000 | (maybe |
1001 | ? G_("%qD may write %E byte into a region " |
1002 | "of size %E" ) |
1003 | : G_("%qD writing %E byte into a region " |
1004 | "of size %E overflows the destination" )), |
1005 | (maybe |
1006 | ? G_("%qD may write %E bytes into a region " |
1007 | "of size %E" ) |
1008 | : G_("%qD writing %E bytes into a region " |
1009 | "of size %E overflows the destination" )), |
1010 | func, range[0], size) |
1011 | : warning_n (loc, opt, tree_to_uhwi (range[0]), |
1012 | (maybe |
1013 | ? G_("may write %E byte into a region " |
1014 | "of size %E" ) |
1015 | : G_("writing %E byte into a region " |
1016 | "of size %E overflows the destination" )), |
1017 | (maybe |
1018 | ? G_("may write %E bytes into a region " |
1019 | "of size %E" ) |
1020 | : G_("writing %E bytes into a region " |
1021 | "of size %E overflows the destination" )), |
1022 | range[0], size)); |
1023 | else if (tree_int_cst_sign_bit (range[1])) |
1024 | { |
1025 | /* Avoid printing the upper bound if it's invalid. */ |
1026 | warned = (func |
1027 | ? warning_at (loc, opt, |
1028 | (maybe |
1029 | ? G_("%qD may write %E or more bytes " |
1030 | "into a region of size %E" ) |
1031 | : G_("%qD writing %E or more bytes " |
1032 | "into a region of size %E overflows " |
1033 | "the destination" )), |
1034 | func, range[0], size) |
1035 | : warning_at (loc, opt, |
1036 | (maybe |
1037 | ? G_("may write %E or more bytes into " |
1038 | "a region of size %E" ) |
1039 | : G_("writing %E or more bytes into " |
1040 | "a region of size %E overflows " |
1041 | "the destination" )), |
1042 | range[0], size)); |
1043 | } |
1044 | else |
1045 | warned = (func |
1046 | ? warning_at (loc, opt, |
1047 | (maybe |
1048 | ? G_("%qD may write between %E and %E bytes " |
1049 | "into a region of size %E" ) |
1050 | : G_("%qD writing between %E and %E bytes " |
1051 | "into a region of size %E overflows " |
1052 | "the destination" )), |
1053 | func, range[0], range[1], size) |
1054 | : warning_at (loc, opt, |
1055 | (maybe |
1056 | ? G_("may write between %E and %E bytes " |
1057 | "into a region of size %E" ) |
1058 | : G_("writing between %E and %E bytes " |
1059 | "into a region of size %E overflows " |
1060 | "the destination" )), |
1061 | range[0], range[1], size)); |
1062 | return warned; |
1063 | } |
1064 | |
1065 | if (read) |
1066 | { |
1067 | if (tree_int_cst_equal (range[0], range[1])) |
1068 | warned = (func |
1069 | ? warning_n (loc, OPT_Wstringop_overread, |
1070 | tree_to_uhwi (range[0]), |
1071 | (maybe |
1072 | ? G_("%qD may read %E byte from a region " |
1073 | "of size %E" ) |
1074 | : G_("%qD reading %E byte from a region " |
1075 | "of size %E" )), |
1076 | (maybe |
1077 | ? G_("%qD may read %E bytes from a region " |
1078 | "of size %E" ) |
1079 | : G_("%qD reading %E bytes from a region " |
1080 | "of size %E" )), |
1081 | func, range[0], size) |
1082 | : warning_n (loc, OPT_Wstringop_overread, |
1083 | tree_to_uhwi (range[0]), |
1084 | (maybe |
1085 | ? G_("may read %E byte from a region " |
1086 | "of size %E" ) |
1087 | : G_("reading %E byte from a region " |
1088 | "of size %E" )), |
1089 | (maybe |
1090 | ? G_("may read %E bytes from a region " |
1091 | "of size %E" ) |
1092 | : G_("reading %E bytes from a region " |
1093 | "of size %E" )), |
1094 | range[0], size)); |
1095 | else if (tree_int_cst_sign_bit (range[1])) |
1096 | { |
1097 | /* Avoid printing the upper bound if it's invalid. */ |
1098 | warned = (func |
1099 | ? warning_at (loc, OPT_Wstringop_overread, |
1100 | (maybe |
1101 | ? G_("%qD may read %E or more bytes " |
1102 | "from a region of size %E" ) |
1103 | : G_("%qD reading %E or more bytes " |
1104 | "from a region of size %E" )), |
1105 | func, range[0], size) |
1106 | : warning_at (loc, OPT_Wstringop_overread, |
1107 | (maybe |
1108 | ? G_("may read %E or more bytes " |
1109 | "from a region of size %E" ) |
1110 | : G_("reading %E or more bytes " |
1111 | "from a region of size %E" )), |
1112 | range[0], size)); |
1113 | } |
1114 | else |
1115 | warned = (func |
1116 | ? warning_at (loc, OPT_Wstringop_overread, |
1117 | (maybe |
1118 | ? G_("%qD may read between %E and %E bytes " |
1119 | "from a region of size %E" ) |
1120 | : G_("%qD reading between %E and %E bytes " |
1121 | "from a region of size %E" )), |
1122 | func, range[0], range[1], size) |
1123 | : warning_at (loc, opt, |
1124 | (maybe |
1125 | ? G_("may read between %E and %E bytes " |
1126 | "from a region of size %E" ) |
1127 | : G_("reading between %E and %E bytes " |
1128 | "from a region of size %E" )), |
1129 | range[0], range[1], size)); |
1130 | |
1131 | if (warned) |
1132 | suppress_warning (exp, OPT_Wstringop_overread); |
1133 | |
1134 | return warned; |
1135 | } |
1136 | |
1137 | if (tree_int_cst_equal (range[0], range[1]) |
1138 | || tree_int_cst_sign_bit (range[1])) |
1139 | warned = (func |
1140 | ? warning_n (loc, OPT_Wstringop_overread, |
1141 | tree_to_uhwi (range[0]), |
1142 | "%qD expecting %E byte in a region of size %E" , |
1143 | "%qD expecting %E bytes in a region of size %E" , |
1144 | func, range[0], size) |
1145 | : warning_n (loc, OPT_Wstringop_overread, |
1146 | tree_to_uhwi (range[0]), |
1147 | "expecting %E byte in a region of size %E" , |
1148 | "expecting %E bytes in a region of size %E" , |
1149 | range[0], size)); |
1150 | else if (tree_int_cst_sign_bit (range[1])) |
1151 | { |
1152 | /* Avoid printing the upper bound if it's invalid. */ |
1153 | warned = (func |
1154 | ? warning_at (loc, OPT_Wstringop_overread, |
1155 | "%qD expecting %E or more bytes in a region " |
1156 | "of size %E" , |
1157 | func, range[0], size) |
1158 | : warning_at (loc, OPT_Wstringop_overread, |
1159 | "expecting %E or more bytes in a region " |
1160 | "of size %E" , |
1161 | range[0], size)); |
1162 | } |
1163 | else |
1164 | warned = (func |
1165 | ? warning_at (loc, OPT_Wstringop_overread, |
1166 | "%qD expecting between %E and %E bytes in " |
1167 | "a region of size %E" , |
1168 | func, range[0], range[1], size) |
1169 | : warning_at (loc, OPT_Wstringop_overread, |
1170 | "expecting between %E and %E bytes in " |
1171 | "a region of size %E" , |
1172 | range[0], range[1], size)); |
1173 | |
1174 | if (warned) |
1175 | suppress_warning (exp, OPT_Wstringop_overread); |
1176 | |
1177 | return warned; |
1178 | } |
1179 | |
1180 | static bool |
1181 | warn_for_access (location_t loc, tree func, gimple *stmt, int opt, |
1182 | tree range[2], tree size, bool write, bool read, bool maybe) |
1183 | { |
1184 | return warn_for_access<gimple *>(loc, func, exp: stmt, opt, range, size, |
1185 | write, read, maybe); |
1186 | } |
1187 | |
1188 | static bool |
1189 | warn_for_access (location_t loc, tree func, tree expr, int opt, |
1190 | tree range[2], tree size, bool write, bool read, bool maybe) |
1191 | { |
1192 | return warn_for_access<tree>(loc, func, exp: expr, opt, range, size, |
1193 | write, read, maybe); |
1194 | } |
1195 | |
1196 | /* Helper to set RANGE to the range of BOUND if it's nonnull, bounded |
1197 | by BNDRNG if nonnull and valid. */ |
1198 | |
1199 | static void |
1200 | get_size_range (range_query *query, tree bound, gimple *stmt, tree range[2], |
1201 | int flags, const offset_int bndrng[2]) |
1202 | { |
1203 | if (bound) |
1204 | get_size_range (query, bound, stmt, range, flags); |
1205 | |
1206 | if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U)) |
1207 | return; |
1208 | |
1209 | if (range[0] && TREE_CODE (range[0]) == INTEGER_CST) |
1210 | { |
1211 | offset_int r[] = |
1212 | { wi::to_offset (t: range[0]), wi::to_offset (t: range[1]) }; |
1213 | if (r[0] < bndrng[0]) |
1214 | range[0] = wide_int_to_tree (sizetype, cst: bndrng[0]); |
1215 | if (bndrng[1] < r[1]) |
1216 | range[1] = wide_int_to_tree (sizetype, cst: bndrng[1]); |
1217 | } |
1218 | else |
1219 | { |
1220 | range[0] = wide_int_to_tree (sizetype, cst: bndrng[0]); |
1221 | range[1] = wide_int_to_tree (sizetype, cst: bndrng[1]); |
1222 | } |
1223 | } |
1224 | |
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DST is the destination of a copy call or NULL otherwise.
   SRC is the source of a copy call or NULL otherwise.
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N).  It specifies the upper limit on the number
   of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
   SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object.

   When DSTWRITE is null LEN is checked to verify that it doesn't exceed
   SIZE_MAX.

   WRITE is true for write accesses, READ is true for reads.  Both are
   false for simple size checks in calls to functions that neither read
   from nor write to the region.

   When nonnull, PAD points to a more detailed description of the access.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

template <class GimpleOrTree>
static bool
check_access (GimpleOrTree exp, tree dstwrite,
	      tree maxread, tree srcstr, tree dstsize,
	      access_mode mode, const access_data *pad,
	      range_query *rvals)
{
  /* The size of the largest object is half the address space, or
     PTRDIFF_MAX.  (This is way too permissive.)  */
  tree maxobjsize = max_object_size ();

  /* Either an approximate/minimum the length of the source string for
     string functions or the size of the source object for raw memory
     functions.  */
  tree slen = NULL_TREE;

  /* The range of the access in bytes; first set to the write access
     for functions that write and then read for those that also (or
     just) read.  */
  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (srcstr)
    {
      /* SRCSTR is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
	{
	  if (!check_nul_terminated_array (exp, srcstr, maxread))
	    /* Return if the array is not nul-terminated and a warning
	       has been issued.  */
	    return false;

	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXREAD add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXREAD as appropriate.  */
	  c_strlen_data lendata = { };
	  get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
	  range[0] = lendata.minlen;
	  range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
	  if (range[0]
	      && TREE_CODE (range[0]) == INTEGER_CST
	      && TREE_CODE (range[1]) == INTEGER_CST
	      && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
	    {
	      if (maxread && tree_int_cst_le (t1: maxread, t2: range[0]))
		range[0] = range[1] = maxread;
	      else
		/* Add one for the terminating nul.  */
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxread && tree_int_cst_le (t1: maxread, t2: range[1]))
		range[1] = maxread;
	      else if (!integer_all_onesp (range[1]))
		/* Add one for the terminating nul unless the upper
		   bound is already the "unknown" all-ones value.  */
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);

	      slen = range[0];
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	/* SRCSTR is an integer: the precomputed size of the source.  */
	slen = srcstr;
    }

  if (!dstwrite && !maxread)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set DSTWRITE to it.  */
      if (!range[0])
	dstwrite = slen;
    }

  /* With no known destination size check against the largest possible
     object size instead.  */
  if (!dstsize)
    dstsize = maxobjsize;

  /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST_BNDRNG
     if valid.  */
  gimple *stmt = pad ? pad->stmt : nullptr;
  get_size_range (query: rvals, bound: dstwrite, stmt, range,
		  /* If the destination has known zero size prefer a zero
		     size range to avoid false positives if that's a
		     possibility.  */
		  flags: integer_zerop (dstsize) ? SR_ALLOW_ZERO : 0,
		  bndrng: pad ? pad->dst_bndrng : NULL);

  tree func = get_callee_fndecl (exp);
  /* Read vs write access by built-ins can be determined from the const
     qualifiers on the pointer argument.  In the absence of attribute
     access, non-const qualified pointer arguments to user-defined
     functions are assumed to both read and write the objects.  */
  const bool builtin = func ? fndecl_built_in_p (node: func) : false;

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0]
      && TREE_CODE (range[0]) == INTEGER_CST
      && tree_int_cst_lt (t1: maxobjsize, t2: range[0]))
    {
      location_t loc = get_location (exp);
      maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
			    NULL_TREE, pad);
      return false;
    }

  /* The number of bytes to write is "exact" if DSTWRITE is non-null,
     constant, and in range of unsigned HOST_WIDE_INT.  */
  bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
    {
      if (range[0]
	  && TREE_CODE (range[0]) == INTEGER_CST
	  && ((tree_fits_uhwi_p (dstsize)
	       && tree_int_cst_lt (t1: dstsize, t2: range[0]))
	      || (dstwrite
		  && tree_fits_uhwi_p (dstwrite)
		  && tree_int_cst_lt (t1: dstwrite, t2: range[0]))))
	{
	  /* Bail if this warning has already been issued for either
	     the call or the destination object.  */
	  const opt_code opt = OPT_Wstringop_overflow_;
	  if (warning_suppressed_p (exp, opt)
	      || (pad && pad->dst.ref
		  && warning_suppressed_p (pad->dst.ref, opt)))
	    return false;

	  auto_diagnostic_group d;
	  location_t loc = get_location (exp);
	  bool warned = false;
	  if (dstwrite == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warned = (func
			? warning_at (loc, opt,
				      "%qD writing %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      func, range[0], dstsize)
			: warning_at (loc, opt,
				      "writing %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      range[0], dstsize));
	    }
	  else
	    {
	      const bool read
		= mode == access_read_only || mode == access_read_write;
	      const bool write
		= mode == access_write_only || mode == access_read_write;
	      const bool maybe = pad && pad->dst.parmarray;
	      warned = warn_for_access (loc, func, exp,
					OPT_Wstringop_overflow_,
					range, dstsize,
					write, read && !builtin, maybe);
	    }

	  if (warned)
	    {
	      suppress_warning (exp, OPT_Wstringop_overflow_);
	      if (pad)
		pad->dst.inform_access (pad->mode);
	    }

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxread)
    {
      /* Set RANGE to that of MAXREAD, bounded by PAD->SRC_BNDRNG if
	 PAD is nonnull and BNDRNG is valid.  */
      get_size_range (query: rvals, bound: maxread, stmt, range, flags: 0,
		      bndrng: pad ? pad->src_bndrng : NULL);

      location_t loc = get_location (exp);
      tree size = dstsize;
      if (pad && pad->mode == access_read_only)
	size = wide_int_to_tree (sizetype, cst: pad->src.size_remaining ());

      if (range[0] && maxread && tree_fits_uhwi_p (size))
	{
	  if (tree_int_cst_lt (t1: maxobjsize, t2: range[0]))
	    {
	      maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
				    range, size, pad);
	      return false;
	    }

	  if (size != maxobjsize && tree_int_cst_lt (t1: size, t2: range[0]))
	    {
	      /* A bound in excess of the object size: classify as an
		 overflow when writing is involved and as an overread
		 otherwise.  */
	      opt_code opt = (dstwrite || mode != access_read_only
			      ? OPT_Wstringop_overflow_
			      : OPT_Wstringop_overread);
	      maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
	      return false;
	    }
	}

      maybe_warn_nonstring_arg (func, exp);
    }

  /* Check for reading past the end of SRC.  */
  bool overread = (slen
		   && slen == srcstr
		   && dstwrite
		   && range[0]
		   && TREE_CODE (slen) == INTEGER_CST
		   && tree_int_cst_lt (t1: slen, t2: range[0]));
  /* If none is determined try to get a better answer based on the details
     in PAD.  */
  if (!overread
      && pad
      && pad->src.sizrng[1] >= 0
      && pad->src.offrng[0] >= 0
      && (pad->src.offrng[1] < 0
	  || pad->src.offrng[0] <= pad->src.offrng[1]))
    {
      /* Set RANGE to that of MAXREAD, bounded by PAD->SRC_BNDRNG if
	 PAD is nonnull and BNDRNG is valid.  */
      get_size_range (query: rvals, bound: maxread, stmt, range, flags: 0,
		      bndrng: pad ? pad->src_bndrng : NULL);
      /* Set OVERREAD for reads starting just past the end of an object.  */
      overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src_bndrng[0];
      range[0] = wide_int_to_tree (sizetype, cst: pad->src_bndrng[0]);
      slen = size_zero_node;
    }

  if (overread)
    {
      /* Bail if this warning has already been issued for the call,
	 the source argument, or the source object.  */
      const opt_code opt = OPT_Wstringop_overread;
      if (warning_suppressed_p (exp, opt)
	  || (srcstr && warning_suppressed_p (srcstr, opt))
	  || (pad && pad->src.ref
	      && warning_suppressed_p (pad->src.ref, opt)))
	return false;

      location_t loc = get_location (exp);
      const bool read
	= mode == access_read_only || mode == access_read_write;
      const bool maybe = pad && pad->dst.parmarray;
      auto_diagnostic_group d;
      if (warn_for_access (loc, func, exp, opt, range, slen, false, read,
			   maybe))
	{
	  suppress_warning (exp, opt);
	  if (pad)
	    pad->src.inform_access (access_read_only);
	}
      return false;
    }

  return true;
}
1532 | |
1533 | static bool |
1534 | check_access (gimple *stmt, tree dstwrite, |
1535 | tree maxread, tree srcstr, tree dstsize, |
1536 | access_mode mode, const access_data *pad, |
1537 | range_query *rvals) |
1538 | { |
1539 | return check_access<gimple *> (exp: stmt, dstwrite, maxread, srcstr, dstsize, |
1540 | mode, pad, rvals); |
1541 | } |
1542 | |
1543 | bool |
1544 | check_access (tree expr, tree dstwrite, |
1545 | tree maxread, tree srcstr, tree dstsize, |
1546 | access_mode mode, const access_data *pad /* = NULL */) |
1547 | { |
1548 | return check_access<tree> (exp: expr, dstwrite, maxread, srcstr, dstsize, |
1549 | mode, pad, rvals: nullptr); |
1550 | } |
1551 | |
1552 | /* Return true if STMT is a call to an allocation function. Unless |
1553 | ALL_ALLOC is set, consider only functions that return dynamically |
1554 | allocated objects. Otherwise return true even for all forms of |
1555 | alloca (including VLA). */ |
1556 | |
1557 | static bool |
1558 | fndecl_alloc_p (tree fndecl, bool all_alloc) |
1559 | { |
1560 | if (!fndecl) |
1561 | return false; |
1562 | |
1563 | /* A call to operator new isn't recognized as one to a built-in. */ |
1564 | if (DECL_IS_OPERATOR_NEW_P (fndecl)) |
1565 | return true; |
1566 | |
1567 | if (fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL)) |
1568 | { |
1569 | switch (DECL_FUNCTION_CODE (decl: fndecl)) |
1570 | { |
1571 | case BUILT_IN_ALLOCA: |
1572 | case BUILT_IN_ALLOCA_WITH_ALIGN: |
1573 | return all_alloc; |
1574 | case BUILT_IN_ALIGNED_ALLOC: |
1575 | case BUILT_IN_CALLOC: |
1576 | case BUILT_IN_GOMP_ALLOC: |
1577 | case BUILT_IN_MALLOC: |
1578 | case BUILT_IN_REALLOC: |
1579 | case BUILT_IN_STRDUP: |
1580 | case BUILT_IN_STRNDUP: |
1581 | return true; |
1582 | default: |
1583 | break; |
1584 | } |
1585 | } |
1586 | |
1587 | /* A function is considered an allocation function if it's declared |
1588 | with attribute malloc with an argument naming its associated |
1589 | deallocation function. */ |
1590 | tree attrs = DECL_ATTRIBUTES (fndecl); |
1591 | if (!attrs) |
1592 | return false; |
1593 | |
1594 | for (tree allocs = attrs; |
1595 | (allocs = lookup_attribute (attr_name: "malloc" , list: allocs)); |
1596 | allocs = TREE_CHAIN (allocs)) |
1597 | { |
1598 | tree args = TREE_VALUE (allocs); |
1599 | if (!args) |
1600 | continue; |
1601 | |
1602 | if (TREE_VALUE (args)) |
1603 | return true; |
1604 | } |
1605 | |
1606 | return false; |
1607 | } |
1608 | |
1609 | /* Return true if STMT is a call to an allocation function. A wrapper |
1610 | around fndecl_alloc_p. */ |
1611 | |
1612 | static bool |
1613 | gimple_call_alloc_p (gimple *stmt, bool all_alloc = false) |
1614 | { |
1615 | return fndecl_alloc_p (fndecl: gimple_call_fndecl (gs: stmt), all_alloc); |
1616 | } |
1617 | |
1618 | /* Return true if DELC doesn't refer to an operator delete that's |
1619 | suitable to call with a pointer returned from the operator new |
1620 | described by NEWC. */ |
1621 | |
1622 | static bool |
1623 | new_delete_mismatch_p (const demangle_component &newc, |
1624 | const demangle_component &delc) |
1625 | { |
1626 | if (newc.type != delc.type) |
1627 | return true; |
1628 | |
1629 | switch (newc.type) |
1630 | { |
1631 | case DEMANGLE_COMPONENT_NAME: |
1632 | { |
1633 | int len = newc.u.s_name.len; |
1634 | const char *news = newc.u.s_name.s; |
1635 | const char *dels = delc.u.s_name.s; |
1636 | if (len != delc.u.s_name.len || memcmp (s1: news, s2: dels, n: len)) |
1637 | return true; |
1638 | |
1639 | if (news[len] == 'n') |
1640 | { |
1641 | if (news[len + 1] == 'a') |
1642 | return dels[len] != 'd' || dels[len + 1] != 'a'; |
1643 | if (news[len + 1] == 'w') |
1644 | return dels[len] != 'd' || dels[len + 1] != 'l'; |
1645 | } |
1646 | return false; |
1647 | } |
1648 | |
1649 | case DEMANGLE_COMPONENT_OPERATOR: |
1650 | /* Operator mismatches are handled above. */ |
1651 | return false; |
1652 | |
1653 | case DEMANGLE_COMPONENT_EXTENDED_OPERATOR: |
1654 | if (newc.u.s_extended_operator.args != delc.u.s_extended_operator.args) |
1655 | return true; |
1656 | return new_delete_mismatch_p (newc: *newc.u.s_extended_operator.name, |
1657 | delc: *delc.u.s_extended_operator.name); |
1658 | |
1659 | case DEMANGLE_COMPONENT_FIXED_TYPE: |
1660 | if (newc.u.s_fixed.accum != delc.u.s_fixed.accum |
1661 | || newc.u.s_fixed.sat != delc.u.s_fixed.sat) |
1662 | return true; |
1663 | return new_delete_mismatch_p (newc: *newc.u.s_fixed.length, |
1664 | delc: *delc.u.s_fixed.length); |
1665 | |
1666 | case DEMANGLE_COMPONENT_CTOR: |
1667 | if (newc.u.s_ctor.kind != delc.u.s_ctor.kind) |
1668 | return true; |
1669 | return new_delete_mismatch_p (newc: *newc.u.s_ctor.name, |
1670 | delc: *delc.u.s_ctor.name); |
1671 | |
1672 | case DEMANGLE_COMPONENT_DTOR: |
1673 | if (newc.u.s_dtor.kind != delc.u.s_dtor.kind) |
1674 | return true; |
1675 | return new_delete_mismatch_p (newc: *newc.u.s_dtor.name, |
1676 | delc: *delc.u.s_dtor.name); |
1677 | |
1678 | case DEMANGLE_COMPONENT_BUILTIN_TYPE: |
1679 | { |
1680 | /* The demangler API provides no better way to compare built-in |
1681 | types except to by comparing their demangled names. */ |
1682 | size_t nsz, dsz; |
1683 | demangle_component *pnc = const_cast<demangle_component *>(&newc); |
1684 | demangle_component *pdc = const_cast<demangle_component *>(&delc); |
1685 | char *nts = cplus_demangle_print (options: 0, tree: pnc, estimated_length: 16, p_allocated_size: &nsz); |
1686 | char *dts = cplus_demangle_print (options: 0, tree: pdc, estimated_length: 16, p_allocated_size: &dsz); |
1687 | if (!nts != !dts) |
1688 | return true; |
1689 | bool mismatch = strcmp (s1: nts, s2: dts); |
1690 | free (ptr: nts); |
1691 | free (ptr: dts); |
1692 | return mismatch; |
1693 | } |
1694 | |
1695 | case DEMANGLE_COMPONENT_SUB_STD: |
1696 | if (newc.u.s_string.len != delc.u.s_string.len) |
1697 | return true; |
1698 | return memcmp (s1: newc.u.s_string.string, s2: delc.u.s_string.string, |
1699 | n: newc.u.s_string.len); |
1700 | |
1701 | case DEMANGLE_COMPONENT_FUNCTION_PARAM: |
1702 | case DEMANGLE_COMPONENT_TEMPLATE_PARAM: |
1703 | return newc.u.s_number.number != delc.u.s_number.number; |
1704 | |
1705 | case DEMANGLE_COMPONENT_CHARACTER: |
1706 | return newc.u.s_character.character != delc.u.s_character.character; |
1707 | |
1708 | case DEMANGLE_COMPONENT_DEFAULT_ARG: |
1709 | case DEMANGLE_COMPONENT_LAMBDA: |
1710 | if (newc.u.s_unary_num.num != delc.u.s_unary_num.num) |
1711 | return true; |
1712 | return new_delete_mismatch_p (newc: *newc.u.s_unary_num.sub, |
1713 | delc: *delc.u.s_unary_num.sub); |
1714 | default: |
1715 | break; |
1716 | } |
1717 | |
1718 | if (!newc.u.s_binary.left != !delc.u.s_binary.left) |
1719 | return true; |
1720 | |
1721 | if (!newc.u.s_binary.left) |
1722 | return false; |
1723 | |
1724 | if (new_delete_mismatch_p (newc: *newc.u.s_binary.left, delc: *delc.u.s_binary.left) |
1725 | || !newc.u.s_binary.right != !delc.u.s_binary.right) |
1726 | return true; |
1727 | |
1728 | if (newc.u.s_binary.right) |
1729 | return new_delete_mismatch_p (newc: *newc.u.s_binary.right, |
1730 | delc: *delc.u.s_binary.right); |
1731 | return false; |
1732 | } |
1733 | |
1734 | /* Return true if DELETE_DECL is an operator delete that's not suitable |
1735 | to call with a pointer returned from NEW_DECL. */ |
1736 | |
1737 | static bool |
1738 | new_delete_mismatch_p (tree new_decl, tree delete_decl) |
1739 | { |
1740 | tree new_name = DECL_ASSEMBLER_NAME (new_decl); |
1741 | tree delete_name = DECL_ASSEMBLER_NAME (delete_decl); |
1742 | |
1743 | /* valid_new_delete_pair_p() returns a conservative result (currently |
1744 | it only handles global operators). A true result is reliable but |
1745 | a false result doesn't necessarily mean the operators don't match |
1746 | unless CERTAIN is set. */ |
1747 | bool certain; |
1748 | if (valid_new_delete_pair_p (new_name, delete_name, &certain)) |
1749 | return false; |
1750 | /* CERTAIN is set when the negative result is certain. */ |
1751 | if (certain) |
1752 | return true; |
1753 | |
1754 | /* For anything not handled by valid_new_delete_pair_p() such as member |
1755 | operators compare the individual demangled components of the mangled |
1756 | name. */ |
1757 | const char *new_str = IDENTIFIER_POINTER (new_name); |
1758 | const char *del_str = IDENTIFIER_POINTER (delete_name); |
1759 | |
1760 | void *np = NULL, *dp = NULL; |
1761 | demangle_component *ndc = cplus_demangle_v3_components (mangled: new_str, options: 0, mem: &np); |
1762 | demangle_component *ddc = cplus_demangle_v3_components (mangled: del_str, options: 0, mem: &dp); |
1763 | bool mismatch = new_delete_mismatch_p (newc: *ndc, delc: *ddc); |
1764 | free (ptr: np); |
1765 | free (ptr: dp); |
1766 | return mismatch; |
1767 | } |
1768 | |
1769 | /* ALLOC_DECL and DEALLOC_DECL are pair of allocation and deallocation |
1770 | functions. Return true if the latter is suitable to deallocate objects |
1771 | allocated by calls to the former. */ |
1772 | |
static bool
matching_alloc_calls_p (tree alloc_decl, tree dealloc_decl)
{
  /* Set to alloc_kind_t::builtin if ALLOC_DECL is associated with
     a built-in deallocator. */
  enum class alloc_kind_t { none, builtin, user }
  alloc_dealloc_kind = alloc_kind_t::none;

  if (DECL_IS_OPERATOR_NEW_P (alloc_decl))
    {
      if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
	/* Return true iff both functions are of the same array or
	   singleton form and false otherwise. */
	return !new_delete_mismatch_p (new_decl: alloc_decl, delete_decl: dealloc_decl);

      /* Return false for deallocation functions that are known not
	 to match. */
      if (fndecl_built_in_p (node: dealloc_decl, name1: BUILT_IN_FREE, names: BUILT_IN_REALLOC))
	return false;
      /* Otherwise proceed below to check the deallocation function's
	 "*dealloc" attributes to look for one that mentions this operator
	 new. */
    }
  else if (fndecl_built_in_p (node: alloc_decl, klass: BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (decl: alloc_decl))
	{
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	  /* No deallocation call matches these allocators. */
	  return false;

	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_GOMP_ALLOC:
	case BUILT_IN_MALLOC:
	case BUILT_IN_REALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* These built-in allocators never match operator delete but
	     always match free() and realloc(). */
	  if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
	    return false;

	  if (fndecl_built_in_p (node: dealloc_decl, name1: BUILT_IN_FREE,
				 names: BUILT_IN_REALLOC))
	    return true;

	  alloc_dealloc_kind = alloc_kind_t::builtin;
	  break;

	default:
	  break;
	}
    }

  /* Set if DEALLOC_DECL both allocates and deallocates. */
  alloc_kind_t realloc_kind = alloc_kind_t::none;

  if (fndecl_built_in_p (node: dealloc_decl, klass: BUILT_IN_NORMAL))
    {
      built_in_function dealloc_code = DECL_FUNCTION_CODE (decl: dealloc_decl);
      if (dealloc_code == BUILT_IN_REALLOC)
	realloc_kind = alloc_kind_t::builtin;

      /* Scan the deallocators listed in ALLOC_DECL's attribute malloc
	 chains for the same built-in as DEALLOC_DECL. */
      for (tree amats = DECL_ATTRIBUTES (alloc_decl);
	   (amats = lookup_attribute (attr_name: "malloc" , list: amats));
	   amats = TREE_CHAIN (amats))
	{
	  tree args = TREE_VALUE (amats);
	  if (!args)
	    continue;

	  tree fndecl = TREE_VALUE (args);
	  if (!fndecl || !DECL_P (fndecl))
	    continue;

	  if (fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL)
	      && dealloc_code == DECL_FUNCTION_CODE (decl: fndecl))
	    return true;
	}
    }

  const bool alloc_builtin = fndecl_built_in_p (node: alloc_decl, klass: BUILT_IN_NORMAL);
  alloc_kind_t realloc_dealloc_kind = alloc_kind_t::none;

  /* If DEALLOC_DECL has an internal "*dealloc" attribute scan the list
     of its associated allocation functions for ALLOC_DECL.
     If the corresponding ALLOC_DECL is found they're a matching pair,
     otherwise they're not.
     With DDATS set to the Deallocator's *Dealloc ATtributes... */
  for (tree ddats = DECL_ATTRIBUTES (dealloc_decl);
       (ddats = lookup_attribute (attr_name: "*dealloc" , list: ddats));
       ddats = TREE_CHAIN (ddats))
    {
      tree args = TREE_VALUE (ddats);
      if (!args)
	continue;

      tree alloc = TREE_VALUE (args);
      if (!alloc)
	continue;

      /* A deallocator that names itself among its own allocators both
	 allocates and deallocates (the way realloc does). */
      if (alloc == DECL_NAME (dealloc_decl))
	realloc_kind = alloc_kind_t::user;

      if (DECL_P (alloc))
	{
	  gcc_checking_assert (fndecl_built_in_p (alloc, BUILT_IN_NORMAL));

	  switch (DECL_FUNCTION_CODE (decl: alloc))
	    {
	    case BUILT_IN_ALIGNED_ALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_GOMP_ALLOC:
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      realloc_dealloc_kind = alloc_kind_t::builtin;
	      break;
	    default:
	      break;
	    }

	  if (!alloc_builtin)
	    continue;

	  if (DECL_FUNCTION_CODE (decl: alloc) != DECL_FUNCTION_CODE (decl: alloc_decl))
	    continue;

	  return true;
	}

      /* User-defined allocators are recorded by name rather than decl. */
      if (alloc == DECL_NAME (alloc_decl))
	return true;
    }

  if (realloc_kind == alloc_kind_t::none)
    return false;

  hash_set<tree> common_deallocs;
  /* Special handling for deallocators.  Iterate over both the allocator's
     and the reallocator's associated deallocator functions looking for
     the first one in common.  If one is found, the de/reallocator is
     a match for the allocator even though the latter isn't directly
     associated with the former.  This simplifies declarations in system
     headers.
     With AMATS set to the Allocator's Malloc ATtributes,
     and RMATS set to Reallocator's Malloc ATtributes... */
  for (tree amats = DECL_ATTRIBUTES (alloc_decl),
	 rmats = DECL_ATTRIBUTES (dealloc_decl);
       (amats = lookup_attribute (attr_name: "malloc" , list: amats))
	 || (rmats = lookup_attribute (attr_name: "malloc" , list: rmats));
       amats = amats ? TREE_CHAIN (amats) : NULL_TREE,
	 rmats = rmats ? TREE_CHAIN (rmats) : NULL_TREE)
    {
      if (tree args = amats ? TREE_VALUE (amats) : NULL_TREE)
	if (tree adealloc = TREE_VALUE (args))
	  {
	    if (DECL_P (adealloc)
		&& fndecl_built_in_p (node: adealloc, klass: BUILT_IN_NORMAL))
	      {
		built_in_function fncode = DECL_FUNCTION_CODE (decl: adealloc);
		if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
		  {
		    if (realloc_kind == alloc_kind_t::builtin)
		      return true;
		    alloc_dealloc_kind = alloc_kind_t::builtin;
		  }
		continue;
	      }

	    common_deallocs.add (k: adealloc);
	  }

      if (tree args = rmats ? TREE_VALUE (rmats) : NULL_TREE)
	if (tree ddealloc = TREE_VALUE (args))
	  {
	    if (DECL_P (ddealloc)
		&& fndecl_built_in_p (node: ddealloc, klass: BUILT_IN_NORMAL))
	      {
		built_in_function fncode = DECL_FUNCTION_CODE (decl: ddealloc);
		if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
		  {
		    if (alloc_dealloc_kind == alloc_kind_t::builtin)
		      return true;
		    realloc_dealloc_kind = alloc_kind_t::builtin;
		  }
		continue;
	      }

	    /* hash_set::add returns true when the entry already existed,
	       i.e., when this deallocator is shared by both functions. */
	    if (common_deallocs.add (k: ddealloc))
	      return true;
	  }
    }

  /* Succeed only if ALLOC_DECL and the reallocator DEALLOC_DECL share
     a built-in deallocator. */
  return (alloc_dealloc_kind == alloc_kind_t::builtin
	  && realloc_dealloc_kind == alloc_kind_t::builtin);
}
1972 | |
1973 | /* Return true if DEALLOC_DECL is a function suitable to deallocate |
1974 | objects allocated by the ALLOC call. */ |
1975 | |
1976 | static bool |
1977 | matching_alloc_calls_p (gimple *alloc, tree dealloc_decl) |
1978 | { |
1979 | tree alloc_decl = gimple_call_fndecl (gs: alloc); |
1980 | if (!alloc_decl) |
1981 | return true; |
1982 | |
1983 | return matching_alloc_calls_p (alloc_decl, dealloc_decl); |
1984 | } |
1985 | |
1986 | /* Diagnose a call EXP to deallocate a pointer referenced by AREF if it |
1987 | includes a nonzero offset. Such a pointer cannot refer to the beginning |
1988 | of an allocated object. A negative offset may refer to it only if |
1989 | the target pointer is unknown. */ |
1990 | |
static bool
warn_dealloc_offset (location_t loc, gimple *call, const access_ref &aref)
{
  /* Nothing to do if the pointer has been dereferenced or the offset
     isn't known to be strictly positive. */
  if (aref.deref || aref.offrng[0] <= 0 || aref.offrng[1] <= 0)
    return false;

  tree dealloc_decl = gimple_call_fndecl (gs: call);
  if (!dealloc_decl)
    return false;

  if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
      && !DECL_IS_REPLACEABLE_OPERATOR (dealloc_decl))
    {
      /* A call to a user-defined operator delete with a pointer plus offset
	 may be valid if it's returned from an unknown function (i.e., one
	 that's not operator new). */
      if (TREE_CODE (aref.ref) == SSA_NAME)
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
	  if (is_gimple_call (gs: def_stmt))
	    {
	      tree alloc_decl = gimple_call_fndecl (gs: def_stmt);
	      if (!alloc_decl || !DECL_IS_OPERATOR_NEW_P (alloc_decl))
		return false;
	    }
	}
    }

  /* Format the offset, either as a single value when it's constant
     (or its upper bound doesn't fit) or as a range otherwise; leave
     OFFSTR empty when the lower bound doesn't fit in a SHWI. */
  char offstr[80];
  offstr[0] = '\0';
  if (wi::fits_shwi_p (x: aref.offrng[0]))
    {
      if (aref.offrng[0] == aref.offrng[1]
	  || !wi::fits_shwi_p (x: aref.offrng[1]))
	sprintf (s: offstr, format: " %lli" ,
		 (long long)aref.offrng[0].to_shwi ());
      else
	sprintf (s: offstr, format: " [%lli, %lli]" ,
		 (long long)aref.offrng[0].to_shwi (),
		 (long long)aref.offrng[1].to_shwi ());
    }

  auto_diagnostic_group d;
  if (!warning_at (loc, OPT_Wfree_nonheap_object,
		   "%qD called on pointer %qE with nonzero offset%s" ,
		   dealloc_decl, aref.ref, offstr))
    return false;

  /* Add a note pointing at where the deallocated pointer came from. */
  if (DECL_P (aref.ref))
    inform (get_location (x: aref.ref), "declared here" );
  else if (TREE_CODE (aref.ref) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
      if (is_gimple_call (gs: def_stmt))
	{
	  location_t def_loc = get_location (stmt: def_stmt);
	  tree alloc_decl = gimple_call_fndecl (gs: def_stmt);
	  if (alloc_decl)
	    inform (def_loc,
		    "returned from %qD" , alloc_decl);
	  else if (tree alloc_fntype = gimple_call_fntype (gs: def_stmt))
	    inform (def_loc,
		    "returned from %qT" , alloc_fntype);
	  else
	    inform (def_loc, "obtained here" );
	}
    }

  return true;
}
2061 | |
2062 | namespace { |
2063 | |
/* Descriptor used to instantiate the access warning pass. */
const pass_data pass_data_waccess = {
  .type: GIMPLE_PASS,
  .name: "waccess" ,
  .optinfo_flags: OPTGROUP_NONE,
  .tv_id: TV_WARN_ACCESS, /* timer variable */
  PROP_cfg, /* properties_required */
  .properties_provided: 0, /* properties_provided */
  .properties_destroyed: 0, /* properties_destroyed */
  .todo_flags_start: 0, /* todo_flags_start */
  .todo_flags_finish: 0, /* todo_flags_finish */
};
2075 | |
2076 | /* Pass to detect invalid accesses. */ |
class pass_waccess : public gimple_opt_pass
{
public:
  pass_waccess (gcc::context *);

  ~pass_waccess ();

  /* Create another instance of the pass. */
  opt_pass *clone () final override;

  /* Return true when any of the warnings the pass implements is enabled. */
  bool gate (function *) final override;

  /* Select between the early and late variants of the checks. */
  void set_pass_param (unsigned, bool) final override;

  /* Run the checks on a function. */
  unsigned int execute (function *) final override;

private:
  /* Not copyable or assignable. */
  pass_waccess (pass_waccess &) = delete;
  void operator= (pass_waccess &) = delete;

  /* Check a call to an atomic built-in function. */
  bool check_atomic_builtin (gcall *);

  /* Check a call to a built-in function. */
  bool check_builtin (gcall *);

  /* Check a call to an ordinary function for invalid accesses. */
  bool check_call_access (gcall *);

  /* Check a non-call statement. */
  void check_stmt (gimple *);

  /* Check statements in a basic block. */
  void check_block (basic_block);

  /* Check a call to a function. */
  void check_call (gcall *);

  /* Check a call to the named built-in function. */
  void check_alloca (gcall *);
  void check_alloc_size_call (gcall *);
  void check_strcat (gcall *);
  void check_strncat (gcall *);
  void check_stxcpy (gcall *);
  void check_stxncpy (gcall *);
  void check_strncmp (gcall *);
  void check_memop_access (gimple *, tree, tree, tree);
  void check_read_access (gimple *, tree, tree = NULL_TREE, int = 1);

  /* Check calls to deallocators and calls with sized accesses. */
  void maybe_check_dealloc_call (gcall *);
  void maybe_check_access_sizes (rdwr_map *, tree, tree, gimple *);
  bool maybe_warn_memmodel (gimple *, tree, tree, const unsigned char *);
  void check_atomic_memmodel (gimple *, tree, tree, const unsigned char *);

  /* Check for uses of indeterminate pointers. */
  void check_pointer_uses (gimple *, tree, tree = NULL_TREE, bool = false);

  /* Return the argument that a call returns. */
  tree gimple_call_return_arg (gcall *);

  /* Check a call for uses of a dangling pointer arguments. */
  void check_call_dangling (gcall *);

  /* Check uses of a dangling pointer or those derived from it. */
  void check_dangling_uses (tree, tree, bool = false, bool = false);
  void check_dangling_uses ();
  void check_dangling_stores ();
  bool check_dangling_stores (basic_block, hash_set<tree> &);

  /* Issue a warning for a use of an invalid pointer. */
  void warn_invalid_pointer (tree, gimple *, gimple *, tree, bool, bool = false);

  /* Return true if use follows an invalidating statement. */
  bool use_after_inval_p (gimple *, gimple *, bool = false);

  /* A pointer_query object to store information about pointers and
     their targets in. */
  pointer_query m_ptr_qry;
  /* Mapping from DECLs and their clobber statements in the function. */
  hash_map<tree, gimple *> m_clobbers;
  /* A bit is set for each basic block whose statements have been assigned
     valid UIDs. */
  bitmap m_bb_uids_set;
  /* The current function. */
  function *m_func;
  /* True to run checks for uses of dangling pointers. */
  bool m_check_dangling_p;
  /* True to run checks early on in the optimization pipeline. */
  bool m_early_checks_p;
};
2166 | |
2167 | /* Construct the pass. */ |
2168 | |
pass_waccess::pass_waccess (gcc::context *ctxt)
  : gimple_opt_pass (pass_data_waccess, ctxt),
    m_ptr_qry (NULL),
    /* Value-initialize the remaining members. */
    m_clobbers (),
    m_bb_uids_set (),
    m_func (),
    m_check_dangling_p (),
    m_early_checks_p ()
{
}
2179 | |
2180 | /* Return a copy of the pass with RUN_NUMBER one greater than THIS. */ |
2181 | |
opt_pass*
pass_waccess::clone ()
{
  /* The clone starts out with default members; its parameters are set
     separately via set_pass_param (). */
  return new pass_waccess (m_ctxt);
}
2187 | |
2188 | /* Release pointer_query cache. */ |
2189 | |
pass_waccess::~pass_waccess ()
{
  /* Release memory owned by the pointer_query cache. */
  m_ptr_qry.flush_cache ();
}
2194 | |
/* Set the pass's only parameter: EARLY is true for the instance of
   the pass that runs early in the pipeline, false for the late one.
   N must be zero since the pass takes a single parameter. */

void
pass_waccess::set_pass_param (unsigned int n, bool early)
{
  gcc_assert (n == 0);

  m_early_checks_p = early;
}
2202 | |
2203 | /* Return true when any checks performed by the pass are enabled. */ |
2204 | |
2205 | bool |
2206 | pass_waccess::gate (function *) |
2207 | { |
2208 | return (warn_free_nonheap_object |
2209 | || warn_mismatched_alloc |
2210 | || warn_mismatched_new_delete); |
2211 | } |
2212 | |
2213 | /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than= |
2214 | setting if the option is specified, or to the maximum object size if it |
2215 | is not. Return the initialized value. */ |
2216 | |
2217 | static tree |
2218 | alloc_max_size (void) |
2219 | { |
2220 | HOST_WIDE_INT limit = warn_alloc_size_limit; |
2221 | if (limit == HOST_WIDE_INT_MAX) |
2222 | limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node)); |
2223 | |
2224 | return build_int_cst (size_type_node, limit); |
2225 | } |
2226 | |
2227 | /* Diagnose a call EXP to function FN decorated with attribute alloc_size |
2228 | whose argument numbers given by IDX with values given by ARGS exceed |
2229 | the maximum object size or cause an unsigned overflow (wrapping) when |
2230 | multiplied. FN is null when EXP is a call via a function pointer. |
2231 | When ARGS[0] is null the function does nothing. ARGS[1] may be null |
2232 | for functions like malloc, and non-null for those like calloc that |
2233 | are decorated with a two-argument attribute alloc_size. */ |
2234 | |
void
maybe_warn_alloc_args_overflow (gimple *stmt, const tree args[2],
				const int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in. */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
  tree maxobjsize = alloc_max_size ();

  location_t loc = get_location (stmt);

  tree fn = gimple_call_fndecl (gs: stmt);
  tree fntype = fn ? TREE_TYPE (fn) : gimple_call_fntype (gs: stmt);
  bool warned = false;

  /* Validate each argument individually. */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
	{
	  argrange[i][0] = args[i];
	  argrange[i][1] = args[i];

	  if (tree_int_cst_lt (t1: args[i], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "argument %i value %qE is negative" ,
				   idx[i] + 1, args[i]);
	    }
	  else if (integer_zerop (args[i]))
	    {
	      /* Avoid issuing -Walloc-zero for allocation functions other
		 than __builtin_alloca that are declared with attribute
		 returns_nonnull because there's no portability risk.  This
		 avoids warning for such calls to libiberty's xmalloc and
		 friends.
		 Also avoid issuing the warning for calls to function named
		 "alloca". */
	      if (fn && fndecl_built_in_p (node: fn, name1: BUILT_IN_ALLOCA)
		  ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
		  : !lookup_attribute (attr_name: "returns_nonnull" ,
				       TYPE_ATTRIBUTES (fntype)))
		warned = warning_at (loc, OPT_Walloc_zero,
				     "argument %i value is zero" ,
				     idx[i] + 1);
	    }
	  else if (tree_int_cst_lt (t1: maxobjsize, t2: args[i]))
	    {
	      /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
		 mode and with -fno-exceptions as a way to indicate array
		 size overflow.  There's no good way to detect C++98 here
		 so avoid diagnosing these calls for all C++ modes. */
	      if (i == 0
		  && fn
		  && !args[1]
		  && lang_GNU_CXX ()
		  && DECL_IS_OPERATOR_NEW_P (fn)
		  && integer_all_onesp (args[i]))
		continue;

	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "argument %i value %qE exceeds "
				   "maximum object size %E" ,
				   idx[i] + 1, args[i], maxobjsize);
	    }
	}
      else if (TREE_CODE (args[i]) == SSA_NAME
	       && get_size_range (args[i], argrange[i]))
	{
	  /* Verify that the argument's range is not negative (including
	     upper bound of zero). */
	  if (tree_int_cst_lt (t1: argrange[i][0], integer_zero_node)
	      && tree_int_cst_le (t1: argrange[i][1], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "argument %i range [%E, %E] is negative" ,
				   idx[i] + 1,
				   argrange[i][0], argrange[i][1]);
	    }
	  else if (tree_int_cst_lt (t1: maxobjsize, t2: argrange[i][0]))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "argument %i range [%E, %E] exceeds "
				   "maximum object size %E" ,
				   idx[i] + 1,
				   argrange[i][0], argrange[i][1],
				   maxobjsize);
	    }
	}
    }

  if (!argrange[0][0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known. */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
	 attribute alloc_size (X, Y). */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (t: argrange[0][0], prec: szprec);
      wide_int y = wi::to_wide (t: argrange[1][0], prec: szprec);

      wi::overflow_type vflow;
      wide_int prod = wi::umul (x, y, overflow: &vflow);

      if (vflow)
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "product %<%E * %E%> of arguments %i and %i "
			     "exceeds %<SIZE_MAX%>" ,
			     argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (x: wi::to_wide (t: maxobjsize, prec: szprec), y: prod))
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "product %<%E * %E%> of arguments %i and %i "
			     "exceeds maximum object size %E" ,
			     argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1,
			     maxobjsize);

      if (warned)
	{
	  /* Print the full range of each of the two arguments to make
	     it clear when it is, in fact, in a range and not constant. */
	  if (argrange[0][0] != argrange [0][1])
	    inform (loc, "argument %i in the range [%E, %E]" ,
		    idx[0] + 1, argrange[0][0], argrange[0][1]);
	  if (argrange[1][0] != argrange [1][1])
	    inform (loc, "argument %i in the range [%E, %E]" ,
		    idx[1] + 1, argrange[1][0], argrange[1][1]);
	}
    }

  if (warned && fn)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      /* Point at the call for built-ins but at the declaration for
	 user-declared allocation functions. */
      if (DECL_IS_UNDECLARED_BUILTIN (fn))
	inform (loc,
		"in a call to built-in allocation function %qD" , fn);
      else
	inform (fnloc,
		"in a call to allocation function %qD declared here" , fn);
    }
}
2385 | |
2386 | /* Check a call to an alloca function for an excessive size. */ |
2387 | |
2388 | void |
2389 | pass_waccess::check_alloca (gcall *stmt) |
2390 | { |
2391 | if (m_early_checks_p) |
2392 | return; |
2393 | |
2394 | if ((warn_vla_limit >= HOST_WIDE_INT_MAX |
2395 | && warn_alloc_size_limit < warn_vla_limit) |
2396 | || (warn_alloca_limit >= HOST_WIDE_INT_MAX |
2397 | && warn_alloc_size_limit < warn_alloca_limit)) |
2398 | { |
2399 | /* -Walloca-larger-than and -Wvla-larger-than settings of less |
2400 | than HWI_MAX override the more general -Walloc-size-larger-than |
2401 | so unless either of the former options is smaller than the last |
2402 | one (which would imply that the call was already checked), check |
2403 | the alloca arguments for overflow. */ |
2404 | const tree alloc_args[] = { call_arg (stmt, argno: 0), NULL_TREE }; |
2405 | const int idx[] = { 0, -1 }; |
2406 | maybe_warn_alloc_args_overflow (stmt, args: alloc_args, idx); |
2407 | } |
2408 | } |
2409 | |
2410 | /* Check a call to an allocation function for an excessive size. */ |
2411 | |
void
pass_waccess::check_alloc_size_call (gcall *stmt)
{
  if (m_early_checks_p)
    return;

  if (gimple_call_num_args (gs: stmt) < 1)
    /* Avoid invalid calls to functions without a prototype. */
    return;

  tree fndecl = gimple_call_fndecl (gs: stmt);
  if (fndecl && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      /* Alloca is handled separately. */
      switch (DECL_FUNCTION_CODE (decl: fndecl))
	{
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
	  return;
	default:
	  break;
	}
    }

  tree fntype = gimple_call_fntype (gs: stmt);
  tree fntypeattrs = TYPE_ATTRIBUTES (fntype);

  tree alloc_size = lookup_attribute (attr_name: "alloc_size" , list: fntypeattrs);
  if (!alloc_size)
    return;

  /* Extract attribute alloc_size from the type of the called expression
     (which could be a function or a function pointer) and if set, store
     the indices of the corresponding arguments in ALLOC_IDX, and then
     the actual argument(s) at those indices in ALLOC_ARGS. */
  int idx[2] = { -1, -1 };
  tree alloc_args[] = { NULL_TREE, NULL_TREE };
  unsigned nargs = gimple_call_num_args (gs: stmt);

  /* The attribute operands are 1-based argument positions; convert
     them to zero-based indices. */
  tree args = TREE_VALUE (alloc_size);
  idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
  /* Avoid invalid calls to functions without a prototype. */
  if ((unsigned) idx[0] >= nargs)
    return;
  alloc_args[0] = call_arg (stmt, argno: idx[0]);
  if (TREE_CHAIN (args))
    {
      idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
      if ((unsigned) idx[1] >= nargs)
	return;
      alloc_args[1] = call_arg (stmt, argno: idx[1]);
    }

  maybe_warn_alloc_args_overflow (stmt, args: alloc_args, idx);
}
2468 | |
2469 | /* Check a call STMT to strcat() for overflow and warn if it does. */ |
2470 | |
2471 | void |
2472 | pass_waccess::check_strcat (gcall *stmt) |
2473 | { |
2474 | if (m_early_checks_p) |
2475 | return; |
2476 | |
2477 | if (!warn_stringop_overflow && !warn_stringop_overread) |
2478 | return; |
2479 | |
2480 | tree dest = call_arg (stmt, argno: 0); |
2481 | tree src = call_arg (stmt, argno: 1); |
2482 | |
2483 | /* There is no way here to determine the length of the string in |
2484 | the destination to which the SRC string is being appended so |
2485 | just diagnose cases when the source string is longer than |
2486 | the destination object. */ |
2487 | access_data data (m_ptr_qry.rvals, stmt, access_read_write, NULL_TREE, |
2488 | true, NULL_TREE, true); |
2489 | const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1; |
2490 | compute_objsize (src, stmt, ost, &data.src, &m_ptr_qry); |
2491 | tree destsize = compute_objsize (dest, stmt, ost, &data.dst, &m_ptr_qry); |
2492 | |
2493 | check_access (exp: stmt, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE, |
2494 | srcstr: src, dstsize: destsize, mode: data.mode, pad: &data, rvals: m_ptr_qry.rvals); |
2495 | } |
2496 | |
/* Check a call STMT to strncat() for overflow and warn if it does. */
2498 | |
void
pass_waccess::check_strncat (gcall *stmt)
{
  if (m_early_checks_p)
    return;

  if (!warn_stringop_overflow && !warn_stringop_overread)
    return;

  tree dest = call_arg (stmt, argno: 0);
  tree src = call_arg (stmt, argno: 1);
  /* The upper bound on the number of bytes to write. */
  tree maxread = call_arg (stmt, argno: 2);

  /* Detect unterminated source (only). */
  if (!check_nul_terminated_array (expr: stmt, src, bound: maxread))
    return;

  /* The length of the source sequence. */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  Since the lengths are only used for warning and not
     for code generation disable strict mode below. */
  tree maxlen = slen;
  if (!maxlen)
    {
      c_strlen_data lendata = { };
      get_range_strlen (src, &lendata, /* eltsize = */ 1);
      maxlen = lendata.maxbound;
    }

  access_data data (m_ptr_qry.rvals, stmt, access_read_write);
  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied. */
  const int ost = warn_stringop_overflow - 1;
  tree destsize = compute_objsize (dest, stmt, ost, &data.dst, &m_ptr_qry);

  /* Add one for the terminating nul. */
  tree srclen = (maxlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination. */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      location_t loc = get_location (stmt);
      warning_at (loc, OPT_Wstringop_overflow_,
		  "%qD specified bound %E equals destination size" ,
		  get_callee_fndecl (stmt), maxread);

      return;
    }

  /* Use the bound in place of the source length when the length is
     unknown or exceeds the bound. */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (t1: maxread, t2: srclen)))
    srclen = maxread;

  check_access (exp: stmt, /*dstwrite=*/NULL_TREE, maxread, srcstr: srclen,
		dstsize: destsize, mode: data.mode, pad: &data, rvals: m_ptr_qry.rvals);
}
2567 | |
2568 | /* Check a call STMT to stpcpy() or strcpy() for overflow and warn |
2569 | if it does. */ |
2570 | |
void
pass_waccess::check_stxcpy (gcall *stmt)
{
  if (m_early_checks_p)
    return;

  tree dst = call_arg (stmt, argno: 0);
  tree src = call_arg (stmt, argno: 1);

  tree size;
  bool exact;
  if (tree nonstr = unterminated_array (exp: src, size: &size, exact: &exact))
    {
      /* NONSTR refers to the non-nul terminated constant array. */
      warn_string_no_nul (loc: get_location (stmt), stmt, NULL, arg: src, decl: nonstr,
			  size, exact);
      return;
    }

  if (warn_stringop_overflow)
    {
      access_data data (m_ptr_qry.rvals, stmt, access_read_write, NULL_TREE,
			true, NULL_TREE, true);
      const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
      compute_objsize (src, stmt, ost, &data.src, &m_ptr_qry);
      tree dstsize = compute_objsize (dst, stmt, ost, &data.dst, &m_ptr_qry);
      check_access (exp: stmt, /*dstwrite=*/ NULL_TREE,
		    /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
		    dstsize, mode: data.mode, pad: &data, rvals: m_ptr_qry.rvals);
    }

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated. */
  tree fndecl = get_callee_fndecl (stmt);
  maybe_warn_nonstring_arg (fndecl, exp: stmt);
}
2608 | |
2609 | /* Check a call STMT to stpncpy() or strncpy() for overflow and warn |
2610 | if it does. */ |
2611 | |
2612 | void |
2613 | pass_waccess::check_stxncpy (gcall *stmt) |
2614 | { |
2615 | if (m_early_checks_p || !warn_stringop_overflow) |
2616 | return; |
2617 | |
2618 | tree dst = call_arg (stmt, argno: 0); |
2619 | tree src = call_arg (stmt, argno: 1); |
2620 | /* The number of bytes to write (not the maximum). */ |
2621 | tree len = call_arg (stmt, argno: 2); |
2622 | |
2623 | access_data data (m_ptr_qry.rvals, stmt, access_read_write, len, true, len, |
2624 | true); |
2625 | const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1; |
2626 | compute_objsize (src, stmt, ost, &data.src, &m_ptr_qry); |
2627 | tree dstsize = compute_objsize (dst, stmt, ost, &data.dst, &m_ptr_qry); |
2628 | |
2629 | check_access (exp: stmt, /*dstwrite=*/len, /*maxread=*/len, srcstr: src, dstsize, |
2630 | mode: data.mode, pad: &data, rvals: m_ptr_qry.rvals); |
2631 | } |
2632 | |
/* Check a call STMT to strncmp() for out-of-bounds reads of unterminated
   arrays or an excessive bound and warn if it finds any. */
2635 | |
2636 | void |
2637 | pass_waccess::check_strncmp (gcall *stmt) |
2638 | { |
2639 | if (m_early_checks_p || !warn_stringop_overread) |
2640 | return; |
2641 | |
2642 | tree arg1 = call_arg (stmt, argno: 0); |
2643 | tree arg2 = call_arg (stmt, argno: 1); |
2644 | tree bound = call_arg (stmt, argno: 2); |
2645 | |
2646 | /* First check each argument separately, considering the bound. */ |
2647 | if (!check_nul_terminated_array (expr: stmt, src: arg1, bound) |
2648 | || !check_nul_terminated_array (expr: stmt, src: arg2, bound)) |
2649 | return; |
2650 | |
2651 | /* A strncmp read from each argument is constrained not just by |
2652 | the bound but also by the length of the shorter string. Specifying |
2653 | a bound that's larger than the size of either array makes no sense |
2654 | and is likely a bug. When the length of neither of the two strings |
2655 | is known but the sizes of both of the arrays they are stored in is, |
2656 | issue a warning if the bound is larger than the size of |
2657 | the larger of the two arrays. */ |
2658 | |
2659 | c_strlen_data lendata1{ }, lendata2{ }; |
2660 | tree len1 = c_strlen (arg1, 1, &lendata1); |
2661 | tree len2 = c_strlen (arg2, 1, &lendata2); |
2662 | |
2663 | if (len1 && TREE_CODE (len1) != INTEGER_CST) |
2664 | len1 = NULL_TREE; |
2665 | if (len2 && TREE_CODE (len2) != INTEGER_CST) |
2666 | len2 = NULL_TREE; |
2667 | |
2668 | if (len1 && len2) |
2669 | /* If the length of both arguments was computed they must both be |
2670 | nul-terminated and no further checking is necessary regardless |
2671 | of the bound. */ |
2672 | return; |
2673 | |
2674 | /* Check to see if the argument was declared with attribute nonstring |
2675 | and if so, issue a warning since at this point it's not known to be |
2676 | nul-terminated. */ |
2677 | if (maybe_warn_nonstring_arg (fndecl: get_callee_fndecl (stmt), exp: stmt)) |
2678 | return; |
2679 | |
2680 | access_data adata1 (m_ptr_qry.rvals, stmt, access_read_only, NULL_TREE, false, |
2681 | bound, true); |
2682 | access_data adata2 (m_ptr_qry.rvals, stmt, access_read_only, NULL_TREE, false, |
2683 | bound, true); |
2684 | |
2685 | /* Determine the range of the bound first and bail if it fails; it's |
2686 | cheaper than computing the size of the objects. */ |
2687 | tree bndrng[2] = { NULL_TREE, NULL_TREE }; |
2688 | get_size_range (query: m_ptr_qry.rvals, bound, stmt, range: bndrng, flags: 0, bndrng: adata1.src_bndrng); |
2689 | if (!bndrng[0] || integer_zerop (bndrng[0])) |
2690 | return; |
2691 | |
2692 | if (len1 && tree_int_cst_lt (t1: len1, t2: bndrng[0])) |
2693 | bndrng[0] = len1; |
2694 | if (len2 && tree_int_cst_lt (t1: len2, t2: bndrng[0])) |
2695 | bndrng[0] = len2; |
2696 | |
2697 | /* compute_objsize almost never fails (and ultimately should never |
2698 | fail). Don't bother to handle the rare case when it does. */ |
2699 | if (!compute_objsize (arg1, stmt, 1, &adata1.src, &m_ptr_qry) |
2700 | || !compute_objsize (arg2, stmt, 1, &adata2.src, &m_ptr_qry)) |
2701 | return; |
2702 | |
2703 | /* Compute the size of the remaining space in each array after |
2704 | subtracting any offset into it. */ |
2705 | offset_int rem1 = adata1.src.size_remaining (); |
2706 | offset_int rem2 = adata2.src.size_remaining (); |
2707 | |
2708 | /* Cap REM1 and REM2 at the other if the other's argument is known |
2709 | to be an unterminated array, either because there's no space |
2710 | left in it after adding its offset or because it's constant and |
2711 | has no nul. */ |
2712 | if (rem1 == 0 || (rem1 < rem2 && lendata1.decl)) |
2713 | rem2 = rem1; |
2714 | else if (rem2 == 0 || (rem2 < rem1 && lendata2.decl)) |
2715 | rem1 = rem2; |
2716 | |
2717 | /* Point PAD at the array to reference in the note if a warning |
2718 | is issued. */ |
2719 | access_data *pad = len1 ? &adata2 : &adata1; |
2720 | offset_int maxrem = wi::max (x: rem1, y: rem2, sgn: UNSIGNED); |
2721 | if (lendata1.decl || lendata2.decl |
2722 | || maxrem < wi::to_offset (t: bndrng[0])) |
2723 | { |
2724 | /* Warn when either argument isn't nul-terminated or the maximum |
2725 | remaining space in the two arrays is less than the bound. */ |
2726 | tree func = get_callee_fndecl (stmt); |
2727 | location_t loc = gimple_location (g: stmt); |
2728 | maybe_warn_for_bound (opt: OPT_Wstringop_overread, loc, exp: stmt, func, |
2729 | bndrng, size: wide_int_to_tree (sizetype, cst: maxrem), |
2730 | pad); |
2731 | } |
2732 | } |
2733 | |
/* Determine and check the sizes of the source and the destination
   of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls.  STMT is
   the call statement, DEST is the destination argument, SRC is the source
   argument or null, and SIZE is the number of bytes being accessed.  Use
   Object Size type-0 regardless of the OPT_Wstringop_overflow_ setting.
   Any diagnostics are issued by check_access.  */

void
pass_waccess::check_memop_access (gimple *stmt, tree dest, tree src, tree size)
{
  /* These checks are only performed in the late run of the pass;
     NOTE(review): the rationale for deferring is not visible here --
     presumably to benefit from pointer propagation.  */
  if (m_early_checks_p)
    return;

  /* For functions like memset and memcpy that operate on raw memory
     try to determine the size of the largest source and destination
     object using type-0 Object Size regardless of the object size
     type specified by the option.  */
  access_data data (m_ptr_qry.rvals, stmt, access_read_write);
  tree srcsize
    = src ? compute_objsize (src, stmt, 0, &data.src, &m_ptr_qry) : NULL_TREE;
  tree dstsize = compute_objsize (dest, stmt, 0, &data.dst, &m_ptr_qry);

  check_access (stmt, dstwrite: size, /*maxread=*/NULL_TREE, srcstr: srcsize, dstsize,
		mode: data.mode, pad: &data, rvals: m_ptr_qry.rvals);
}
2759 | |
/* A convenience wrapper for check_access to check access by a read-only
   function like puts or strcmp.  SRC is the pointer being read from,
   BOUND is the optional maximum number of bytes read, and OST is the
   Object Size type used when computing the size of SRC.  */

void
pass_waccess::check_read_access (gimple *stmt, tree src,
				 tree bound /* = NULL_TREE */,
				 int ost /* = 1 */)
{
  if (m_early_checks_p || !warn_stringop_overread)
    return;

  /* Normalize the bound to size_t before handing it to check_access.  */
  if (bound && !useless_type_conversion_p (size_type_node, TREE_TYPE (bound)))
    bound = fold_convert (size_type_node, bound);

  /* Diagnose attribute nonstring arguments that aren't known to be
     nul-terminated at this point.  */
  tree fndecl = get_callee_fndecl (stmt);
  maybe_warn_nonstring_arg (fndecl, stmt);

  access_data data (m_ptr_qry.rvals, stmt, access_read_only, NULL_TREE,
		    false, bound, true);
  compute_objsize (src, stmt, ost, &data.src, &m_ptr_qry);
  check_access (stmt, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
		/*srcstr=*/ src, /*dstsize=*/ NULL_TREE, mode: data.mode,
		pad: &data, rvals: m_ptr_qry.rvals);
}
2784 | |
2785 | /* Return true if memory model ORD is constant in the context of STMT and |
2786 | set *CSTVAL to the constant value. Otherwise return false. Warn for |
2787 | invalid ORD. */ |
2788 | |
2789 | bool |
2790 | memmodel_to_uhwi (tree ord, gimple *stmt, unsigned HOST_WIDE_INT *cstval) |
2791 | { |
2792 | unsigned HOST_WIDE_INT val; |
2793 | |
2794 | if (TREE_CODE (ord) == INTEGER_CST) |
2795 | { |
2796 | if (!tree_fits_uhwi_p (ord)) |
2797 | return false; |
2798 | val = tree_to_uhwi (ord); |
2799 | } |
2800 | else |
2801 | { |
2802 | /* Use the range query to determine constant values in the absence |
2803 | of constant propagation (such as at -O0). */ |
2804 | Value_Range rng (TREE_TYPE (ord)); |
2805 | if (!get_range_query (cfun)->range_of_expr (r&: rng, expr: ord, stmt) |
2806 | || !rng.singleton_p (result: &ord)) |
2807 | return false; |
2808 | |
2809 | wide_int lob = rng.lower_bound (); |
2810 | if (!wi::fits_uhwi_p (x: lob)) |
2811 | return false; |
2812 | |
2813 | val = lob.to_shwi (); |
2814 | } |
2815 | |
2816 | if (targetm.memmodel_check) |
2817 | /* This might warn for an invalid VAL but return a conservatively |
2818 | valid result. */ |
2819 | val = targetm.memmodel_check (val); |
2820 | else if (val & ~MEMMODEL_MASK) |
2821 | { |
2822 | tree fndecl = gimple_call_fndecl (gs: stmt); |
2823 | location_t loc = gimple_location (g: stmt); |
2824 | loc = expansion_point_location_if_in_system_header (loc); |
2825 | |
2826 | warning_at (loc, OPT_Winvalid_memory_model, |
2827 | "unknown architecture specifier in memory model " |
2828 | "%wi for %qD" , val, fndecl); |
2829 | return false; |
2830 | } |
2831 | |
2832 | *cstval = val; |
2833 | |
2834 | return true; |
2835 | } |
2836 | |
/* Associates a memory model with its user-visible name; used to build
   the table of models valid for the atomic built-in functions.  */

struct memmodel_pair
{
  memmodel modval;      /* The memmodel enumerator.  */
  const char* modname;  /* The corresponding memory_order_* spelling.  */

  /* Convenience initializer for the table below.  */
#define MEMMODEL_PAIR(val, str) \
  { MEMMODEL_ ## val, "memory_order_" str }
};
2847 | |
/* Valid memory models.  NOTE(review): the original comment claimed
   "in the order of increasing strength" but the entries are not sorted
   by memmodel value (SEQ_CST is second).  The positions matter: the
   *_models index arrays below and maybe_warn_memmodel refer to entries
   by index, so do not reorder without auditing those uses.  */

static const memmodel_pair memory_models[] =
{ MEMMODEL_PAIR (RELAXED, "relaxed" ),
  MEMMODEL_PAIR (SEQ_CST, "seq_cst" ),
  MEMMODEL_PAIR (ACQUIRE, "acquire" ),
  MEMMODEL_PAIR (CONSUME, "consume" ),
  MEMMODEL_PAIR (RELEASE, "release" ),
  MEMMODEL_PAIR (ACQ_REL, "acq_rel" )
};
2858 | |
2859 | /* Return the name of the memory model VAL. */ |
2860 | |
2861 | static const char* |
2862 | memmodel_name (unsigned HOST_WIDE_INT val) |
2863 | { |
2864 | val = memmodel_base (val); |
2865 | |
2866 | for (unsigned i = 0; i != ARRAY_SIZE (memory_models); ++i) |
2867 | { |
2868 | if (val == memory_models[i].modval) |
2869 | return memory_models[i].modname; |
2870 | } |
2871 | return NULL; |
2872 | } |
2873 | |
/* Indices of valid MEMORY_MODELS above for corresponding atomic operations;
   each array is terminated by UCHAR_MAX.  */
/* Loads: relaxed, seq_cst, acquire, consume.  */
static const unsigned char load_models[] = { 0, 1, 2, 3, UCHAR_MAX };
/* Stores: relaxed, seq_cst, release.  */
static const unsigned char store_models[] = { 0, 1, 4, UCHAR_MAX };
/* Exchange: relaxed, seq_cst, consume, release, acq_rel.  */
static const unsigned char xchg_models[] = { 0, 1, 3, 4, 5, UCHAR_MAX };
/* Flag clear: relaxed, seq_cst, release.  */
static const unsigned char flag_clr_models[] = { 0, 1, 4, UCHAR_MAX };
/* All six models.  */
static const unsigned char all_models[] = { 0, 1, 2, 3, 4, 5, UCHAR_MAX };
2880 | |
/* Check the success memory model argument ORD_SUCS to the call STMT to
   an atomic function and warn if it's invalid.  If nonnull, also check
   the failure memory model ORD_FAIL and warn if it's invalid.  VALID is
   either null (all models are acceptable) or a UCHAR_MAX-terminated
   array of indices into MEMORY_MODELS of the acceptable models.  Return
   true if a warning has been issued.  */

bool
pass_waccess::maybe_warn_memmodel (gimple *stmt, tree ord_sucs,
				   tree ord_fail, const unsigned char *valid)
{
  /* Bail unless both models can be resolved to constants.  */
  unsigned HOST_WIDE_INT sucs, fail = 0;
  if (!memmodel_to_uhwi (ord: ord_sucs, stmt, cstval: &sucs)
      || (ord_fail && !memmodel_to_uhwi (ord: ord_fail, stmt, cstval: &fail)))
    return false;

  /* See whether the success model is among those valid for STMT.  */
  bool is_valid = false;
  if (valid)
    for (unsigned i = 0; valid[i] != UCHAR_MAX; ++i)
      {
	memmodel model = memory_models[valid[i]].modval;
	if (memmodel_base (val: sucs) == model)
	  {
	    is_valid = true;
	    break;
	  }
      }
  else
    is_valid = true;

  tree fndecl = gimple_call_fndecl (gs: stmt);
  location_t loc = gimple_location (g: stmt);
  loc = expansion_point_location_if_in_system_header (loc);

  if (!is_valid)
    {
      /* IS_VALID can only be false when VALID is nonnull, so the loops
	 over VALID below cannot dereference a null pointer.  */
      bool warned = false;
      auto_diagnostic_group d;
      if (const char *modname = memmodel_name (val: sucs))
	warned = warning_at (loc, OPT_Winvalid_memory_model,
			     "invalid memory model %qs for %qD" ,
			     modname, fndecl);
      else
	warned = warning_at (loc, OPT_Winvalid_memory_model,
			     "invalid memory model %wi for %qD" ,
			     sucs, fndecl);

      if (!warned)
	return false;

      /* Print a note with the valid memory models.  */
      pretty_printer pp;
      pp_show_color (&pp) = pp_show_color (global_dc->printer);
      for (unsigned i = 0; valid[i] != UCHAR_MAX; ++i)
	{
	  const char *modname = memory_models[valid[i]].modname;
	  pp_printf (&pp, "%s%qs" , i ? ", " : "" , modname);
	}

      inform (loc, "valid models are %s" , pp_formatted_text (&pp));
      return true;
    }

  if (!ord_fail)
    return false;

  /* A failure model of release or acq_rel is invalid regardless of
     the success model.  */
  if (fail == MEMMODEL_RELEASE || fail == MEMMODEL_ACQ_REL)
    if (const char *failname = memmodel_name (val: fail))
      {
	/* If both memory model arguments are valid but their combination
	   is not, use their names in the warning.  */
	auto_diagnostic_group d;
	if (!warning_at (loc, OPT_Winvalid_memory_model,
			 "invalid failure memory model %qs for %qD" ,
			 failname, fndecl))
	  return false;

	inform (loc,
		"valid failure models are %qs, %qs, %qs, %qs" ,
		"memory_order_relaxed" , "memory_order_seq_cst" ,
		"memory_order_acquire" , "memory_order_consume" );
	return true;
      }

  /* The failure model may not be stronger than the success model.  */
  if (memmodel_base (val: fail) <= memmodel_base (val: sucs))
    return false;

  if (const char *sucsname = memmodel_name (val: sucs))
    if (const char *failname = memmodel_name (val: fail))
      {
	/* If both memory model arguments are valid but their combination
	   is not, use their names in the warning.  */
	auto_diagnostic_group d;
	if (!warning_at (loc, OPT_Winvalid_memory_model,
			 "failure memory model %qs cannot be stronger "
			 "than success memory model %qs for %qD" ,
			 failname, sucsname, fndecl))
	  return false;

	/* Print a note with the valid failure memory models which are
	   those with a value less than or equal to the success mode.  */
	char buf[120];
	*buf = '\0';
	for (unsigned i = 0;
	     memory_models[i].modval <= memmodel_base (val: sucs); ++i)
	  {
	    if (*buf)
	      strcat (dest: buf, src: ", " );

	    /* NOTE(review): the loop condition indexes MEMORY_MODELS
	       directly by I, but this line indexes it through VALID[I];
	       the mismatch looks suspicious (VALID may be shorter than
	       MEMORY_MODELS) -- confirm the intended indexing.  */
	    const char *modname = memory_models[valid[i]].modname;
	    sprintf (s: buf + strlen (s: buf), format: "'%s'" , modname);
	  }

	inform (loc, "valid models are %s" , buf);
	return true;
      }

  /* If either memory model argument value is invalid use the numerical
     value of both in the message.  */
  return warning_at (loc, OPT_Winvalid_memory_model,
		     "failure memory model %wi cannot be stronger "
		     "than success memory model %wi for %qD" ,
		     fail, sucs, fndecl);
}
3003 | |
3004 | /* Wrapper for the above. */ |
3005 | |
3006 | void |
3007 | pass_waccess::check_atomic_memmodel (gimple *stmt, tree ord_sucs, |
3008 | tree ord_fail, const unsigned char *valid) |
3009 | { |
3010 | if (warning_suppressed_p (stmt, OPT_Winvalid_memory_model)) |
3011 | return; |
3012 | |
3013 | if (!maybe_warn_memmodel (stmt, ord_sucs, ord_fail, valid)) |
3014 | return; |
3015 | |
3016 | suppress_warning (stmt, OPT_Winvalid_memory_model); |
3017 | } |
3018 | |
/* Check a call STMT to an atomic or sync built-in for invalid accesses
   and invalid memory model arguments.  Return true if STMT was handled
   as one of these built-ins, false otherwise.  */

bool
pass_waccess::check_atomic_builtin (gcall *stmt)
{
  tree callee = gimple_call_fndecl (gs: stmt);
  if (!callee)
    return false;

  /* The size in bytes of the access by the function, and the number
     of the second argument to check (if any).  */
  unsigned bytes = 0, arg2 = UINT_MAX;
  /* Argument positions of the success and failure memory models,
     UINT_MAX when the built-in takes none.  */
  unsigned sucs_arg = UINT_MAX, fail_arg = UINT_MAX;
  /* Points to the array of indices of valid memory models.  */
  const unsigned char *pvalid_models = NULL;

  switch (DECL_FUNCTION_CODE (decl: callee))
    {
      /* Expands to the case labels for every __sync and __atomic
	 built-in with access size N, setting BYTES, the memory model
	 argument positions, and the set of valid models for each.  */
#define BUILTIN_ACCESS_SIZE_FNSPEC(N)			\
      BUILT_IN_SYNC_FETCH_AND_ADD_ ## N:		\
      case BUILT_IN_SYNC_FETCH_AND_SUB_ ## N:	\
      case BUILT_IN_SYNC_FETCH_AND_OR_ ## N:		\
      case BUILT_IN_SYNC_FETCH_AND_AND_ ## N:	\
      case BUILT_IN_SYNC_FETCH_AND_XOR_ ## N:	\
      case BUILT_IN_SYNC_FETCH_AND_NAND_ ## N:	\
      case BUILT_IN_SYNC_ADD_AND_FETCH_ ## N:	\
      case BUILT_IN_SYNC_SUB_AND_FETCH_ ## N:	\
      case BUILT_IN_SYNC_OR_AND_FETCH_ ## N:		\
      case BUILT_IN_SYNC_AND_AND_FETCH_ ## N:	\
      case BUILT_IN_SYNC_XOR_AND_FETCH_ ## N:	\
      case BUILT_IN_SYNC_NAND_AND_FETCH_ ## N:	\
      case BUILT_IN_SYNC_LOCK_TEST_AND_SET_ ## N:	\
      case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_ ## N:	\
      case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_ ## N:	\
      case BUILT_IN_SYNC_LOCK_RELEASE_ ## N:		\
	bytes = N;					\
	break;						\
      case BUILT_IN_ATOMIC_LOAD_ ## N:		\
	pvalid_models = load_models;			\
	sucs_arg = 1;					\
	/* FALLTHROUGH */				\
      case BUILT_IN_ATOMIC_STORE_ ## N:		\
	if (!pvalid_models)				\
	  pvalid_models = store_models;		\
	/* FALLTHROUGH */				\
      case BUILT_IN_ATOMIC_ADD_FETCH_ ## N:		\
      case BUILT_IN_ATOMIC_SUB_FETCH_ ## N:		\
      case BUILT_IN_ATOMIC_AND_FETCH_ ## N:		\
      case BUILT_IN_ATOMIC_NAND_FETCH_ ## N:		\
      case BUILT_IN_ATOMIC_XOR_FETCH_ ## N:		\
      case BUILT_IN_ATOMIC_OR_FETCH_ ## N:		\
      case BUILT_IN_ATOMIC_FETCH_ADD_ ## N:		\
      case BUILT_IN_ATOMIC_FETCH_SUB_ ## N:		\
      case BUILT_IN_ATOMIC_FETCH_AND_ ## N:		\
      case BUILT_IN_ATOMIC_FETCH_NAND_ ## N:		\
      case BUILT_IN_ATOMIC_FETCH_OR_ ## N:		\
      case BUILT_IN_ATOMIC_FETCH_XOR_ ## N:		\
	bytes = N;					\
	if (sucs_arg == UINT_MAX)			\
	  sucs_arg = 2;				\
	if (!pvalid_models)				\
	  pvalid_models = all_models;			\
	break;					\
      case BUILT_IN_ATOMIC_EXCHANGE_ ## N:		\
	bytes = N;					\
	sucs_arg = 3;					\
	pvalid_models = xchg_models;			\
	break;					\
      case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_ ## N:	\
	bytes = N;					\
	sucs_arg = 4;					\
	fail_arg = 5;					\
	pvalid_models = all_models;			\
	arg2 = 1

    case BUILTIN_ACCESS_SIZE_FNSPEC (1);
      break;
    case BUILTIN_ACCESS_SIZE_FNSPEC (2);
      break;
    case BUILTIN_ACCESS_SIZE_FNSPEC (4);
      break;
    case BUILTIN_ACCESS_SIZE_FNSPEC (8);
      break;
    case BUILTIN_ACCESS_SIZE_FNSPEC (16);
      break;

    case BUILT_IN_ATOMIC_CLEAR:
      sucs_arg = 1;
      pvalid_models = flag_clr_models;
      break;

    default:
      /* Not an atomic or sync built-in.  */
      return false;
    }

  /* Validate the memory model argument(s) if the call has them.  */
  unsigned nargs = gimple_call_num_args (gs: stmt);
  if (sucs_arg < nargs)
    {
      tree ord_sucs = gimple_call_arg (gs: stmt, index: sucs_arg);
      tree ord_fail = NULL_TREE;
      if (fail_arg < nargs)
	ord_fail = gimple_call_arg (gs: stmt, index: fail_arg);
      check_atomic_memmodel (stmt, ord_sucs, ord_fail, valid: pvalid_models);
    }

  if (!bytes)
    return true;

  /* Check the BYTES-byte access through the first pointer argument
     (and, for compare-exchange, also the expected-value pointer).  */
  tree size = build_int_cstu (sizetype, bytes);
  tree dst = gimple_call_arg (gs: stmt, index: 0);
  check_memop_access (stmt, dest: dst, NULL_TREE, size);

  if (arg2 != UINT_MAX)
    {
      tree dst = gimple_call_arg (gs: stmt, index: arg2);
      check_memop_access (stmt, dest: dst, NULL_TREE, size);
    }

  return true;
}
3139 | |
/* Check call STMT to a built-in function for invalid accesses.  Return
   true if a call has been handled.  */

bool
pass_waccess::check_builtin (gcall *stmt)
{
  tree callee = gimple_call_fndecl (gs: stmt);
  if (!callee)
    return false;

  switch (DECL_FUNCTION_CODE (decl: callee))
    {
    /* Stack allocation.  */
    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
    case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
      check_alloca (stmt);
      return true;

    /* The exec family reads the path/file name argument.  */
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECVE:
    case BUILT_IN_EXECVP:
      check_read_access (stmt, src: call_arg (stmt, argno: 0));
      return true;

    /* Deallocators: check all uses of the pointer being freed.  */
    case BUILT_IN_FREE:
    case BUILT_IN_REALLOC:
      if (!m_early_checks_p)
	{
	  tree arg = call_arg (stmt, argno: 0);
	  if (TREE_CODE (arg) == SSA_NAME)
	    check_pointer_uses (stmt, arg);
	}
      return true;

    /* Functions that read a single nul-terminated string argument.  */
    case BUILT_IN_GETTEXT:
    case BUILT_IN_PUTS:
    case BUILT_IN_PUTS_UNLOCKED:
    case BUILT_IN_STRDUP:
      check_read_access (stmt, src: call_arg (stmt, argno: 0));
      return true;

    case BUILT_IN_INDEX:
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
    case BUILT_IN_STRLEN:
      check_read_access (stmt, src: call_arg (stmt, argno: 0));
      return true;

    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      check_read_access (stmt, src: call_arg (stmt, argno: 0));
      return true;

    /* Bounded reads of the first argument.  */
    case BUILT_IN_STRNDUP:
    case BUILT_IN_STRNLEN:
      {
	tree str = call_arg (stmt, argno: 0);
	tree len = call_arg (stmt, argno: 1);
	check_read_access (stmt, src: str, bound: len);
	return true;
      }

    case BUILT_IN_STRCAT:
      check_strcat (stmt);
      return true;

    case BUILT_IN_STRNCAT:
      check_strncat (stmt);
      return true;

    case BUILT_IN_STPCPY:
    case BUILT_IN_STRCPY:
      check_stxcpy (stmt);
      return true;

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STRNCPY:
      check_stxncpy (stmt);
      return true;

    /* Functions that read two nul-terminated string arguments.  */
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRPBRK:
    case BUILT_IN_STRSPN:
    case BUILT_IN_STRCSPN:
    case BUILT_IN_STRSTR:
      check_read_access (stmt, src: call_arg (stmt, argno: 0));
      check_read_access (stmt, src: call_arg (stmt, argno: 1));
      return true;

    case BUILT_IN_STRNCASECMP:
    case BUILT_IN_STRNCMP:
      check_strncmp (stmt);
      return true;

    /* Bounded raw-memory reads of both arguments (Object Size type 0).  */
    case BUILT_IN_MEMCMP:
      {
	tree a1 = call_arg (stmt, argno: 0);
	tree a2 = call_arg (stmt, argno: 1);
	tree len = call_arg (stmt, argno: 2);
	check_read_access (stmt, src: a1, bound: len, ost: 0);
	check_read_access (stmt, src: a2, bound: len, ost: 0);
	return true;
      }

    /* Raw-memory copies: check both source and destination.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      {
	tree dst = call_arg (stmt, argno: 0);
	tree src = call_arg (stmt, argno: 1);
	tree len = call_arg (stmt, argno: 2);
	check_memop_access (stmt, dest: dst, src, size: len);
	return true;
      }

    case BUILT_IN_MEMCHR:
      {
	tree src = call_arg (stmt, argno: 0);
	tree len = call_arg (stmt, argno: 2);
	check_read_access (stmt, src, bound: len, ost: 0);
	return true;
      }

    case BUILT_IN_MEMSET:
      {
	tree dst = call_arg (stmt, argno: 0);
	tree len = call_arg (stmt, argno: 2);
	check_memop_access (stmt, dest: dst, NULL_TREE, size: len);
	return true;
      }

    default:
      /* Fall back to the atomic/sync built-in checker.  */
      if (check_atomic_builtin (stmt))
	return true;
      break;
    }

  return false;
}
3284 | |
3285 | /* Returns the type of the argument ARGNO to function with type FNTYPE |
3286 | or null when the type cannot be determined or no such argument exists. */ |
3287 | |
3288 | static tree |
3289 | fntype_argno_type (tree fntype, unsigned argno) |
3290 | { |
3291 | if (!prototype_p (fntype)) |
3292 | return NULL_TREE; |
3293 | |
3294 | tree argtype; |
3295 | function_args_iterator it; |
3296 | FOREACH_FUNCTION_ARGS (fntype, argtype, it) |
3297 | if (argno-- == 0) |
3298 | return argtype; |
3299 | |
3300 | return NULL_TREE; |
3301 | } |
3302 | |
3303 | /* Helper to append the "human readable" attribute access specification |
3304 | described by ACCESS to the array ATTRSTR with size STRSIZE. Used in |
3305 | diagnostics. */ |
3306 | |
3307 | static inline void |
3308 | append_attrname (const std::pair<int, attr_access> &access, |
3309 | char *attrstr, size_t strsize) |
3310 | { |
3311 | if (access.second.internal_p) |
3312 | return; |
3313 | |
3314 | tree str = access.second.to_external_string (); |
3315 | gcc_assert (strsize >= (size_t) TREE_STRING_LENGTH (str)); |
3316 | strcpy (dest: attrstr, TREE_STRING_POINTER (str)); |
3317 | } |
3318 | |
3319 | /* Iterate over attribute access read-only, read-write, and write-only |
3320 | arguments and diagnose past-the-end accesses and related problems |
3321 | in the function call EXP. */ |
3322 | |
3323 | void |
3324 | pass_waccess::maybe_check_access_sizes (rdwr_map *rwm, tree fndecl, tree fntype, |
3325 | gimple *stmt) |
3326 | { |
3327 | if (warning_suppressed_p (stmt, OPT_Wnonnull) |
3328 | || warning_suppressed_p (stmt, OPT_Wstringop_overflow_)) |
3329 | return; |
3330 | |
3331 | auto_diagnostic_group adg; |
3332 | |
3333 | /* Set if a warning has been issued for any argument (used to decide |
3334 | whether to emit an informational note at the end). */ |
3335 | opt_code opt_warned = no_warning; |
3336 | |
3337 | /* A string describing the attributes that the warnings issued by this |
3338 | function apply to. Used to print one informational note per function |
3339 | call, rather than one per warning. That reduces clutter. */ |
3340 | char attrstr[80]; |
3341 | attrstr[0] = 0; |
3342 | |
3343 | for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it) |
3344 | { |
3345 | std::pair<int, attr_access> access = *it; |
3346 | |
3347 | /* Get the function call arguments corresponding to the attribute's |
3348 | positional arguments. When both arguments have been specified |
3349 | there will be two entries in *RWM, one for each. They are |
3350 | cross-referenced by their respective argument numbers in |
3351 | ACCESS.PTRARG and ACCESS.SIZARG. */ |
3352 | const int ptridx = access.second.ptrarg; |
3353 | const int sizidx = access.second.sizarg; |
3354 | |
3355 | gcc_assert (ptridx != -1); |
3356 | gcc_assert (access.first == ptridx || access.first == sizidx); |
3357 | |
3358 | /* The pointer is set to null for the entry corresponding to |
3359 | the size argument. Skip it. It's handled when the entry |
3360 | corresponding to the pointer argument comes up. */ |
3361 | if (!access.second.ptr) |
3362 | continue; |
3363 | |
3364 | tree ptrtype = fntype_argno_type (fntype, argno: ptridx); |
3365 | if (!ptrtype) |
3366 | /* A function with a prototype was redeclared without one and |
3367 | the prototype has been lost. See pr102759. Avoid dealing |
3368 | with this pathological case. */ |
3369 | return; |
3370 | |
3371 | tree argtype = TREE_TYPE (ptrtype); |
3372 | |
3373 | /* The size of the access by the call in elements. */ |
3374 | tree access_nelts; |
3375 | if (sizidx == -1) |
3376 | { |
3377 | /* If only the pointer attribute operand was specified and |
3378 | not size, set SIZE to the greater of MINSIZE or size of |
3379 | one element of the pointed to type to detect smaller |
3380 | objects (null pointers are diagnosed in this case only |
3381 | if the pointer is also declared with attribute nonnull. */ |
3382 | if (access.second.minsize |
3383 | && access.second.minsize != HOST_WIDE_INT_M1U) |
3384 | access_nelts = build_int_cstu (sizetype, access.second.minsize); |
3385 | else if (VOID_TYPE_P (argtype) && access.second.mode == access_none) |
3386 | /* Treat access mode none on a void* argument as expecting |
3387 | as little as zero bytes. */ |
3388 | access_nelts = size_zero_node; |
3389 | else |
3390 | access_nelts = size_one_node; |
3391 | } |
3392 | else |
3393 | access_nelts = rwm->get (k: sizidx)->size; |
3394 | |
3395 | /* Format the value or range to avoid an explosion of messages. */ |
3396 | char sizstr[80]; |
3397 | tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) }; |
3398 | if (get_size_range (m_ptr_qry.rvals, access_nelts, stmt, sizrng, 1)) |
3399 | { |
3400 | char *s0 = print_generic_expr_to_str (sizrng[0]); |
3401 | if (tree_int_cst_equal (sizrng[0], sizrng[1])) |
3402 | { |
3403 | gcc_checking_assert (strlen (s0) < sizeof sizstr); |
3404 | strcpy (dest: sizstr, src: s0); |
3405 | } |
3406 | else |
3407 | { |
3408 | char *s1 = print_generic_expr_to_str (sizrng[1]); |
3409 | gcc_checking_assert (strlen (s0) + strlen (s1) |
3410 | < sizeof sizstr - 4); |
3411 | sprintf (s: sizstr, format: "[%.37s, %.37s]" , s0, s1); |
3412 | free (ptr: s1); |
3413 | } |
3414 | free (ptr: s0); |
3415 | } |
3416 | else |
3417 | *sizstr = '\0'; |
3418 | |
3419 | /* Set if a warning has been issued for the current argument. */ |
3420 | opt_code arg_warned = no_warning; |
3421 | location_t loc = get_location (stmt); |
3422 | tree ptr = access.second.ptr; |
3423 | if (*sizstr |
3424 | && tree_int_cst_sgn (sizrng[0]) < 0 |
3425 | && tree_int_cst_sgn (sizrng[1]) < 0) |
3426 | { |
3427 | /* Warn about negative sizes. */ |
3428 | if (access.second.internal_p) |
3429 | { |
3430 | const std::string argtypestr |
3431 | = access.second.array_as_string (ptrtype); |
3432 | |
3433 | if (warning_at (loc, OPT_Wstringop_overflow_, |
3434 | "bound argument %i value %s is " |
3435 | "negative for a variable length array " |
3436 | "argument %i of type %s" , |
3437 | sizidx + 1, sizstr, |
3438 | ptridx + 1, argtypestr.c_str ())) |
3439 | arg_warned = OPT_Wstringop_overflow_; |
3440 | } |
3441 | else if (warning_at (loc, OPT_Wstringop_overflow_, |
3442 | "argument %i value %s is negative" , |
3443 | sizidx + 1, sizstr)) |
3444 | arg_warned = OPT_Wstringop_overflow_; |
3445 | |
3446 | if (arg_warned != no_warning) |
3447 | { |
3448 | append_attrname (access, attrstr, strsize: sizeof attrstr); |
3449 | /* Remember a warning has been issued and avoid warning |
3450 | again below for the same attribute. */ |
3451 | opt_warned = arg_warned; |
3452 | continue; |
3453 | } |
3454 | } |
3455 | |
3456 | /* The size of the access by the call in bytes. */ |
3457 | tree access_size = NULL_TREE; |
3458 | if (tree_int_cst_sgn (sizrng[0]) >= 0) |
3459 | { |
3460 | if (COMPLETE_TYPE_P (argtype)) |
3461 | { |
3462 | /* Multiply ACCESS_SIZE by the size of the type the pointer |
3463 | argument points to. If it's incomplete the size is used |
3464 | as is. */ |
3465 | if (tree argsize = TYPE_SIZE_UNIT (argtype)) |
3466 | if (TREE_CODE (argsize) == INTEGER_CST) |
3467 | { |
3468 | const int prec = TYPE_PRECISION (sizetype); |
3469 | wide_int minsize = wi::to_wide (t: sizrng[0], prec); |
3470 | minsize *= wi::to_wide (t: argsize, prec); |
3471 | access_size = wide_int_to_tree (sizetype, cst: minsize); |
3472 | } |
3473 | } |
3474 | else |
3475 | access_size = access_nelts; |
3476 | } |
3477 | |
3478 | if (integer_zerop (ptr)) |
3479 | { |
3480 | if (!access.second.internal_p |
3481 | && sizidx >= 0 && tree_int_cst_sgn (sizrng[0]) > 0) |
3482 | { |
3483 | /* Warn about null pointers with positive sizes. This is |
3484 | different from also declaring the pointer argument with |
3485 | attribute nonnull when the function accepts null pointers |
3486 | only when the corresponding size is zero. */ |
3487 | if (warning_at (loc, OPT_Wnonnull, |
3488 | "argument %i is null but " |
3489 | "the corresponding size argument " |
3490 | "%i value is %s" , |
3491 | ptridx + 1, sizidx + 1, sizstr)) |
3492 | arg_warned = OPT_Wnonnull; |
3493 | } |
3494 | else if (access_size && access.second.static_p) |
3495 | { |
3496 | /* Warn about null pointers for [static N] array arguments |
3497 | but do not warn for ordinary (i.e., nonstatic) arrays. */ |
3498 | if (warning_at (loc, OPT_Wnonnull, |
3499 | "argument %i to %<%T[static %E]%> " |
3500 | "is null where non-null expected" , |
3501 | ptridx + 1, argtype, access_nelts)) |
3502 | arg_warned = OPT_Wnonnull; |
3503 | } |
3504 | |
3505 | if (arg_warned != no_warning) |
3506 | { |
3507 | append_attrname (access, attrstr, strsize: sizeof attrstr); |
3508 | /* Remember a warning has been issued and avoid warning |
3509 | again below for the same attribute. */ |
3510 | opt_warned = OPT_Wnonnull; |
3511 | continue; |
3512 | } |
3513 | } |
3514 | |
3515 | access_data data (m_ptr_qry.rvals, stmt, access.second.mode, |
3516 | NULL_TREE, false, NULL_TREE, false); |
3517 | access_ref* const pobj = (access.second.mode == access_write_only |
3518 | ? &data.dst : &data.src); |
3519 | tree objsize = compute_objsize (ptr, stmt, 1, pobj, &m_ptr_qry); |
3520 | |
3521 | /* The size of the destination or source object. */ |
3522 | tree dstsize = NULL_TREE, srcsize = NULL_TREE; |
3523 | if (access.second.mode == access_read_only |
3524 | || access.second.mode == access_none) |
3525 | { |
3526 | /* For a read-only argument there is no destination. For |
3527 | no access, set the source as well and differentiate via |
3528 | the access flag below. */ |
3529 | srcsize = objsize; |
3530 | if (access.second.mode == access_read_only |
3531 | || access.second.mode == access_none) |
3532 | { |
3533 | /* For a read-only attribute there is no destination so |
3534 | clear OBJSIZE. This emits "reading N bytes" kind of |
3535 | diagnostics instead of the "writing N bytes" kind, |
3536 | unless MODE is none. */ |
3537 | objsize = NULL_TREE; |
3538 | } |
3539 | } |
3540 | else |
3541 | dstsize = objsize; |
3542 | |
3543 | /* Clear the no-warning bit in case it was set by check_access |
3544 | in a prior iteration so that accesses via different arguments |
3545 | are diagnosed. */ |
3546 | suppress_warning (stmt, OPT_Wstringop_overflow_, false); |
3547 | access_mode mode = data.mode; |
3548 | if (mode == access_deferred) |
3549 | mode = TYPE_READONLY (argtype) ? access_read_only : access_read_write; |
3550 | check_access (stmt, dstwrite: access_size, /*maxread=*/ NULL_TREE, srcstr: srcsize, |
3551 | dstsize, mode, pad: &data, rvals: m_ptr_qry.rvals); |
3552 | |
3553 | if (warning_suppressed_p (stmt, OPT_Wstringop_overflow_)) |
3554 | opt_warned = OPT_Wstringop_overflow_; |
3555 | if (opt_warned != no_warning) |
3556 | { |
3557 | if (access.second.internal_p) |
3558 | { |
3559 | unsigned HOST_WIDE_INT nelts = |
3560 | access_nelts ? access.second.minsize : HOST_WIDE_INT_M1U; |
3561 | tree arrtype = build_printable_array_type (argtype, nelts); |
3562 | inform (loc, "referencing argument %u of type %qT" , |
3563 | ptridx + 1, arrtype); |
3564 | } |
3565 | else |
3566 | /* If check_access issued a warning above, append the relevant |
3567 | attribute to the string. */ |
3568 | append_attrname (access, attrstr, strsize: sizeof attrstr); |
3569 | } |
3570 | } |
3571 | |
3572 | if (*attrstr) |
3573 | { |
3574 | if (fndecl) |
3575 | inform (get_location (x: fndecl), |
3576 | "in a call to function %qD declared with attribute %qs" , |
3577 | fndecl, attrstr); |
3578 | else |
3579 | inform (get_location (stmt), |
3580 | "in a call with type %qT and attribute %qs" , |
3581 | fntype, attrstr); |
3582 | } |
3583 | else if (opt_warned != no_warning) |
3584 | { |
3585 | if (fndecl) |
3586 | inform (get_location (x: fndecl), |
3587 | "in a call to function %qD" , fndecl); |
3588 | else |
3589 | inform (get_location (stmt), |
3590 | "in a call with type %qT" , fntype); |
3591 | } |
3592 | |
3593 | /* Set the bit in case it was cleared and not set above. */ |
3594 | if (opt_warned != no_warning) |
3595 | suppress_warning (stmt, opt_warned); |
3596 | } |
3597 | |
3598 | /* Check call STMT to an ordinary (non-built-in) function for invalid |
3599 | accesses. Return true if a call has been handled. */ |
3600 | |
3601 | bool |
3602 | pass_waccess::check_call_access (gcall *stmt) |
3603 | { |
3604 | tree fntype = gimple_call_fntype (gs: stmt); |
3605 | if (!fntype) |
3606 | return false; |
3607 | |
3608 | tree fntypeattrs = TYPE_ATTRIBUTES (fntype); |
3609 | if (!fntypeattrs) |
3610 | return false; |
3611 | |
3612 | /* Map of attribute access specifications for function arguments. */ |
3613 | rdwr_map rdwr_idx; |
3614 | init_attr_rdwr_indices (&rdwr_idx, fntypeattrs); |
3615 | |
3616 | unsigned nargs = call_nargs (stmt); |
3617 | for (unsigned i = 0; i != nargs; ++i) |
3618 | { |
3619 | tree arg = call_arg (stmt, argno: i); |
3620 | |
3621 | /* Save the actual argument that corresponds to the access attribute |
3622 | operand for later processing. */ |
3623 | if (attr_access *access = rdwr_idx.get (k: i)) |
3624 | { |
3625 | if (POINTER_TYPE_P (TREE_TYPE (arg))) |
3626 | { |
3627 | access->ptr = arg; |
3628 | /* A nonnull ACCESS->SIZE contains VLA bounds. */ |
3629 | } |
3630 | else |
3631 | { |
3632 | access->size = arg; |
3633 | gcc_assert (access->ptr == NULL_TREE); |
3634 | } |
3635 | } |
3636 | } |
3637 | |
3638 | /* Check attribute access arguments. */ |
3639 | tree fndecl = gimple_call_fndecl (gs: stmt); |
3640 | maybe_check_access_sizes (rwm: &rdwr_idx, fndecl, fntype, stmt); |
3641 | |
3642 | check_alloc_size_call (stmt); |
3643 | return true; |
3644 | } |
3645 | |
3646 | /* Check arguments in a call STMT for attribute nonstring. */ |
3647 | |
3648 | static void |
3649 | check_nonstring_args (gcall *stmt) |
3650 | { |
3651 | tree fndecl = gimple_call_fndecl (gs: stmt); |
3652 | |
3653 | /* Detect passing non-string arguments to functions expecting |
3654 | nul-terminated strings. */ |
3655 | maybe_warn_nonstring_arg (fndecl, exp: stmt); |
3656 | } |
3657 | |
3658 | /* Issue a warning if a deallocation function such as free, realloc, |
3659 | or C++ operator delete is called with an argument not returned by |
3660 | a matching allocation function such as malloc or the corresponding |
3661 | form of C++ operator new. */ |
3662 | |
void
pass_waccess::maybe_check_dealloc_call (gcall *call)
{
  tree fndecl = gimple_call_fndecl (gs: call);
  if (!fndecl)
    return;

  /* Bail unless FNDECL is a deallocation function and the call passes
     an argument in the deallocated-pointer position.  */
  unsigned argno = fndecl_dealloc_argno (fndecl);
  if ((unsigned) call_nargs (stmt: call) <= argno)
    return;

  /* Deallocating a null pointer is valid; nothing to check.  */
  tree ptr = gimple_call_arg (gs: call, index: argno);
  if (integer_zerop (ptr))
    return;

  /* Determine the object PTR refers to; give up if it can't be
     determined.  */
  access_ref aref;
  if (!compute_objsize (ptr, call, 0, &aref, &m_ptr_qry))
    return;

  tree ref = aref.ref;
  if (integer_zerop (ref))
    return;

  tree dealloc_decl = fndecl;
  location_t loc = gimple_location (g: call);

  if (DECL_P (ref) || EXPR_P (ref))
    {
      /* Diagnose freeing a declared object.  */
      if (aref.ref_declared ())
	{
	  auto_diagnostic_group d;
	  if (warning_at (loc, OPT_Wfree_nonheap_object,
			  "%qD called on unallocated object %qD" ,
			  dealloc_decl, ref))
	    {
	      inform (get_location (x: ref), "declared here" );
	      return;
	    }
	}

      /* Diagnose freeing a pointer that includes a positive offset.
	 Such a pointer cannot refer to the beginning of an allocated
	 object.  A negative offset may refer to it.  */
      if (aref.sizrng[0] != aref.sizrng[1]
	  && warn_dealloc_offset (loc, call, aref))
	return;
    }
  else if (CONSTANT_CLASS_P (ref))
    {
      /* The pointer refers to a constant (e.g., a string literal).  */
      auto_diagnostic_group d;
      if (warning_at (loc, OPT_Wfree_nonheap_object,
		      "%qD called on a pointer to an unallocated "
		      "object %qE" , dealloc_decl, ref))
	{
	  /* Point at the statement that assigned the constant address
	     when one can be identified.  */
	  if (TREE_CODE (ptr) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
	      if (is_gimple_assign (gs: def_stmt))
		{
		  location_t loc = gimple_location (g: def_stmt);
		  inform (loc, "assigned here" );
		}
	    }
	  return;
	}
    }
  else if (TREE_CODE (ref) == SSA_NAME)
    {
      /* Also warn if the pointer argument refers to the result
	 of an allocation call like alloca or VLA.  */
      gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
      if (!def_stmt)
	return;

      if (is_gimple_call (gs: def_stmt))
	{
	  bool warned = false;
	  if (gimple_call_alloc_p (stmt: def_stmt))
	    {
	      if (matching_alloc_calls_p (alloc: def_stmt, dealloc_decl))
		{
		  /* Matched allocator/deallocator: only an offset into
		     the allocated block can be wrong here.  */
		  if (warn_dealloc_offset (loc, call, aref))
		    return;
		}
	      else
		{
		  /* Mismatched pair; classify as new/delete mismatch when
		     either side is a C++ operator, otherwise as a generic
		     dealloc mismatch.  */
		  tree alloc_decl = gimple_call_fndecl (gs: def_stmt);
		  const opt_code opt =
		    (DECL_IS_OPERATOR_NEW_P (alloc_decl)
		     || DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
		     ? OPT_Wmismatched_new_delete
		     : OPT_Wmismatched_dealloc);
		  warned = warning_at (loc, opt,
				       "%qD called on pointer returned "
				       "from a mismatched allocation "
				       "function" , dealloc_decl);
		}
	    }
	  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_ALLOCA)
		   || gimple_call_builtin_p (def_stmt,
					     BUILT_IN_ALLOCA_WITH_ALIGN))
	    /* Stack memory obtained from alloca/VLA must not be freed.  */
	    warned = warning_at (loc, OPT_Wfree_nonheap_object,
				 "%qD called on pointer to "
				 "an unallocated object" ,
				 dealloc_decl);
	  else if (warn_dealloc_offset (loc, call, aref))
	    return;

	  if (warned)
	    {
	      tree fndecl = gimple_call_fndecl (gs: def_stmt);
	      inform (gimple_location (g: def_stmt),
		      "returned from %qD" , fndecl);
	      return;
	    }
	}
      else if (gimple_nop_p (g: def_stmt))
	{
	  /* REF is an incoming argument (default definition).  */
	  ref = SSA_NAME_VAR (ref);
	  /* Diagnose freeing a pointer that includes a positive offset.  */
	  if (TREE_CODE (ref) == PARM_DECL
	      && !aref.deref
	      && aref.sizrng[0] != aref.sizrng[1]
	      && aref.offrng[0] > 0 && aref.offrng[1] > 0
	      && warn_dealloc_offset (loc, call, aref))
	    return;
	}
    }
}
3793 | |
3794 | /* Return true if either USE_STMT's basic block (that of a pointer's use) |
3795 | is dominated by INVAL_STMT's (that of a pointer's invalidating statement, |
3796 | which is either a clobber or a deallocation call), or if they're in |
3797 | the same block, USE_STMT follows INVAL_STMT. */ |
3798 | |
bool
pass_waccess::use_after_inval_p (gimple *inval_stmt, gimple *use_stmt,
				 bool last_block /* = false */)
{
  /* Nonnull CLOBVAR means INVAL_STMT marks the end of a variable's
     lifetime rather than a deallocation call.  */
  tree clobvar =
    gimple_clobber_p (s: inval_stmt) ? gimple_assign_lhs (gs: inval_stmt) : NULL_TREE;

  basic_block inval_bb = gimple_bb (g: inval_stmt);
  basic_block use_bb = gimple_bb (g: use_stmt);

  /* Statements not in the CFG (e.g., not yet inserted) can't be ordered.  */
  if (!inval_bb || !use_bb)
    return false;

  if (inval_bb != use_bb)
    {
      if (dominated_by_p (CDI_DOMINATORS, use_bb, inval_bb))
	return true;

      if (!clobvar || !last_block)
	return false;

      /* Proceed only when looking for uses of dangling pointers.  */
      auto gsi = gsi_for_stmt (use_stmt);

      /* A use statement in the last basic block in a function or one that
	 falls through to it is after any other prior clobber of the used
	 variable unless it's followed by a clobber of the same variable.  */
      basic_block bb = use_bb;
      /* Walk forward along single-successor fallthrough edges from the
	 use toward the function exit, scanning for a re-clobber.  */
      while (bb != inval_bb
	     && single_succ_p (bb)
	     && !(single_succ_edge (bb)->flags
		  & (EDGE_EH | EDGE_ABNORMAL | EDGE_DFS_BACK)))
	{
	  for (; !gsi_end_p (i: gsi); gsi_next_nondebug (i: &gsi))
	    {
	      gimple *stmt = gsi_stmt (i: gsi);
	      if (gimple_clobber_p (s: stmt))
		{
		  if (clobvar == gimple_assign_lhs (gs: stmt))
		    /* The use is followed by a clobber.  */
		    return false;
		}
	    }

	  bb = single_succ (bb);
	  gsi = gsi_start_bb (bb);
	}

      /* The use is one of a dangling pointer if a clobber of the variable
	 [the pointer points to] has not been found before the function exit
	 point.  */
      return bb == EXIT_BLOCK_PTR_FOR_FN (cfun);
    }

  /* Both statements are in the same block; order them by statement uid.  */
  if (bitmap_set_bit (m_bb_uids_set, inval_bb->index))
    /* The first time this basic block is visited assign increasing ids
       to consecutive statements in it.  Use the ids to determine which
       precedes which.  This avoids the linear traversal on subsequent
       visits to the same block.  */
    renumber_gimple_stmt_uids_in_block (m_func, inval_bb);

  return gimple_uid (g: inval_stmt) < gimple_uid (g: use_stmt);
}
3862 | |
3863 | /* Issue a warning for the USE_STMT of pointer or reference REF rendered |
3864 | invalid by INVAL_STMT. REF may be null when it's been optimized away. |
3865 | When nonnull, INVAL_STMT is the deallocation function that rendered |
3866 | the pointer or reference dangling. Otherwise, VAR is the auto variable |
3867 | (including an unnamed temporary such as a compound literal) whose |
   lifetime's end rendered it dangling.  MAYBE is true to issue the "maybe"
3869 | kind of warning. EQUALITY is true when the pointer is used in |
3870 | an equality expression. */ |
3871 | |
void
pass_waccess::warn_invalid_pointer (tree ref, gimple *use_stmt,
                                    gimple *inval_stmt, tree var,
                                    bool maybe, bool equality /* = false */)
{
  /* Avoid printing the unhelpful "<unknown>" in the diagnostics.  */
  if (ref && TREE_CODE (ref) == SSA_NAME)
    {
      tree var = SSA_NAME_VAR (ref);
      if (!var)
	ref = NULL_TREE;
      /* Don't warn for cases like when a cdtor returns 'this' on ARM.  */
      else if (warning_suppressed_p (var, OPT_Wuse_after_free))
	return;
      else if (DECL_ARTIFICIAL (var))
	ref = NULL_TREE;
    }

  location_t use_loc = gimple_location (g: use_stmt);
  if (use_loc == UNKNOWN_LOCATION)
    {
      use_loc = m_func->function_end_locus;
      if (!ref)
	/* Avoid issuing a warning with no context other than
	   the function.  That would make it difficult to debug
	   in any but very simple cases.  */
	return;
    }

  if (is_gimple_call (gs: inval_stmt))
    {
      /* INVAL_STMT is a deallocation call: -Wuse-after-free.
	 NOTE(review): the !m_early_checks_p test appears to restrict
	 this warning to one run of the pass — confirm which.  Levels:
	 >= 3 warns even for equality tests, >= 2 for "maybe" uses.  */
      if (!m_early_checks_p
	  || (equality && warn_use_after_free < 3)
	  || (maybe && warn_use_after_free < 2)
	  || warning_suppressed_p (use_stmt, OPT_Wuse_after_free))
	return;

      const tree inval_decl = gimple_call_fndecl (gs: inval_stmt);

      auto_diagnostic_group d;
      if ((ref && warning_at (use_loc, OPT_Wuse_after_free,
			      (maybe
			       ? G_("pointer %qE may be used after %qD" )
			       : G_("pointer %qE used after %qD" )),
			      ref, inval_decl))
	  || (!ref && warning_at (use_loc, OPT_Wuse_after_free,
				  (maybe
				   ? G_("pointer may be used after %qD" )
				   : G_("pointer used after %qD" )),
				  inval_decl)))
	{
	  location_t loc = gimple_location (g: inval_stmt);
	  inform (loc, "call to %qD here" , inval_decl);
	  suppress_warning (use_stmt, OPT_Wuse_after_free);
	}
      return;
    }

  /* Otherwise INVAL_STMT is a clobber: -Wdangling-pointer.  Level >= 2
     also warns for "maybe" uses; equality tests are never diagnosed.  */
  if (equality
      || (maybe && warn_dangling_pointer < 2)
      || warning_suppressed_p (use_stmt, OPT_Wdangling_pointer_))
    return;

  if (DECL_NAME (var))
    {
      /* The clobbered variable has a name to print.  */
      auto_diagnostic_group d;
      if ((ref
	   && warning_at (use_loc, OPT_Wdangling_pointer_,
			  (maybe
			   ? G_("dangling pointer %qE to %qD may be used" )
			   : G_("using dangling pointer %qE to %qD" )),
			  ref, var))
	  || (!ref
	      && warning_at (use_loc, OPT_Wdangling_pointer_,
			     (maybe
			      ? G_("dangling pointer to %qD may be used" )
			      : G_("using a dangling pointer to %qD" )),
			     var)))
	inform (DECL_SOURCE_LOCATION (var),
		"%qD declared here" , var);
      /* Suppress further warnings for this use even if none was issued
	 above (e.g., when the warning was disabled at this location).  */
      suppress_warning (use_stmt, OPT_Wdangling_pointer_);
      return;
    }

  /* The clobbered object is an unnamed temporary such as a compound
     literal.  */
  if ((ref
       && warning_at (use_loc, OPT_Wdangling_pointer_,
		      (maybe
		       ? G_("dangling pointer %qE to an unnamed temporary "
			    "may be used" )
		       : G_("using dangling pointer %qE to an unnamed "
			    "temporary" )),
		      ref))
      || (!ref
	  && warning_at (use_loc, OPT_Wdangling_pointer_,
			 (maybe
			  ? G_("dangling pointer to an unnamed temporary "
			       "may be used" )
			  : G_("using a dangling pointer to an unnamed "
			       "temporary" )))))
    {
      inform (DECL_SOURCE_LOCATION (var),
	      "unnamed temporary defined here" );
      suppress_warning (use_stmt, OPT_Wdangling_pointer_);
    }
}
3977 | |
3978 | /* If STMT is a call to either the standard realloc or to a user-defined |
3979 | reallocation function returns its LHS and set *PTR to the reallocated |
3980 | pointer. Otherwise return null. */ |
3981 | |
3982 | static tree |
3983 | get_realloc_lhs (gimple *stmt, tree *ptr) |
3984 | { |
3985 | if (gimple_call_builtin_p (stmt, BUILT_IN_REALLOC)) |
3986 | { |
3987 | *ptr = gimple_call_arg (gs: stmt, index: 0); |
3988 | return gimple_call_lhs (gs: stmt); |
3989 | } |
3990 | |
3991 | gcall *call = dyn_cast<gcall *>(p: stmt); |
3992 | if (!call) |
3993 | return NULL_TREE; |
3994 | |
3995 | tree fnattr = NULL_TREE; |
3996 | tree fndecl = gimple_call_fndecl (gs: call); |
3997 | if (fndecl) |
3998 | fnattr = DECL_ATTRIBUTES (fndecl); |
3999 | else |
4000 | { |
4001 | tree fntype = gimple_call_fntype (gs: stmt); |
4002 | if (!fntype) |
4003 | return NULL_TREE; |
4004 | fnattr = TYPE_ATTRIBUTES (fntype); |
4005 | } |
4006 | |
4007 | if (!fnattr) |
4008 | return NULL_TREE; |
4009 | |
4010 | for (tree ats = fnattr; (ats = lookup_attribute (attr_name: "*dealloc" , list: ats)); |
4011 | ats = TREE_CHAIN (ats)) |
4012 | { |
4013 | tree args = TREE_VALUE (ats); |
4014 | if (!args) |
4015 | continue; |
4016 | |
4017 | tree alloc = TREE_VALUE (args); |
4018 | if (!alloc) |
4019 | continue; |
4020 | |
4021 | if (alloc == DECL_NAME (fndecl)) |
4022 | { |
4023 | unsigned argno = 0; |
4024 | if (tree index = TREE_CHAIN (args)) |
4025 | argno = TREE_INT_CST_LOW (TREE_VALUE (index)) - 1; |
4026 | *ptr = gimple_call_arg (gs: stmt, index: argno); |
4027 | return gimple_call_lhs (gs: stmt); |
4028 | } |
4029 | } |
4030 | |
4031 | return NULL_TREE; |
4032 | } |
4033 | |
4034 | /* Warn if STMT is a call to a deallocation function that's not a match |
4035 | for the REALLOC_STMT call. Return true if warned. */ |
4036 | |
static bool
maybe_warn_mismatched_realloc (tree ptr, gimple *realloc_stmt, gimple *stmt)
{
  if (!is_gimple_call (gs: stmt))
    return false;

  tree fndecl = gimple_call_fndecl (gs: stmt);
  if (!fndecl)
    return false;

  /* Bail unless STMT is a deallocation call that passes an argument in
     the deallocated-pointer position.  */
  unsigned argno = fndecl_dealloc_argno (fndecl);
  if (call_nargs (stmt) <= argno)
    return false;

  /* No warning when the deallocator matches the reallocation function.  */
  if (matching_alloc_calls_p (alloc: realloc_stmt, dealloc_decl: fndecl))
    return false;

  /* Avoid printing the unhelpful "<unknown>" in the diagnostics.  */
  if (ptr && TREE_CODE (ptr) == SSA_NAME
      && (!SSA_NAME_VAR (ptr) || DECL_ARTIFICIAL (SSA_NAME_VAR (ptr))))
    ptr = NULL_TREE;

  location_t loc = gimple_location (g: stmt);
  tree realloc_decl = gimple_call_fndecl (gs: realloc_stmt);
  tree dealloc_decl = gimple_call_fndecl (gs: stmt);
  /* Issue one of two forms of the warning depending on whether a named
     pointer is available; return false if neither was issued (e.g., the
     warning is suppressed at LOC).  */
  if (ptr && !warning_at (loc, OPT_Wmismatched_dealloc,
			  "%qD called on pointer %qE passed to mismatched "
			  "allocation function %qD" ,
			  dealloc_decl, ptr, realloc_decl))
    return false;
  if (!ptr && !warning_at (loc, OPT_Wmismatched_dealloc,
			   "%qD called on a pointer passed to mismatched "
			   "reallocation function %qD" ,
			   dealloc_decl, realloc_decl))
    return false;

  inform (gimple_location (g: realloc_stmt),
	  "call to %qD" , realloc_decl);
  return true;
}
4077 | |
4078 | /* Return true if P and Q point to the same object, and false if they |
4079 | either don't or their relationship cannot be determined. */ |
4080 | |
static bool
pointers_related_p (gimple *stmt, tree p, tree q, pointer_query &qry,
		    auto_bitmap &visited)
{
  /* Cheap disambiguation first: pointers that cannot alias are
     unrelated.  */
  if (!ptr_derefs_may_alias_p (p, q))
    return false;

  /* TODO: Work harder to rule out relatedness.  */
  access_ref pref, qref;
  if (!qry.get_ref (p, stmt, &pref, 0)
      || !qry.get_ref (q, stmt, &qref, 0))
    /* GET_REF() only rarely fails.  When it does, it's likely because
       it involves a self-referential PHI.  Return a conservative result.  */
    return false;

  /* Same base object implies the pointers are related.  */
  if (pref.ref == qref.ref)
    return true;

  /* If either pointer is a PHI, iterate over all its operands and
     return true if they're all related to the other pointer.  */
  tree ptr = q;
  unsigned version;
  gphi *phi = pref.phi ();
  if (phi)
    version = SSA_NAME_VERSION (pref.ref);
  else
    {
      phi = qref.phi ();
      if (!phi)
	return false;

      ptr = p;
      version = SSA_NAME_VERSION (qref.ref);
    }

  /* VISITED breaks recursion on PHI cycles; a PHI already being
     examined is optimistically treated as related.  */
  if (!bitmap_set_bit (visited, version))
    return true;

  unsigned nargs = gimple_phi_num_args (gs: phi);
  for (unsigned i = 0; i != nargs; ++i)
    {
      tree arg = gimple_phi_arg_def (gs: phi, index: i);
      if (!pointers_related_p (stmt, p: arg, q: ptr, qry, visited))
	return false;
    }

  return true;
}
4129 | |
4130 | /* Convenience wrapper for the above. */ |
4131 | |
4132 | static bool |
4133 | pointers_related_p (gimple *stmt, tree p, tree q, pointer_query &qry) |
4134 | { |
4135 | auto_bitmap visited; |
4136 | return pointers_related_p (stmt, p, q, qry, visited); |
4137 | } |
4138 | |
4139 | /* For a STMT either a call to a deallocation function or a clobber, warn |
4140 | for uses of the pointer PTR it was called with (including its copies |
4141 | or others derived from it by pointer arithmetic). If STMT is a clobber, |
4142 | VAR is the decl of the clobbered variable. When MAYBE is true use |
4143 | a "maybe" form of diagnostic. */ |
4144 | |
void
pass_waccess::check_pointer_uses (gimple *stmt, tree ptr,
				  tree var /* = NULL_TREE */,
				  bool maybe /* = false */)
{
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  /* A non-call STMT is a clobber: we're checking for uses of pointers
     dangling after the end of a variable's lifetime.  */
  const bool check_dangling = !is_gimple_call (gs: stmt);
  basic_block stmt_bb = gimple_bb (g: stmt);

  /* If STMT is a reallocation function set to the reallocated pointer
     and the LHS of the call, respectively.  */
  tree realloc_ptr = NULL_TREE;
  tree realloc_lhs = get_realloc_lhs (stmt, ptr: &realloc_ptr);

  auto_bitmap visited;

  /* Worklist of pointers derived from PTR whose uses are to be checked.  */
  auto_vec<tree, 8> pointers;
  pointers.quick_push (obj: ptr);
  /* Lazily created map from a PHI result to the number of its operands
     not yet proved related to PTR.  */
  hash_map<tree, int> *phi_map = nullptr;

  /* Starting with PTR, iterate over POINTERS added by the loop, and
     either warn for their uses in basic blocks dominated by the STMT
     or in statements that follow it in the same basic block, or add
     them to POINTERS if they point into the same object as PTR (i.e.,
     are obtained by pointer arithmetic on PTR).  */
  for (unsigned i = 0; i != pointers.length (); ++i)
    {
      tree ptr = pointers[i];
      if (!bitmap_set_bit (visited, SSA_NAME_VERSION (ptr)))
	/* Avoid revisiting the same pointer.  */
	continue;

      use_operand_p use_p;
      imm_use_iterator iter;
      FOR_EACH_IMM_USE_FAST (use_p, iter, ptr)
	{
	  gimple *use_stmt = USE_STMT (use_p);
	  if (use_stmt == stmt || is_gimple_debug (gs: use_stmt))
	    continue;

	  /* A clobber isn't a use.  */
	  if (gimple_clobber_p (s: use_stmt))
	    continue;

	  if (realloc_lhs)
	    {
	      /* Check to see if USE_STMT is a mismatched deallocation
		 call for the pointer passed to realloc.  That's a bug
		 regardless of the pointer's value and so warn.  */
	      if (maybe_warn_mismatched_realloc (ptr: *use_p->use, realloc_stmt: stmt, stmt: use_stmt))
		continue;

	      /* Pointers passed to realloc that are used in basic blocks
		 where the realloc call is known to have failed are valid.
		 Ignore pointers that nothing is known about.  Those could
		 have escaped along with their nullness.  */
	      value_range vr;
	      if (m_ptr_qry.rvals->range_of_expr (r&: vr, expr: realloc_lhs, use_stmt))
		{
		  if (vr.zero_p ())
		    continue;

		  if (!pointers_related_p (stmt, p: ptr, q: realloc_ptr, qry&: m_ptr_qry))
		    continue;
		}
	    }

	  if (check_dangling
	      && gimple_code (g: use_stmt) == GIMPLE_RETURN)
	    /* Avoid interfering with -Wreturn-local-addr (which runs only
	       with optimization enabled so it won't diagnose cases that
	       would be caught here when optimization is disabled).  */
	    continue;

	  /* Note whether the use is only in an equality test, which is
	     diagnosed less aggressively (see warn_invalid_pointer).  */
	  bool equality = false;
	  if (is_gimple_assign (gs: use_stmt))
	    {
	      tree_code code = gimple_assign_rhs_code (gs: use_stmt);
	      equality = code == EQ_EXPR || code == NE_EXPR;
	    }
	  else if (gcond *cond = dyn_cast<gcond *>(p: use_stmt))
	    {
	      tree_code code = gimple_cond_code (gs: cond);
	      equality = code == EQ_EXPR || code == NE_EXPR;
	    }
	  else if (gphi *phi = dyn_cast <gphi *> (p: use_stmt))
	    {
	      /* Only add a PHI result to POINTERS if all its
		 operands are related to PTR, otherwise continue.  The
		 PHI result is related once we've reached all arguments
		 through this iteration.  That also means any invariant
		 argument will make the PHI not related.  For arguments
		 flowing over natural loop backedges we are optimistic
		 (and diagnose the first iteration).  */
	      tree lhs = gimple_phi_result (gs: phi);
	      if (!phi_map)
		phi_map = new hash_map<tree, int>;
	      bool existed_p;
	      int &related = phi_map->get_or_insert (k: lhs, existed: &existed_p);
	      if (!existed_p)
		{
		  /* First visit: count the operands that still need to be
		     proved related, discounting backedge arguments.  */
		  related = gimple_phi_num_args (gs: phi) - 1;
		  for (unsigned j = 0; j < gimple_phi_num_args (gs: phi); ++j)
		    {
		      if ((unsigned) phi_arg_index_from_use (use: use_p) == j)
			continue;
		      tree arg = gimple_phi_arg_def (gs: phi, index: j);
		      edge e = gimple_phi_arg_edge (phi, i: j);
		      basic_block arg_bb;
		      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest)
			  /* Make sure we are not forward visiting a
			     backedge argument.  */
			  && (TREE_CODE (arg) != SSA_NAME
			      || (!SSA_NAME_IS_DEFAULT_DEF (arg)
				  && ((arg_bb
				       = gimple_bb (SSA_NAME_DEF_STMT (arg)))
				      != e->dest)
				  && !dominated_by_p (CDI_DOMINATORS,
						      e->dest, arg_bb))))
			related--;
		    }
		}
	      else
		related--;

	      if (related == 0)
		pointers.safe_push (obj: lhs);
	      continue;
	    }

	  /* Warn if USE_STMT is dominated by the deallocation STMT.
	     Otherwise, add the pointer to POINTERS so that the uses
	     of any other pointers derived from it can be checked.  */
	  if (use_after_inval_p (inval_stmt: stmt, use_stmt, last_block: check_dangling))
	    {
	      /* Use the "maybe" form unless the invalidation is certain
		 to have happened on every path to the use.  */
	      basic_block use_bb = gimple_bb (g: use_stmt);
	      bool this_maybe
		= (maybe
		   || !dominated_by_p (CDI_POST_DOMINATORS, stmt_bb, use_bb));
	      warn_invalid_pointer (ref: *use_p->use, use_stmt, inval_stmt: stmt, var,
				    maybe: this_maybe, equality);
	      continue;
	    }

	  if (is_gimple_assign (gs: use_stmt))
	    {
	      /* Copies and pointer arithmetic yield derived pointers
		 whose uses must also be checked.  */
	      tree lhs = gimple_assign_lhs (gs: use_stmt);
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  tree_code rhs_code = gimple_assign_rhs_code (gs: use_stmt);
		  if (rhs_code == POINTER_PLUS_EXPR || rhs_code == SSA_NAME)
		    pointers.safe_push (obj: lhs);
		}
	      continue;
	    }

	  if (gcall *call = dyn_cast <gcall *>(p: use_stmt))
	    {
	      /* A call that returns its argument (e.g., mempcpy) also
		 yields a derived pointer.  */
	      if (gimple_call_return_arg (call) == ptr)
		if (tree lhs = gimple_call_lhs (gs: call))
		  if (TREE_CODE (lhs) == SSA_NAME)
		    pointers.safe_push (obj: lhs);
	      continue;
	    }
	}
    }

  if (phi_map)
    delete phi_map;
}
4316 | |
4317 | /* Check call STMT for invalid accesses. */ |
4318 | |
void
pass_waccess::check_call (gcall *stmt)
{
  /* Skip special calls generated by the compiler.  */
  if (gimple_call_from_thunk_p (s: stmt))
    return;

  /* .ASAN_MARK doesn't access any vars, only modifies shadow memory.  */
  if (gimple_call_internal_p (gs: stmt)
      && gimple_call_internal_fn (gs: stmt) == IFN_ASAN_MARK)
    return;

  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    check_builtin (stmt);

  if (tree callee = gimple_call_fndecl (gs: stmt))
    {
      /* Check for uses of the pointer passed to either a standard
	 or a user-defined deallocation function.  */
      unsigned argno = fndecl_dealloc_argno (callee);
      if (argno < (unsigned) call_nargs (stmt))
	{
	  tree arg = call_arg (stmt, argno);
	  if (TREE_CODE (arg) == SSA_NAME)
	    check_pointer_uses (stmt, ptr: arg);
	}
    }

  check_call_access (stmt);
  check_call_dangling (stmt);

  /* The remaining checks run only in the late invocation of the pass.  */
  if (m_early_checks_p)
    return;

  maybe_check_dealloc_call (call: stmt);
  check_nonstring_args (stmt);
}
4356 | |
4357 | /* Check non-call STMT for invalid accesses. */ |
4358 | |
void
pass_waccess::check_stmt (gimple *stmt)
{
  /* Record end-of-lifetime clobbers so later uses of pointers to
     the clobbered variables can be diagnosed as dangling.  */
  if (m_check_dangling_p
      && gimple_clobber_p (s: stmt, kind: CLOBBER_EOL))
    {
      /* Ignore clobber statements in blocks with exceptional edges.  */
      basic_block bb = gimple_bb (g: stmt);
      edge e = EDGE_PRED (bb, 0);
      if (e->flags & EDGE_EH)
	return;

      tree var = gimple_assign_lhs (gs: stmt);
      m_clobbers.put (k: var, v: stmt);
      return;
    }

  if (is_gimple_assign (gs: stmt))
    {
      /* Clobbered unnamed temporaries such as compound literals can be
	 revived.  Check for an assignment to one and remove it from
	 M_CLOBBERS.  */
      tree lhs = gimple_assign_lhs (gs: stmt);
      /* Strip component references down to the base object.  */
      while (handled_component_p (t: lhs))
	lhs = TREE_OPERAND (lhs, 0);

      if (auto_var_p (lhs))
	m_clobbers.remove (k: lhs);
      return;
    }

  if (greturn *ret = dyn_cast <greturn *> (p: stmt))
    {
      if (optimize && flag_isolate_erroneous_paths_dereference)
	/* Avoid interfering with -Wreturn-local-addr (which runs only
	   with optimization enabled).  */
	return;

      /* Diagnose returning the address of a clobbered local.  */
      tree arg = gimple_return_retval (gs: ret);
      if (!arg || TREE_CODE (arg) != ADDR_EXPR)
	return;

      arg = TREE_OPERAND (arg, 0);
      while (handled_component_p (t: arg))
	arg = TREE_OPERAND (arg, 0);

      if (!auto_var_p (arg))
	return;

      /* Only warn if a clobber of ARG was recorded and precedes
	 the return.  */
      gimple **pclobber = m_clobbers.get (k: arg);
      if (!pclobber)
	return;

      if (!use_after_inval_p (inval_stmt: *pclobber, use_stmt: stmt))
	return;

      warn_invalid_pointer (NULL_TREE, use_stmt: stmt, inval_stmt: *pclobber, var: arg, maybe: false);
    }
}
4418 | |
4419 | /* Check basic block BB for invalid accesses. */ |
4420 | |
4421 | void |
4422 | pass_waccess::check_block (basic_block bb) |
4423 | { |
4424 | /* Iterate over statements, looking for function calls. */ |
4425 | for (auto si = gsi_start_bb (bb); !gsi_end_p (i: si); |
4426 | gsi_next_nondebug (i: &si)) |
4427 | { |
4428 | gimple *stmt = gsi_stmt (i: si); |
4429 | if (gcall *call = dyn_cast <gcall *> (p: stmt)) |
4430 | check_call (stmt: call); |
4431 | else |
4432 | check_stmt (stmt); |
4433 | } |
4434 | } |
4435 | |
4436 | /* Return the argument that the call STMT to a built-in function returns |
4437 | (including with an offset) or null if it doesn't. */ |
4438 | |
4439 | tree |
4440 | pass_waccess::gimple_call_return_arg (gcall *call) |
4441 | { |
4442 | /* Check for attribute fn spec to see if the function returns one |
4443 | of its arguments. */ |
4444 | attr_fnspec fnspec = gimple_call_fnspec (stmt: call); |
4445 | unsigned int argno; |
4446 | if (!fnspec.returns_arg (arg_no: &argno)) |
4447 | { |
4448 | if (gimple_call_num_args (gs: call) < 1) |
4449 | return NULL_TREE; |
4450 | |
4451 | if (!gimple_call_builtin_p (call, BUILT_IN_NORMAL)) |
4452 | return NULL_TREE; |
4453 | |
4454 | tree fndecl = gimple_call_fndecl (gs: call); |
4455 | switch (DECL_FUNCTION_CODE (decl: fndecl)) |
4456 | { |
4457 | case BUILT_IN_MEMPCPY: |
4458 | case BUILT_IN_MEMPCPY_CHK: |
4459 | case BUILT_IN_MEMCHR: |
4460 | case BUILT_IN_STRCHR: |
4461 | case BUILT_IN_STRRCHR: |
4462 | case BUILT_IN_STRSTR: |
4463 | case BUILT_IN_STPCPY: |
4464 | case BUILT_IN_STPCPY_CHK: |
4465 | case BUILT_IN_STPNCPY: |
4466 | case BUILT_IN_STPNCPY_CHK: |
4467 | argno = 0; |
4468 | break; |
4469 | |
4470 | default: |
4471 | return NULL_TREE; |
4472 | } |
4473 | } |
4474 | |
4475 | if (gimple_call_num_args (gs: call) <= argno) |
4476 | return NULL_TREE; |
4477 | |
4478 | return gimple_call_arg (gs: call, index: argno); |
4479 | } |
4480 | |
/* Check for and diagnose all uses of the dangling pointer VAR to the auto
   object DECL whose lifetime has ended.  OBJREF is true when VAR denotes
   an access to a DECL that may have been clobbered.  MAYBE is true when
   the use is only possible (not certain) and qualifies the warning.  */

void
pass_waccess::check_dangling_uses (tree var, tree decl, bool maybe /* = false */,
				   bool objref /* = false */)
{
  /* Only auto variables can have their lifetime end mid-function.  */
  if (!decl || !auto_var_p (decl))
    return;

  /* Nothing to do unless DECL has actually been clobbered.  */
  gimple **pclob = m_clobbers.get (k: decl);
  if (!pclob)
    return;

  if (!objref)
    {
      /* VAR is a pointer to DECL: walk all of its (transitive) uses
	 and warn about any that follow the clobber.  */
      check_pointer_uses (stmt: *pclob, ptr: var, var: decl, maybe);
      return;
    }

  /* VAR is itself an access to DECL: only its defining statement
     needs to be checked against the clobber.  */
  gimple *use_stmt = SSA_NAME_DEF_STMT (var);
  if (!use_after_inval_p (inval_stmt: *pclob, use_stmt, last_block: true))
    return;

  /* If the clobber doesn't post-dominate the use the use might not
     happen on every path; demote the warning to a "maybe".  */
  basic_block use_bb = gimple_bb (g: use_stmt);
  basic_block clob_bb = gimple_bb (g: *pclob);
  maybe = maybe || !dominated_by_p (CDI_POST_DOMINATORS, clob_bb, use_bb);
  warn_invalid_pointer (ref: var, use_stmt, inval_stmt: *pclob, var: decl, maybe, equality: false);
}
4511 | |
/* Diagnose stores in BB and (recursively) its predecessors of the addresses
   of local variables into nonlocal pointers that are left dangling after
   the function returns.  STORES tracks destinations already handled so
   each is diagnosed at most once.  Returns true when we can continue
   walking the CFG to predecessors.  */

bool
pass_waccess::check_dangling_stores (basic_block bb,
				     hash_set<tree> &stores)
{
  /* Iterate backwards over the statements looking for a store of
     the address of a local variable into a nonlocal pointer.  */
  for (auto gsi = gsi_last_nondebug_bb (bb); ; gsi_prev_nondebug (i: &gsi))
    {
      gimple *stmt = gsi_stmt (i: gsi);
      if (!stmt)
	break;

      if (warning_suppressed_p (stmt, OPT_Wdangling_pointer_))
	continue;

      if (is_gimple_call (gs: stmt)
	  && !(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
	/* Avoid looking before nonconst, nonpure calls since those might
	   use the escaped locals.  */
	return false;

      /* Only plain (non-clobber) assignments that store to memory
	 are of interest.  */
      if (!is_gimple_assign (gs: stmt) || gimple_clobber_p (s: stmt)
	  || !gimple_store_p (gs: stmt))
	continue;

      access_ref lhs_ref;
      tree lhs = gimple_assign_lhs (gs: stmt);
      if (!m_ptr_qry.get_ref (lhs, stmt, &lhs_ref, 0))
	continue;

      /* Normalize LHS_REF to the underlying declaration, adjusting
	 the dereference count for each level of indirection stripped
	 or added.  */
      if (TREE_CODE (lhs_ref.ref) == MEM_REF)
	{
	  lhs_ref.ref = TREE_OPERAND (lhs_ref.ref, 0);
	  ++lhs_ref.deref;
	}
      if (TREE_CODE (lhs_ref.ref) == ADDR_EXPR)
	{
	  lhs_ref.ref = TREE_OPERAND (lhs_ref.ref, 0);
	  --lhs_ref.deref;
	}
      if (TREE_CODE (lhs_ref.ref) == SSA_NAME)
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (lhs_ref.ref);
	  if (!gimple_nop_p (g: def_stmt))
	    /* Avoid looking at or before stores into unknown objects.  */
	    return false;

	  /* A default definition: refer to the underlying decl.  */
	  lhs_ref.ref = SSA_NAME_VAR (lhs_ref.ref);
	}

      /* Warn only when the destination outlives the function.  */
      if (TREE_CODE (lhs_ref.ref) == PARM_DECL
	  && (lhs_ref.deref - DECL_BY_REFERENCE (lhs_ref.ref)) > 0)
	/* Assignment through a (real) pointer/reference parameter.  */;
      else if (VAR_P (lhs_ref.ref)
	       && !auto_var_p (lhs_ref.ref))
	/* Assignment to/through a non-local variable.  */;
      else
	/* Something else, don't warn.  */
	continue;

      /* Diagnose each destination at most once; a later (in reverse
	 order, earlier in the block) store to the same destination is
	 overwritten before the function returns.  */
      if (stores.add (k: lhs_ref.ref))
	continue;

      /* FIXME: Handle stores of alloca() and VLA.  */
      access_ref rhs_ref;
      tree rhs = gimple_assign_rhs1 (gs: stmt);
      if (!m_ptr_qry.get_ref (rhs, stmt, &rhs_ref, 0)
	  || rhs_ref.deref != -1)
	continue;

      /* Only the address of a function-local object is dangling
	 after the function returns.  */
      if (!auto_var_p (rhs_ref.ref))
	continue;

      auto_diagnostic_group d;
      location_t loc = gimple_location (g: stmt);
      if (warning_at (loc, OPT_Wdangling_pointer_,
		      "storing the address of local variable %qD in %qE" ,
		      rhs_ref.ref, lhs))
	{
	  /* Suppress subsequent warnings for the same statement.  */
	  suppress_warning (stmt, OPT_Wdangling_pointer_);

	  location_t loc = DECL_SOURCE_LOCATION (rhs_ref.ref);
	  inform (loc, "%qD declared here" , rhs_ref.ref);

	  loc = DECL_SOURCE_LOCATION (lhs_ref.ref);
	  inform (loc, "%qD declared here" , lhs_ref.ref);
	}
    }

  return true;
}
4608 | |
/* Diagnose stores of the addresses of local variables into nonlocal
   pointers that are left dangling after the function returns.  Walks
   the CFG backward from the exit block using an iterative depth-first
   search (avoiding deep recursion for large CFGs).  */

void
pass_waccess::check_dangling_stores ()
{
  /* If the function never returns there is no "after the function
     returns" to diagnose.  */
  if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (m_func)->preds) == 0)
    return;

  /* BBS marks blocks already visited; STORES the destinations already
     diagnosed.  The worklist holds edge iterators, one per block whose
     predecessor edges are being traversed.  */
  auto_bitmap bbs;
  hash_set<tree> stores;
  auto_vec<edge_iterator, 8> worklist (n_basic_blocks_for_fn (cfun) + 1);
  worklist.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (m_func)->preds));
  do
    {
      edge_iterator ei = worklist.last ();
      basic_block src = ei_edge (i: ei)->src;
      if (bitmap_set_bit (bbs, src->index))
	{
	  /* First visit to SRC: check it and, when the walk may
	     continue past it, queue its predecessor edges.  The
	     iterator on top of the worklist is advanced on the next
	     iteration (when SRC is seen as already visited).  */
	  if (check_dangling_stores (bb: src, stores)
	      && EDGE_COUNT (src->preds) > 0)
	    worklist.quick_push (ei_start (src->preds));
	}
      else
	{
	  /* SRC already visited: move on to the next predecessor edge,
	     popping the iterator once it's exhausted.  */
	  if (ei_one_before_end_p (i: ei))
	    worklist.pop ();
	  else
	    ei_next (i: &worklist.last ());
	}
    }
  while (!worklist.is_empty ());
}
4642 | |
/* Check for and diagnose uses of dangling pointers to auto objects
   whose lifetime has ended.  Scans every SSA name in the function and
   determines what object (if any) it points to.  */

void
pass_waccess::check_dangling_uses ()
{
  tree var;
  unsigned i;
  FOR_EACH_SSA_NAME (i, var, m_func)
    {
      /* For each SSA_NAME pointer VAR find the object it points to.
	 If the object is a clobbered local variable, check to see
	 if any of VAR's uses (or those of other pointers derived
	 from VAR) happens after the clobber.  If so, warn.  */

      gimple *def_stmt = SSA_NAME_DEF_STMT (var);
      if (is_gimple_assign (gs: def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (gs: def_stmt);
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    {
	      /* VAR = &OBJ: a direct pointer to a local object.  */
	      if (!POINTER_TYPE_P (TREE_TYPE (var)))
		continue;
	      check_dangling_uses (var, TREE_OPERAND (rhs, 0));
	    }
	  else
	    {
	      /* For other expressions, check the base DECL to see
		 if it's been clobbered, most likely as a result of
		 inlining a reference to it.  */
	      tree decl = get_base_address (t: rhs);
	      if (DECL_P (decl))
		check_dangling_uses (var, decl, maybe: false, objref: true);
	    }
	}
      else if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  if (gcall *call = dyn_cast<gcall *>(p: def_stmt))
	    {
	      /* VAR is the result of a call that returns one of its
		 arguments; see what object that argument refers to.  */
	      if (tree arg = gimple_call_return_arg (call))
		{
		  access_ref aref;
		  if (m_ptr_qry.get_ref (arg, call, &aref, 0)
		      && aref.deref < 0)
		    check_dangling_uses (var, decl: aref.ref);
		}
	    }
	  else if (gphi *phi = dyn_cast <gphi *>(p: def_stmt))
	    {
	      /* VAR is a PHI: check every incoming pointer.  Since
		 only some paths go through each argument the uses are
		 only "maybe" dangling.  */
	      unsigned nargs = gimple_phi_num_args (gs: phi);
	      for (unsigned i = 0; i != nargs; ++i)
		{
		  access_ref aref;
		  tree arg = gimple_phi_arg_def (gs: phi, index: i);
		  if (m_ptr_qry.get_ref (arg, phi, &aref, 0)
		      && aref.deref < 0)
		    check_dangling_uses (var, decl: aref.ref, maybe: true);
		}
	    }
	}
    }
}
4705 | |
4706 | /* Check CALL arguments for dangling pointers (those that have been |
4707 | clobbered) and warn if found. */ |
4708 | |
4709 | void |
4710 | pass_waccess::check_call_dangling (gcall *call) |
4711 | { |
4712 | unsigned nargs = gimple_call_num_args (gs: call); |
4713 | for (unsigned i = 0; i != nargs; ++i) |
4714 | { |
4715 | tree arg = gimple_call_arg (gs: call, index: i); |
4716 | if (TREE_CODE (arg) != ADDR_EXPR) |
4717 | continue; |
4718 | |
4719 | arg = TREE_OPERAND (arg, 0); |
4720 | if (!DECL_P (arg)) |
4721 | continue; |
4722 | |
4723 | gimple **pclobber = m_clobbers.get (k: arg); |
4724 | if (!pclobber) |
4725 | continue; |
4726 | |
4727 | if (!use_after_inval_p (inval_stmt: *pclobber, use_stmt: call)) |
4728 | continue; |
4729 | |
4730 | warn_invalid_pointer (NULL_TREE, use_stmt: call, inval_stmt: *pclobber, var: arg, maybe: false); |
4731 | } |
4732 | } |
4733 | |
/* Check function FUN for invalid accesses.  Entry point of the pass:
   sets up dominance info and a ranger, walks all blocks, runs the
   whole-function dangling-pointer checks, and tears everything down.
   Always returns 0 (no TODO flags).  */

unsigned
pass_waccess::execute (function *fun)
{
  /* Dominators and post-dominators are used by the use-after-clobber
     and "maybe dangling" logic.  */
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  /* Set or clear EDGE_DFS_BACK bits on back edges.  */
  mark_dfs_back_edges (fun);

  /* Create a new ranger instance and associate it with FUN.  */
  m_ptr_qry.rvals = enable_ranger (m: fun);
  m_func = fun;

  /* Check for dangling pointers in the earliest run of the pass.
     The latest point -Wdangling-pointer should run is just before
     loop unrolling which introduces uses after clobbers.  Most cases
     can be detected without optimization; cases where the address of
     the local variable is passed to and then returned from a user-
     defined function before its lifetime ends and the returned pointer
     becomes dangling depend on inlining.  */
  m_check_dangling_p = m_early_checks_p;

  auto_bitmap bb_uids_set (&bitmap_default_obstack);
  m_bb_uids_set = bb_uids_set;

  /* Statement UIDs are assigned lazily per block as they're needed
     by use_after_inval_p; start from scratch.  */
  set_gimple_stmt_max_uid (fn: m_func, maxid: 0);

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    check_block (bb);

  /* The per-block walk has populated m_clobbers; now do the
     whole-function dangling-pointer analyses.  */
  if (m_check_dangling_p)
    {
      check_dangling_uses ();
      check_dangling_stores ();
    }

  if (dump_file)
    m_ptr_qry.dump (dump_file, (dump_flags & TDF_DETAILS) != 0);

  m_ptr_qry.flush_cache ();

  /* Release the ranger instance and replace it with a global ranger.
     Also reset the pointer since calling disable_ranger() deletes it.  */
  disable_ranger (fun);
  m_ptr_qry.rvals = NULL;

  /* Reset per-function state so a subsequent run starts clean.  */
  m_clobbers.empty ();
  m_bb_uids_set = NULL;

  free_dominance_info (CDI_POST_DOMINATORS);
  free_dominance_info (CDI_DOMINATORS);
  return 0;
}
4790 | |
4791 | } // namespace |
4792 | |
4793 | /* Return a new instance of the pass. */ |
4794 | |
4795 | gimple_opt_pass * |
4796 | make_pass_warn_access (gcc::context *ctxt) |
4797 | { |
4798 | return new pass_waccess (ctxt); |
4799 | } |
4800 | |