/* UndefinedBehaviorSanitizer, undefined behavior detector.
   Copyright (C) 2013-2024 Free Software Foundation, Inc.
   Contributed by Marek Polacek <polacek@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "c-family/c-common.h"
#include "ubsan.h"
#include "c-family/c-ubsan.h"
#include "stor-layout.h"
#include "builtins.h"
#include "gimplify.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "langhooks.h"

/* Instrument division by zero and INT_MIN / -1.  If not instrumenting,
   return NULL_TREE.  */

tree
ubsan_instrument_division (location_t loc, tree op0, tree op1)
{
  tree t, tt, x = NULL_TREE;
  tree type = TREE_TYPE (op0);
  enum sanitize_code flag = SANITIZE_DIVIDE;

  /* At this point both operands should have the same type,
     because they are already converted to RESULT_TYPE.
     Use TYPE_MAIN_VARIANT since typedefs can confuse us.  */
  tree top0 = TYPE_MAIN_VARIANT (type);
  tree top1 = TYPE_MAIN_VARIANT (TREE_TYPE (op1));
  gcc_checking_assert (lang_hooks.types_compatible_p (top0, top1));

  op0 = unshare_expr (op0);
  op1 = unshare_expr (op1);

  if (INTEGRAL_TYPE_P (type)
      && sanitize_flags_p (SANITIZE_DIVIDE))
    t = fold_build2 (EQ_EXPR, boolean_type_node,
                     op1, build_int_cst (type, 0));
  else if (SCALAR_FLOAT_TYPE_P (type)
           && sanitize_flags_p (SANITIZE_FLOAT_DIVIDE))
    {
      t = fold_build2 (EQ_EXPR, boolean_type_node,
                       op1, build_real (type, dconst0));
      flag = SANITIZE_FLOAT_DIVIDE;
    }
  else
    t = NULL_TREE;

  /* We check INT_MIN / -1 only for signed types.  */
  if (INTEGRAL_TYPE_P (type)
      && sanitize_flags_p (SANITIZE_SI_OVERFLOW)
      && !TYPE_UNSIGNED (type))
    {
      tt = fold_build2 (EQ_EXPR, boolean_type_node, unshare_expr (op1),
                        build_int_cst (type, -1));
      x = fold_build2 (EQ_EXPR, boolean_type_node, op0,
                       TYPE_MIN_VALUE (type));
      x = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, x, tt);
      if (t == NULL_TREE || integer_zerop (t))
        {
          t = x;
          x = NULL_TREE;
          flag = SANITIZE_SI_OVERFLOW;
        }
      else if ((((flag_sanitize_trap & SANITIZE_DIVIDE) == 0)
                == ((flag_sanitize_trap & SANITIZE_SI_OVERFLOW) == 0))
               && (((flag_sanitize_recover & SANITIZE_DIVIDE) == 0)
                   == ((flag_sanitize_recover & SANITIZE_SI_OVERFLOW) == 0)))
        {
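          /* Division by zero and INT_MIN / -1 want the same trap/recover
             treatment here, so the two conditions can be merged and
             reported through a single handler call.  */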
          t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, x);
          x = NULL_TREE;
        }
      else if (integer_zerop (x))
        x = NULL_TREE;
    }
  else if (t == NULL_TREE)
    return NULL_TREE;

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  */
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op0), t);
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op1), t);
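  /* Build the failure branch: a bare __builtin_trap when this check traps,
     otherwise a call to the recoverable or aborting
     __ubsan_handle_divrem_overflow* handler with the encoded operands.
     A separate INT_MIN / -1 check X, if still present, gets its own
     handler call in the else arm of the final COND_EXPR.  */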
  if ((flag_sanitize_trap & flag) && x == NULL_TREE)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
                                     ubsan_type_descriptor (type), NULL_TREE,
                                     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      if (flag_sanitize_trap & flag)
        tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP),
                                  0);
      else
        {
          enum built_in_function bcode
            = (flag_sanitize_recover & flag)
              ? BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW
              : BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW_ABORT;
          tt = builtin_decl_explicit (bcode);
          op0 = unshare_expr (op0);
          op1 = unshare_expr (op1);
          tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
                                    ubsan_encode_value (op1));
        }
      if (x)
        {
          tree xt;
          if (flag_sanitize_trap & SANITIZE_SI_OVERFLOW)
            xt = build_call_expr_loc (loc,
                                      builtin_decl_explicit (BUILT_IN_TRAP),
                                      0);
          else
            {
              enum built_in_function bcode
                = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
                  ? BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW
                  : BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW_ABORT;
              xt = builtin_decl_explicit (bcode);
              op0 = unshare_expr (op0);
              op1 = unshare_expr (op1);
              xt = build_call_expr_loc (loc, xt, 3, data,
                                        ubsan_encode_value (op0),
                                        ubsan_encode_value (op1));
            }
          x = fold_build3 (COND_EXPR, void_type_node, x, xt, void_node);
        }
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, x ? x : void_node);

  return t;
}

/* Instrument left and right shifts.  */

tree
ubsan_instrument_shift (location_t loc, enum tree_code code,
                        tree op0, tree op1)
{
  tree t, tt = NULL_TREE;
  tree type0 = TREE_TYPE (op0);
  tree type1 = TREE_TYPE (op1);
  if (!INTEGRAL_TYPE_P (type0))
    return NULL_TREE;

  tree op1_utype = unsigned_type_for (type1);
  HOST_WIDE_INT op0_prec = TYPE_PRECISION (type0);
  tree uprecm1 = build_int_cst (op1_utype, op0_prec - 1);

  op0 = unshare_expr (op0);
  op1 = unshare_expr (op1);

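  /* T is the out-of-bounds-exponent check emitted for every shift: it is
     true when the shift count, converted to the unsigned variant of its
     type, exceeds TYPE_PRECISION (type0) - 1, i.e. UPRECM1.  */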
  t = fold_convert_loc (loc, op1_utype, op1);
  t = fold_build2 (GT_EXPR, boolean_type_node, t, uprecm1);

  /* If this is not a signed operation, don't perform overflow checks.
     Also punt on bit-fields.  */
  if (TYPE_OVERFLOW_WRAPS (type0)
      || maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (type0)),
                   TYPE_PRECISION (type0))
      || !sanitize_flags_p (SANITIZE_SHIFT_BASE)
      /* In C++20 and later, shifts are well defined except when
         the second operand is not within bounds.  */
      || cxx_dialect >= cxx20)
    ;

  /* For signed x << y, in C99 and later, the following:
     (unsigned) x >> (uprecm1 - y)
     if non-zero, is undefined.  */
  else if (code == LSHIFT_EXPR && flag_isoc99 && cxx_dialect < cxx11)
    {
      tree x = fold_build2 (MINUS_EXPR, op1_utype, uprecm1,
                            fold_convert (op1_utype, unshare_expr (op1)));
      tt = fold_convert_loc (loc, unsigned_type_for (type0), op0);
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (NE_EXPR, boolean_type_node, tt,
                        build_int_cst (TREE_TYPE (tt), 0));
    }

  /* For signed x << y, in C++11 to C++17, the following:
     x < 0 || ((unsigned) x >> (uprecm1 - y))
     if > 1, is undefined.  */
  else if (code == LSHIFT_EXPR && cxx_dialect >= cxx11)
    {
      tree x = fold_build2 (MINUS_EXPR, op1_utype, uprecm1,
                            fold_convert (op1_utype, unshare_expr (op1)));
      tt = fold_convert_loc (loc, unsigned_type_for (type0),
                             unshare_expr (op0));
      tt = fold_build2 (RSHIFT_EXPR, TREE_TYPE (tt), tt, x);
      tt = fold_build2 (GT_EXPR, boolean_type_node, tt,
                        build_int_cst (TREE_TYPE (tt), 1));
      x = fold_build2 (LT_EXPR, boolean_type_node, unshare_expr (op0),
                       build_int_cst (type0, 0));
      tt = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, x, tt);
    }

  /* If the condition was folded to 0, no need to instrument
     this expression.  */
  if (integer_zerop (t) && (tt == NULL_TREE || integer_zerop (tt)))
    return NULL_TREE;

  /* In case we have a SAVE_EXPR in a conditional context, we need to
     make sure it gets evaluated before the condition.  */
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op0), t);
  t = fold_build2 (COMPOUND_EXPR, TREE_TYPE (t), unshare_expr (op1), t);

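  /* RECOVER_KIND is the sanitizer bit whose trap/recover setting governs
     the check in T: SANITIZE_SHIFT_EXPONENT by default, or
     SANITIZE_SHIFT_BASE when only the shift-base check TT is used.  When
     both checks are present and want the same treatment, they are merged
     into T; otherwise TT is kept separate in ELSE_T.  */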
  enum sanitize_code recover_kind = SANITIZE_SHIFT_EXPONENT;
  tree else_t = void_node;
  if (tt)
    {
      if (!sanitize_flags_p (SANITIZE_SHIFT_EXPONENT))
        {
          t = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, t);
          t = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, t, tt);
          recover_kind = SANITIZE_SHIFT_BASE;
        }
      else
        {
          if (((!(flag_sanitize_trap & SANITIZE_SHIFT_EXPONENT))
               == (!(flag_sanitize_trap & SANITIZE_SHIFT_BASE)))
              && ((!(flag_sanitize_recover & SANITIZE_SHIFT_EXPONENT))
                  == (!(flag_sanitize_recover & SANITIZE_SHIFT_BASE))))
            t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt);
          else
            else_t = tt;
        }
    }

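  /* As in ubsan_instrument_division: emit a bare __builtin_trap when the
     primary check traps and no separate else branch is needed, otherwise
     build the shift data descriptor and call the matching
     __ubsan_handle_shift_out_of_bounds* handler.  */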
  if ((flag_sanitize_trap & recover_kind) && else_t == void_node)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      if (TREE_CODE (type1) == BITINT_TYPE
          && TYPE_PRECISION (type1) > MAX_FIXED_MODE_SIZE)
        {
          /* Workaround for missing _BitInt support in libsanitizer.
             Instead of crashing in the library, pretend values above
             maximum value of normal integral type or below minimum value
             of that type are those extremes.  */
          tree type2 = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
                                                       TYPE_UNSIGNED (type1));
          tree op2 = op1;
          if (!TYPE_UNSIGNED (type1))
            {
              op2 = fold_build2 (LT_EXPR, boolean_type_node, unshare_expr (op1),
                                 fold_convert (type1, TYPE_MIN_VALUE (type2)));
              op2 = fold_build3 (COND_EXPR, type2, op2, TYPE_MIN_VALUE (type2),
                                 fold_convert (type2, unshare_expr (op1)));
            }
          else
            op2 = fold_convert (type2, op1);
          tree op3
            = fold_build2 (GT_EXPR, boolean_type_node, unshare_expr (op1),
                           fold_convert (type1, TYPE_MAX_VALUE (type2)));
          op1 = fold_build3 (COND_EXPR, type2, op3, TYPE_MAX_VALUE (type2),
                             op2);
          type1 = type2;
        }
      tree utd0 = ubsan_type_descriptor (type0, UBSAN_PRINT_FORCE_INT);
      tree data = ubsan_create_data ("__ubsan_shift_data", 1, &loc, utd0,
                                     ubsan_type_descriptor (type1), NULL_TREE,
                                     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);

      if (flag_sanitize_trap & recover_kind)
        tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
      else
        {
          enum built_in_function bcode
            = (flag_sanitize_recover & recover_kind)
              ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
              : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
          tt = builtin_decl_explicit (bcode);
          op0 = unshare_expr (op0);
          op1 = unshare_expr (op1);
          tt = build_call_expr_loc (loc, tt, 3, data, ubsan_encode_value (op0),
                                    ubsan_encode_value (op1));
        }
      if (else_t != void_node)
        {
          tree else_tt;
          if (flag_sanitize_trap & SANITIZE_SHIFT_BASE)
            else_tt
              = build_call_expr_loc (loc,
                                     builtin_decl_explicit (BUILT_IN_TRAP), 0);
          else
            {
              enum built_in_function bcode
                = (flag_sanitize_recover & SANITIZE_SHIFT_BASE)
                  ? BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS
                  : BUILT_IN_UBSAN_HANDLE_SHIFT_OUT_OF_BOUNDS_ABORT;
              else_tt = builtin_decl_explicit (bcode);
              op0 = unshare_expr (op0);
              op1 = unshare_expr (op1);
              else_tt = build_call_expr_loc (loc, else_tt, 3, data,
                                             ubsan_encode_value (op0),
                                             ubsan_encode_value (op1));
            }
          else_t = fold_build3 (COND_EXPR, void_type_node, else_t,
                                else_tt, void_node);
        }
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, else_t);

  return t;
}

/* Instrument variable length array bound.  */

tree
ubsan_instrument_vla (location_t loc, tree size)
{
  tree type = TREE_TYPE (size);
  tree t, tt;

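  /* A variable length array bound is erroneous when it is not positive,
     so the condition built below is SIZE <= 0.  */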
  t = fold_build2 (LE_EXPR, boolean_type_node, size, build_int_cst (type, 0));
  if (flag_sanitize_trap & SANITIZE_VLA)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_vla_data", 1, &loc,
                                     ubsan_type_descriptor (type), NULL_TREE,
                                     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
        = (flag_sanitize_recover & SANITIZE_VLA)
          ? BUILT_IN_UBSAN_HANDLE_VLA_BOUND_NOT_POSITIVE
          : BUILT_IN_UBSAN_HANDLE_VLA_BOUND_NOT_POSITIVE_ABORT;
      tt = builtin_decl_explicit (bcode);
      tt = build_call_expr_loc (loc, tt, 2, data, ubsan_encode_value (size));
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_node);

  return t;
}

/* Instrument missing return in C++ functions returning non-void.  */

tree
ubsan_instrument_return (location_t loc)
{
  if (flag_sanitize_trap & SANITIZE_RETURN)
    /* pass_warn_function_return checks for BUILTINS_LOCATION.  */
    return build_call_expr_loc (BUILTINS_LOCATION,
                                builtin_decl_explicit (BUILT_IN_TRAP), 0);

  tree data = ubsan_create_data ("__ubsan_missing_return_data", 1, &loc,
                                 NULL_TREE, NULL_TREE);
  tree t = builtin_decl_explicit (BUILT_IN_UBSAN_HANDLE_MISSING_RETURN);
  return build_call_expr_loc (loc, t, 1, build_fold_addr_expr_loc (loc, data));
}

/* Instrument array bounds for ARRAY_REFs.  We create a special builtin
   that gets expanded in the sanopt pass, and make an array dimension
   of it.  ARRAY is the array, *INDEX is an index to the array.
   Return NULL_TREE if no instrumentation is emitted.
   IGNORE_OFF_BY_ONE is true if the ARRAY_REF is inside an ADDR_EXPR.  */

tree
ubsan_instrument_bounds (location_t loc, tree array, tree *index,
                         bool ignore_off_by_one)
{
  tree type = TREE_TYPE (array);
  tree domain = TYPE_DOMAIN (type);

  if (domain == NULL_TREE)
    return NULL_TREE;

  tree bound = TYPE_MAX_VALUE (domain);
  if (!bound)
    {
      /* Handle C [0] arrays, which have TYPE_MAX_VALUE NULL, like
         C++ [0] arrays which have TYPE_MIN_VALUE 0 TYPE_MAX_VALUE -1.  */
      if (!c_dialect_cxx ()
          && COMPLETE_TYPE_P (type)
          && integer_zerop (TYPE_SIZE (type)))
        bound = build_int_cst (TREE_TYPE (TYPE_MIN_VALUE (domain)), -1);
      else
        return NULL_TREE;
    }

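  /* BOUND becomes the array length (the maximum valid index plus one);
     when the ARRAY_REF is inside an ADDR_EXPR, one more is allowed,
     since forming the address of the element just past the end is
     valid.  */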
  bound = fold_build2 (PLUS_EXPR, TREE_TYPE (bound), bound,
                       build_int_cst (TREE_TYPE (bound),
                                      1 + ignore_off_by_one));

  /* Detect flexible array members and suchlike, unless
     -fsanitize=bounds-strict.  */
  tree base = get_base_address (array);
  if (!sanitize_flags_p (SANITIZE_BOUNDS_STRICT)
      && TREE_CODE (array) == COMPONENT_REF
      && base && (INDIRECT_REF_P (base) || TREE_CODE (base) == MEM_REF))
    {
      tree next = NULL_TREE;
      tree cref = array;

      /* Walk all structs/unions.  */
      while (TREE_CODE (cref) == COMPONENT_REF)
        {
          if (TREE_CODE (TREE_TYPE (TREE_OPERAND (cref, 0))) == RECORD_TYPE)
            for (next = DECL_CHAIN (TREE_OPERAND (cref, 1));
                 next && TREE_CODE (next) != FIELD_DECL;
                 next = DECL_CHAIN (next))
              ;
          if (next)
            /* Not a last element.  Instrument it.  */
            break;
          if (TREE_CODE (TREE_TYPE (TREE_OPERAND (cref, 1))) == ARRAY_TYPE
              && !c_dialect_cxx ())
            {
              unsigned l
                = c_strict_flex_array_level_of (TREE_OPERAND (cref, 1));
              tree type2 = TREE_TYPE (TREE_OPERAND (cref, 1));
              if (TYPE_DOMAIN (type2) != NULL_TREE)
                {
                  tree max = TYPE_MAX_VALUE (TYPE_DOMAIN (type2));
                  if (max == NULL_TREE)
                    {
                      /* C [0] */
                      if (COMPLETE_TYPE_P (type2)
                          && integer_zerop (TYPE_SIZE (type2))
                          && l == 3)
                        next = TREE_OPERAND (cref, 1);
                    }
                  else if (TREE_CODE (max) == INTEGER_CST)
                    {
                      if (c_dialect_cxx ()
                          && integer_all_onesp (max))
                        {
                          /* C++ [0] */
                          if (l == 3)
                            next = TREE_OPERAND (cref, 1);
                        }
                      else if (integer_zerop (max))
                        {
                          /* C/C++ [1] */
                          if (l >= 2)
                            next = TREE_OPERAND (cref, 1);
                        }
                      else if (l >= 1)
                        next = TREE_OPERAND (cref, 1);
                    }
                }
              if (next)
                break;
            }
          /* Ok, this is the last field of the structure/union.  But the
             aggregate containing the field must be the last field too,
             recursively.  */
          cref = TREE_OPERAND (cref, 0);
        }
      if (!next)
        /* Don't instrument this flexible array member-like array in non-strict
           -fsanitize=bounds mode.  */
        return NULL_TREE;
    }

  /* Don't emit instrumentation in the most common cases.  */
  tree idx = NULL_TREE;
  if (TREE_CODE (*index) == INTEGER_CST)
    idx = *index;
  else if (TREE_CODE (*index) == BIT_AND_EXPR
           && TREE_CODE (TREE_OPERAND (*index, 1)) == INTEGER_CST)
    idx = TREE_OPERAND (*index, 1);
  if (idx
      && TREE_CODE (bound) == INTEGER_CST
      && tree_int_cst_sgn (idx) >= 0
      && tree_int_cst_lt (idx, bound))
    return NULL_TREE;

  *index = save_expr (*index);
  /* Create a "(T *) 0" tree node to describe the array type.  */
  tree zero_with_type = build_int_cst (build_pointer_type (type), 0);
  return build_call_expr_internal_loc (loc, IFN_UBSAN_BOUNDS,
                                       void_type_node, 3, zero_with_type,
                                       *index, bound);
}

/* Return true iff T is an array that was instrumented by SANITIZE_BOUNDS.  */

bool
ubsan_array_ref_instrumented_p (const_tree t)
{
  if (TREE_CODE (t) != ARRAY_REF)
    return false;

  tree op1 = TREE_OPERAND (t, 1);
  return TREE_CODE (op1) == COMPOUND_EXPR
         && TREE_CODE (TREE_OPERAND (op1, 0)) == CALL_EXPR
         && CALL_EXPR_FN (TREE_OPERAND (op1, 0)) == NULL_TREE
         && CALL_EXPR_IFN (TREE_OPERAND (op1, 0)) == IFN_UBSAN_BOUNDS;
}

/* Instrument an ARRAY_REF, if it hasn't already been instrumented.
   IGNORE_OFF_BY_ONE is true if the ARRAY_REF is inside an ADDR_EXPR.  */

void
ubsan_maybe_instrument_array_ref (tree *expr_p, bool ignore_off_by_one)
{
  if (!ubsan_array_ref_instrumented_p (*expr_p)
      && sanitize_flags_p (SANITIZE_BOUNDS | SANITIZE_BOUNDS_STRICT)
      && current_function_decl != NULL_TREE)
    {
      tree op0 = TREE_OPERAND (*expr_p, 0);
      tree op1 = TREE_OPERAND (*expr_p, 1);
      tree e = ubsan_instrument_bounds (EXPR_LOCATION (*expr_p), op0, &op1,
                                        ignore_off_by_one);
      if (e != NULL_TREE)
        TREE_OPERAND (*expr_p, 1) = build2 (COMPOUND_EXPR, TREE_TYPE (op1),
                                            e, op1);
    }
}

static tree
ubsan_maybe_instrument_reference_or_call (location_t loc, tree op, tree ptype,
                                          enum ubsan_null_ckind ckind)
{
  if (!sanitize_flags_p (SANITIZE_ALIGNMENT | SANITIZE_NULL)
      || current_function_decl == NULL_TREE)
    return NULL_TREE;

  tree type = TREE_TYPE (ptype);
  tree orig_op = op;
  bool instrument = false;
  unsigned int mina = 0;

  if (sanitize_flags_p (SANITIZE_ALIGNMENT))
    {
      mina = min_align_of_type (type);
      if (mina <= 1)
        mina = 0;
    }
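  /* Look through pointer conversions and NON_LVALUE_EXPRs so the checks
     below can inspect the underlying pointer: an ADDR_EXPR or a reference
     may already be known to be non-null or sufficiently aligned.  */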
  while ((TREE_CODE (op) == NOP_EXPR
          || TREE_CODE (op) == NON_LVALUE_EXPR)
         && TREE_CODE (TREE_TYPE (op)) == POINTER_TYPE)
    op = TREE_OPERAND (op, 0);
  if (TREE_CODE (op) == NOP_EXPR
      && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE)
    {
      if (mina && mina > min_align_of_type (TREE_TYPE (TREE_TYPE (op))))
        instrument = true;
    }
  else
    {
      if (sanitize_flags_p (SANITIZE_NULL) && TREE_CODE (op) == ADDR_EXPR)
        {
          bool strict_overflow_p = false;
          /* tree_single_nonzero_warnv_p will not return true for non-weak
             non-automatic decls with -fno-delete-null-pointer-checks,
             which is disabled during -fsanitize=null.  We don't want to
             instrument those, just weak vars though.  */
          int save_flag_delete_null_pointer_checks
            = flag_delete_null_pointer_checks;
          flag_delete_null_pointer_checks = 1;
          if (!tree_single_nonzero_warnv_p (op, &strict_overflow_p)
              || strict_overflow_p)
            instrument = true;
          flag_delete_null_pointer_checks
            = save_flag_delete_null_pointer_checks;
        }
      else if (sanitize_flags_p (SANITIZE_NULL))
        instrument = true;
      if (mina && mina > 1)
        {
          if (!POINTER_TYPE_P (TREE_TYPE (op))
              || mina > get_pointer_alignment (op) / BITS_PER_UNIT)
            instrument = true;
        }
    }
  if (!instrument)
    return NULL_TREE;
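  /* Wrap the original pointer in a SAVE_EXPR so it is evaluated only once:
     the same value feeds both the IFN_UBSAN_NULL check and the enclosing
     expression via the COMPOUND_EXPR built below.  */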
  op = save_expr (orig_op);
  gcc_assert (POINTER_TYPE_P (ptype));
  if (TREE_CODE (ptype) == REFERENCE_TYPE)
    ptype = build_pointer_type (TREE_TYPE (ptype));
  tree kind = build_int_cst (ptype, ckind);
  tree align = build_int_cst (pointer_sized_int_node, mina);
  tree call
    = build_call_expr_internal_loc (loc, IFN_UBSAN_NULL, void_type_node,
                                    3, op, kind, align);
  TREE_SIDE_EFFECTS (call) = 1;
  return fold_build2 (COMPOUND_EXPR, TREE_TYPE (op), call, op);
}

/* Instrument a NOP_EXPR to REFERENCE_TYPE or INTEGER_CST with REFERENCE_TYPE
   type if needed.  */

void
ubsan_maybe_instrument_reference (tree *stmt_p)
{
  tree stmt = *stmt_p;
  tree op = stmt;
  if (TREE_CODE (stmt) == NOP_EXPR)
    op = TREE_OPERAND (stmt, 0);
  op = ubsan_maybe_instrument_reference_or_call (EXPR_LOCATION (stmt), op,
                                                 TREE_TYPE (stmt),
                                                 UBSAN_REF_BINDING);
  if (op)
    {
      if (TREE_CODE (stmt) == NOP_EXPR)
        TREE_OPERAND (stmt, 0) = op;
      else
        *stmt_p = op;
    }
}

/* Instrument a CALL_EXPR to a method if needed.  */

void
ubsan_maybe_instrument_member_call (tree stmt, bool is_ctor)
{
  if (call_expr_nargs (stmt) == 0)
    return;
  tree op = CALL_EXPR_ARG (stmt, 0);
  if (op == error_mark_node
      || !POINTER_TYPE_P (TREE_TYPE (op)))
    return;
  op = ubsan_maybe_instrument_reference_or_call (EXPR_LOCATION (stmt), op,
                                                 TREE_TYPE (op),
                                                 is_ctor ? UBSAN_CTOR_CALL
                                                 : UBSAN_MEMBER_CALL);
  if (op)
    CALL_EXPR_ARG (stmt, 0) = op;
}
