1 | /* Optimize and expand sanitizer functions. |
2 | Copyright (C) 2014-2023 Free Software Foundation, Inc. |
3 | Contributed by Marek Polacek <polacek@redhat.com> |
4 | |
5 | This file is part of GCC. |
6 | |
7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free |
9 | Software Foundation; either version 3, or (at your option) any later |
10 | version. |
11 | |
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
15 | for more details. |
16 | |
17 | You should have received a copy of the GNU General Public License |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ |
20 | |
21 | #include "config.h" |
22 | #include "system.h" |
23 | #include "coretypes.h" |
24 | #include "backend.h" |
25 | #include "tree.h" |
26 | #include "gimple.h" |
27 | #include "ssa.h" |
28 | #include "tree-pass.h" |
29 | #include "tree-ssa-operands.h" |
30 | #include "gimple-pretty-print.h" |
31 | #include "fold-const.h" |
32 | #include "gimple-iterator.h" |
33 | #include "stringpool.h" |
34 | #include "attribs.h" |
35 | #include "asan.h" |
36 | #include "ubsan.h" |
37 | #include "tree-hash-traits.h" |
38 | #include "gimple-ssa.h" |
39 | #include "tree-phinodes.h" |
40 | #include "ssa-iterators.h" |
41 | #include "gimplify.h" |
42 | #include "gimple-iterator.h" |
43 | #include "gimple-walk.h" |
44 | #include "cfghooks.h" |
45 | #include "tree-dfa.h" |
46 | #include "tree-ssa.h" |
47 | #include "varasm.h" |
48 | |
49 | /* This is used to carry information about basic blocks. It is |
50 | attached to the AUX field of the standard CFG block. */ |
51 | |
52 | struct sanopt_info |
53 | { |
54 | /* True if this BB might call (directly or indirectly) free/munmap |
55 | or similar operation. */ |
56 | bool has_freeing_call_p; |
57 | |
58 | /* True if HAS_FREEING_CALL_P flag has been computed. */ |
59 | bool has_freeing_call_computed_p; |
60 | |
61 | /* True if there is a block with HAS_FREEING_CALL_P flag set |
62 | on any path between an immediate dominator of BB, denoted |
63 | imm(BB), and BB. */ |
64 | bool imm_dom_path_with_freeing_call_p; |
65 | |
66 | /* True if IMM_DOM_PATH_WITH_FREEING_CALL_P has been computed. */ |
67 | bool imm_dom_path_with_freeing_call_computed_p; |
68 | |
69 | /* Number of possibly freeing calls encountered in this bb |
70 | (so far). */ |
71 | uint64_t freeing_call_events; |
72 | |
73 | /* True if BB is currently being visited during computation |
74 | of IMM_DOM_PATH_WITH_FREEING_CALL_P flag. */ |
75 | bool being_visited_p; |
76 | |
77 | /* True if this BB has been visited in the dominator walk. */ |
78 | bool visited_p; |
79 | }; |
80 | |
81 | /* If T has a single definition of form T = T2, return T2. */ |
82 | |
83 | static gimple * |
84 | maybe_get_single_definition (tree t) |
85 | { |
86 | if (TREE_CODE (t) == SSA_NAME) |
87 | { |
88 | gimple *g = SSA_NAME_DEF_STMT (t); |
89 | if (gimple_assign_single_p (g)) |
90 | return g; |
91 | } |
92 | return NULL; |
93 | } |
94 | |
95 | /* Tree triplet for vptr_check_map. */ |
96 | struct sanopt_tree_triplet |
97 | { |
98 | tree t1, t2, t3; |
99 | }; |
100 | |
101 | /* Traits class for tree triplet hash maps below. */ |
102 | |
103 | struct sanopt_tree_triplet_hash : typed_noop_remove <sanopt_tree_triplet> |
104 | { |
105 | typedef sanopt_tree_triplet value_type; |
106 | typedef sanopt_tree_triplet compare_type; |
107 | |
108 | static hashval_t |
109 | hash (const sanopt_tree_triplet &ref) |
110 | { |
111 | inchash::hash hstate (0); |
112 | inchash::add_expr (ref.t1, hstate); |
113 | inchash::add_expr (ref.t2, hstate); |
114 | inchash::add_expr (ref.t3, hstate); |
115 | return hstate.end (); |
116 | } |
117 | |
118 | static bool |
119 | equal (const sanopt_tree_triplet &ref1, const sanopt_tree_triplet &ref2) |
120 | { |
121 | return operand_equal_p (ref1.t1, ref2.t1, 0) |
122 | && operand_equal_p (ref1.t2, ref2.t2, 0) |
123 | && operand_equal_p (ref1.t3, ref2.t3, 0); |
124 | } |
125 | |
126 | static void |
127 | mark_deleted (sanopt_tree_triplet &ref) |
128 | { |
129 | ref.t1 = reinterpret_cast<tree> (1); |
130 | } |
131 | |
132 | static const bool empty_zero_p = true; |
133 | |
134 | static void |
135 | mark_empty (sanopt_tree_triplet &ref) |
136 | { |
137 | ref.t1 = NULL; |
138 | } |
139 | |
140 | static bool |
141 | is_deleted (const sanopt_tree_triplet &ref) |
142 | { |
143 | return ref.t1 == reinterpret_cast<tree> (1); |
144 | } |
145 | |
146 | static bool |
147 | is_empty (const sanopt_tree_triplet &ref) |
148 | { |
149 | return ref.t1 == NULL; |
150 | } |
151 | }; |
152 | |
153 | /* Tree couple for ptr_check_map. */ |
154 | struct sanopt_tree_couple |
155 | { |
156 | tree ptr; |
157 | bool pos_p; |
158 | }; |
159 | |
160 | /* Traits class for tree couple hash maps below. */ |
161 | |
162 | struct sanopt_tree_couple_hash : typed_noop_remove <sanopt_tree_couple> |
163 | { |
164 | typedef sanopt_tree_couple value_type; |
165 | typedef sanopt_tree_couple compare_type; |
166 | |
167 | static hashval_t |
168 | hash (const sanopt_tree_couple &ref) |
169 | { |
170 | inchash::hash hstate (0); |
171 | inchash::add_expr (ref.ptr, hstate); |
172 | hstate.add_int (ref.pos_p); |
173 | return hstate.end (); |
174 | } |
175 | |
176 | static bool |
177 | equal (const sanopt_tree_couple &ref1, const sanopt_tree_couple &ref2) |
178 | { |
179 | return operand_equal_p (ref1.ptr, ref2.ptr, 0) |
180 | && ref1.pos_p == ref2.pos_p; |
181 | } |
182 | |
183 | static void |
184 | mark_deleted (sanopt_tree_couple &ref) |
185 | { |
186 | ref.ptr = reinterpret_cast<tree> (1); |
187 | } |
188 | |
189 | static const bool empty_zero_p = true; |
190 | |
191 | static void |
192 | mark_empty (sanopt_tree_couple &ref) |
193 | { |
194 | ref.ptr = NULL; |
195 | } |
196 | |
197 | static bool |
198 | is_deleted (const sanopt_tree_couple &ref) |
199 | { |
200 | return ref.ptr == reinterpret_cast<tree> (1); |
201 | } |
202 | |
203 | static bool |
204 | is_empty (const sanopt_tree_couple &ref) |
205 | { |
206 | return ref.ptr == NULL; |
207 | } |
208 | }; |
209 | |
210 | /* This is used to carry various hash maps and variables used |
211 | in sanopt_optimize_walker. */ |
212 | |
213 | class sanopt_ctx |
214 | { |
215 | public: |
216 | /* This map maps a pointer (the first argument of UBSAN_NULL) to |
217 | a vector of UBSAN_NULL call statements that check this pointer. */ |
218 | hash_map<tree, auto_vec<gimple *> > null_check_map; |
219 | |
220 | /* This map maps a pointer (the second argument of ASAN_CHECK) to |
221 | a vector of ASAN_CHECK call statements that check the access. */ |
222 | hash_map<tree_operand_hash, auto_vec<gimple *> > asan_check_map; |
223 | |
224 | /* This map maps a tree triplet (the first, second and fourth argument |
225 | of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check |
226 | that virtual table pointer. */ |
227 | hash_map<sanopt_tree_triplet_hash, auto_vec<gimple *> > vptr_check_map; |
228 | |
229 | /* This map maps a couple (tree and boolean) to a vector of UBSAN_PTR |
230 | call statements that check that pointer overflow. */ |
231 | hash_map<sanopt_tree_couple_hash, auto_vec<gimple *> > ptr_check_map; |
232 | |
233 | /* Number of IFN_ASAN_CHECK statements. */ |
234 | int asan_num_accesses; |
235 | |
236 | /* True when the current function contains an ASAN_MARK. */ |
237 | bool contains_asan_mark; |
238 | }; |
239 | |
240 | /* Return true if there might be any call to free/munmap operation |
241 | on any path in between DOM (which should be imm(BB)) and BB. */ |
242 | |
243 | static bool |
244 | imm_dom_path_with_freeing_call (basic_block bb, basic_block dom) |
245 | { |
246 | sanopt_info *info = (sanopt_info *) bb->aux; |
247 | edge e; |
248 | edge_iterator ei; |
249 | |
250 | if (info->imm_dom_path_with_freeing_call_computed_p) |
251 | return info->imm_dom_path_with_freeing_call_p; |
252 | |
253 | info->being_visited_p = true; |
254 | |
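/* First check whether any predecessor of BB (other than DOM) is already
known to contain, or be reachable through, a freeing call. */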
255 | FOR_EACH_EDGE (e, ei, bb->preds) |
256 | { |
257 | sanopt_info *pred_info = (sanopt_info *) e->src->aux; |
258 | |
259 | if (e->src == dom) |
260 | continue; |
261 | |
262 | if ((pred_info->imm_dom_path_with_freeing_call_computed_p |
263 | && pred_info->imm_dom_path_with_freeing_call_p) |
264 | || (pred_info->has_freeing_call_computed_p |
265 | && pred_info->has_freeing_call_p)) |
266 | { |
267 | info->imm_dom_path_with_freeing_call_computed_p = true; |
268 | info->imm_dom_path_with_freeing_call_p = true; |
269 | info->being_visited_p = false; |
270 | return true; |
271 | } |
272 | } |
273 | |
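/* Next, scan the statements of predecessors whose HAS_FREEING_CALL_P
flag has not been computed yet. */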
274 | FOR_EACH_EDGE (e, ei, bb->preds) |
275 | { |
276 | sanopt_info *pred_info = (sanopt_info *) e->src->aux; |
277 | |
278 | if (e->src == dom) |
279 | continue; |
280 | |
281 | if (pred_info->has_freeing_call_computed_p) |
282 | continue; |
283 | |
284 | gimple_stmt_iterator gsi; |
285 | for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi)) |
286 | { |
287 | gimple *stmt = gsi_stmt (gsi); |
288 | gasm *asm_stmt; |
289 | |
290 | if ((is_gimple_call (stmt) && !nonfreeing_call_p (stmt)) |
291 | || ((asm_stmt = dyn_cast <gasm *> (stmt)) |
292 | && (gimple_asm_clobbers_memory_p (asm_stmt) |
293 | || gimple_asm_volatile_p (asm_stmt)))) |
294 | { |
295 | pred_info->has_freeing_call_p = true; |
296 | break; |
297 | } |
298 | } |
299 | |
300 | pred_info->has_freeing_call_computed_p = true; |
301 | if (pred_info->has_freeing_call_p) |
302 | { |
303 | info->imm_dom_path_with_freeing_call_computed_p = true; |
304 | info->imm_dom_path_with_freeing_call_p = true; |
305 | info->being_visited_p = false; |
306 | return true; |
307 | } |
308 | } |
309 | |
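/* Finally, walk up the dominator tree from each predecessor towards DOM,
recursing into blocks that are not currently being visited. */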
310 | FOR_EACH_EDGE (e, ei, bb->preds) |
311 | { |
312 | if (e->src == dom) |
313 | continue; |
314 | |
315 | basic_block src; |
316 | for (src = e->src; src != dom; ) |
317 | { |
318 | sanopt_info *pred_info = (sanopt_info *) src->aux; |
319 | if (pred_info->being_visited_p) |
320 | break; |
321 | basic_block imm = get_immediate_dominator (CDI_DOMINATORS, src); |
322 | if (imm_dom_path_with_freeing_call (src, imm)) |
323 | { |
324 | info->imm_dom_path_with_freeing_call_computed_p = true; |
325 | info->imm_dom_path_with_freeing_call_p = true; |
326 | info->being_visited_p = false; |
327 | return true; |
328 | } |
329 | src = imm; |
330 | } |
331 | } |
332 | |
333 | info->imm_dom_path_with_freeing_call_computed_p = true; |
334 | info->imm_dom_path_with_freeing_call_p = false; |
335 | info->being_visited_p = false; |
336 | return false; |
337 | } |
338 | |
339 | /* Get the first dominating check from the list of stored checks. |
340 | Non-dominating checks are silently dropped. */ |
341 | |
342 | static gimple * |
343 | maybe_get_dominating_check (auto_vec<gimple *> &v) |
344 | { |
345 | for (; !v.is_empty (); v.pop ()) |
346 | { |
347 | gimple *g = v.last (); |
348 | sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux; |
349 | if (!si->visited_p) |
350 | /* At this point we shouldn't have any statements |
351 | that aren't dominating the current BB. */ |
352 | return g; |
353 | } |
354 | return NULL; |
355 | } |
356 | |
357 | /* Optimize away redundant UBSAN_NULL calls. */ |
358 | |
359 | static bool |
360 | maybe_optimize_ubsan_null_ifn (class sanopt_ctx *ctx, gimple *stmt) |
361 | { |
362 | gcc_assert (gimple_call_num_args (stmt) == 3); |
363 | tree ptr = gimple_call_arg (stmt, 0); |
364 | tree cur_align = gimple_call_arg (stmt, 2); |
365 | gcc_assert (TREE_CODE (cur_align) == INTEGER_CST); |
366 | bool remove = false; |
367 | |
368 | auto_vec<gimple *> &v = ctx->null_check_map.get_or_insert (ptr); |
369 | gimple *g = maybe_get_dominating_check (v); |
370 | if (!g) |
371 | { |
372 | /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's |
373 | nothing to optimize yet. */ |
374 | v.safe_push (stmt); |
375 | return false; |
376 | } |
377 | |
378 | /* We already have recorded a UBSAN_NULL check for this pointer. Perhaps we |
379 | can drop this one. But only if this check doesn't specify stricter |
380 | alignment. */ |
381 | |
382 | tree align = gimple_call_arg (g, 2); |
383 | int kind = tree_to_shwi (gimple_call_arg (g, 1)); |
384 | /* If this is a NULL pointer check where we had segv anyway, we can |
385 | remove it. */ |
386 | if (integer_zerop (align) |
387 | && (kind == UBSAN_LOAD_OF |
388 | || kind == UBSAN_STORE_OF |
389 | || kind == UBSAN_MEMBER_ACCESS)) |
390 | remove = true; |
391 | /* Otherwise remove the check in non-recovering mode, or if the |
392 | stmts have same location. */ |
393 | else if (integer_zerop (align)) |
394 | remove = (flag_sanitize_recover & SANITIZE_NULL) == 0 |
395 | || (flag_sanitize_trap & SANITIZE_NULL) != 0 |
396 | || gimple_location (g) == gimple_location (stmt); |
397 | else if (tree_int_cst_le (cur_align, align)) |
398 | remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0 |
399 | || (flag_sanitize_trap & SANITIZE_ALIGNMENT) != 0 |
400 | || gimple_location (g) == gimple_location (stmt); |
401 | |
402 | if (!remove && gimple_bb (g) == gimple_bb (stmt) |
403 | && tree_int_cst_compare (cur_align, align) == 0) |
404 | v.pop (); |
405 | |
406 | if (!remove) |
407 | v.safe_push (stmt); |
408 | return remove; |
409 | } |
410 | |
411 | /* Return true when pointer PTR for a given CUR_OFFSET is already sanitized |
412 | in a given sanitization context CTX. */ |
413 | |
414 | static bool |
415 | has_dominating_ubsan_ptr_check (sanopt_ctx *ctx, tree ptr, |
416 | offset_int &cur_offset) |
417 | { |
418 | bool pos_p = !wi::neg_p (cur_offset); |
419 | sanopt_tree_couple couple; |
420 | couple.ptr = ptr; |
421 | couple.pos_p = pos_p; |
422 | |
423 | auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple); |
424 | gimple *g = maybe_get_dominating_check (v); |
425 | if (!g) |
426 | return false; |
427 | |
428 | /* We already have recorded a UBSAN_PTR check for this pointer. Perhaps we |
429 | can drop this one. But only if this check doesn't specify larger offset. |
430 | */ |
431 | tree offset = gimple_call_arg (g, 1); |
432 | gcc_assert (TREE_CODE (offset) == INTEGER_CST); |
433 | offset_int ooffset = wi::sext (wi::to_offset (offset), POINTER_SIZE); |
434 | |
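/* The recorded check makes this one redundant only if it covers at least
as large an offset in the same direction. */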
435 | if (pos_p) |
436 | { |
437 | if (wi::les_p (cur_offset, ooffset)) |
438 | return true; |
439 | } |
440 | else if (!pos_p && wi::les_p (ooffset, cur_offset)) |
441 | return true; |
442 | |
443 | return false; |
444 | } |
445 | |
446 | /* Record UBSAN_PTR check in context CTX: register that pointer PTR with |
447 | a given OFFSET is checked by GIMPLE statement STMT. */ |
448 | |
449 | static void |
450 | record_ubsan_ptr_check_stmt (sanopt_ctx *ctx, gimple *stmt, tree ptr, |
451 | const offset_int &offset) |
452 | { |
453 | sanopt_tree_couple couple; |
454 | couple.ptr = ptr; |
455 | couple.pos_p = !wi::neg_p (offset); |
456 | |
457 | auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple); |
458 | v.safe_push (stmt); |
459 | } |
460 | |
461 | /* Optimize away redundant UBSAN_PTR calls. */ |
462 | |
463 | static bool |
464 | maybe_optimize_ubsan_ptr_ifn (sanopt_ctx *ctx, gimple *stmt) |
465 | { |
466 | poly_int64 bitsize, pbitpos; |
467 | machine_mode mode; |
468 | int volatilep = 0, reversep, unsignedp = 0; |
469 | tree offset; |
470 | |
471 | gcc_assert (gimple_call_num_args (stmt) == 2); |
472 | tree ptr = gimple_call_arg (stmt, 0); |
473 | tree off = gimple_call_arg (stmt, 1); |
474 | |
475 | if (TREE_CODE (off) != INTEGER_CST) |
476 | return false; |
477 | |
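/* A zero offset can never overflow the pointer, so the check is redundant. */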
478 | if (integer_zerop (off)) |
479 | return true; |
480 | |
481 | offset_int cur_offset = wi::sext (wi::to_offset (off), POINTER_SIZE); |
482 | if (has_dominating_ubsan_ptr_check (ctx, ptr, cur_offset)) |
483 | return true; |
484 | |
485 | tree base = ptr; |
486 | if (TREE_CODE (base) == ADDR_EXPR) |
487 | { |
488 | base = TREE_OPERAND (base, 0); |
489 | |
490 | HOST_WIDE_INT bitpos; |
491 | base = get_inner_reference (base, &bitsize, &pbitpos, &offset, &mode, |
492 | &unsignedp, &reversep, &volatilep); |
493 | if ((offset == NULL_TREE || TREE_CODE (offset) == INTEGER_CST) |
494 | && DECL_P (base) |
495 | && ((!VAR_P (base) |
496 | && TREE_CODE (base) != PARM_DECL |
497 | && TREE_CODE (base) != RESULT_DECL) |
498 | || !DECL_REGISTER (base)) |
499 | && pbitpos.is_constant (&bitpos)) |
500 | { |
501 | offset_int expr_offset; |
502 | if (offset) |
503 | expr_offset = wi::to_offset (offset) + bitpos / BITS_PER_UNIT; |
504 | else |
505 | expr_offset = bitpos / BITS_PER_UNIT; |
506 | expr_offset = wi::sext (expr_offset, POINTER_SIZE); |
507 | offset_int total_offset = expr_offset + cur_offset; |
508 | if (total_offset != wi::sext (total_offset, POINTER_SIZE)) |
509 | { |
510 | record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset); |
511 | return false; |
512 | } |
513 | |
514 | /* If BASE is a fixed size automatic variable or |
515 | global variable defined in the current TU, we don't have |
516 | to instrument anything if offset is within address |
517 | of the variable. */ |
518 | if ((VAR_P (base) |
519 | || TREE_CODE (base) == PARM_DECL |
520 | || TREE_CODE (base) == RESULT_DECL) |
521 | && DECL_SIZE_UNIT (base) |
522 | && TREE_CODE (DECL_SIZE_UNIT (base)) == INTEGER_CST |
523 | && (!is_global_var (base) || decl_binds_to_current_def_p (base))) |
524 | { |
525 | offset_int base_size = wi::to_offset (DECL_SIZE_UNIT (base)); |
526 | if (!wi::neg_p (expr_offset) |
527 | && wi::les_p (total_offset, base_size)) |
528 | { |
529 | if (!wi::neg_p (total_offset) |
530 | && wi::les_p (total_offset, base_size)) |
531 | return true; |
532 | } |
533 | } |
534 | |
535 | /* Following expression: UBSAN_PTR (&MEM_REF[ptr + x], y) can be |
536 | handled as follows: |
537 | |
538 | 1) sign (x) == sign (y), then check for dominating check of (x + y) |
539 | 2) sign (x) != sign (y), then first check if we have a dominating |
540 | check for ptr + x. If so, then we have 2 situations: |
541 | a) sign (x) == sign (x + y), here we are done, example: |
542 | UBSAN_PTR (&MEM_REF[ptr + 100], -50) |
543 | b) check for dominating check of ptr + x + y. |
544 | */ |
545 | |
546 | bool sign_cur_offset = !wi::neg_p (cur_offset); |
547 | bool sign_expr_offset = !wi::neg_p (expr_offset); |
548 | |
549 | tree base_addr |
550 | = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (base)), base); |
551 | |
552 | bool add = false; |
553 | if (sign_cur_offset == sign_expr_offset) |
554 | { |
555 | if (has_dominating_ubsan_ptr_check (ctx, base_addr, total_offset)) |
556 | return true; |
557 | else |
558 | add = true; |
559 | } |
560 | else |
561 | { |
562 | if (!has_dominating_ubsan_ptr_check (ctx, base_addr, expr_offset)) |
563 | ; /* Don't record base_addr + expr_offset, it's not a guarding |
564 | check. */ |
565 | else |
566 | { |
567 | bool sign_total_offset = !wi::neg_p (total_offset); |
568 | if (sign_expr_offset == sign_total_offset) |
569 | return true; |
570 | else |
571 | { |
572 | if (has_dominating_ubsan_ptr_check (ctx, base_addr, |
573 | total_offset)) |
574 | return true; |
575 | else |
576 | add = true; |
577 | } |
578 | } |
579 | } |
580 | |
581 | /* Record a new dominating check for base_addr + total_offset. */ |
582 | if (add && !operand_equal_p (base, base_addr, 0)) |
583 | record_ubsan_ptr_check_stmt (ctx, stmt, base_addr, |
584 | total_offset); |
585 | } |
586 | } |
587 | |
588 | /* For this PTR we don't have any UBSAN_PTR stmts recorded, so there's |
589 | nothing to optimize yet. */ |
590 | record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset); |
591 | |
592 | return false; |
593 | } |
594 | |
595 | /* Optimize away redundant UBSAN_VPTR calls. The second argument |
596 | is the value loaded from the virtual table, so rely on FRE to find out |
597 | when we can actually optimize. */ |
598 | |
599 | static bool |
600 | maybe_optimize_ubsan_vptr_ifn (class sanopt_ctx *ctx, gimple *stmt) |
601 | { |
602 | gcc_assert (gimple_call_num_args (stmt) == 5); |
603 | sanopt_tree_triplet triplet; |
604 | triplet.t1 = gimple_call_arg (stmt, 0); |
605 | triplet.t2 = gimple_call_arg (stmt, 1); |
606 | triplet.t3 = gimple_call_arg (stmt, 3); |
607 | |
608 | auto_vec<gimple *> &v = ctx->vptr_check_map.get_or_insert (triplet); |
609 | gimple *g = maybe_get_dominating_check (v); |
610 | if (!g) |
611 | { |
612 | /* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's |
613 | nothing to optimize yet. */ |
614 | v.safe_push (stmt); |
615 | return false; |
616 | } |
617 | |
618 | return true; |
619 | } |
620 | |
621 | /* Checks whether value of T in CHECK and USE is the same. */ |
622 | |
623 | static bool |
624 | same_value_p (gimple *check, gimple *use, tree t) |
625 | { |
626 | tree check_vuse = gimple_vuse (check); |
627 | tree use_vuse = gimple_vuse (use); |
628 | |
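/* An SSA name or invariant has the same value everywhere; a memory
reference is accepted only if the use has no VUSE or both statements
share the same VUSE. */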
629 | if (TREE_CODE (t) == SSA_NAME |
630 | || is_gimple_min_invariant (t) |
631 | || ! use_vuse) |
632 | return true; |
633 | |
634 | if (check_vuse == use_vuse) |
635 | return true; |
636 | |
637 | return false; |
638 | } |
639 | |
640 | /* Returns TRUE if ASan check of length LEN in block BB can be removed |
641 | if preceded by checks in V. */ |
642 | |
643 | static bool |
644 | can_remove_asan_check (auto_vec<gimple *> &v, tree len, basic_block bb, |
645 | gimple *base_stmt, tree base_addr) |
646 | { |
647 | unsigned int i; |
648 | gimple *g; |
649 | gimple *to_pop = NULL; |
650 | bool remove = false; |
651 | basic_block last_bb = bb; |
652 | bool cleanup = false; |
653 | |
654 | FOR_EACH_VEC_ELT_REVERSE (v, i, g) |
655 | { |
656 | basic_block gbb = gimple_bb (g); |
657 | sanopt_info *si = (sanopt_info *) gbb->aux; |
658 | if (gimple_uid (g) < si->freeing_call_events) |
659 | { |
660 | /* If there is a potentially freeing call after g in gbb, we should |
661 | remove it from the vector; it can't be used in the optimization. */ |
662 | cleanup = true; |
663 | continue; |
664 | } |
665 | |
666 | tree glen = gimple_call_arg (g, 2); |
667 | gcc_assert (TREE_CODE (glen) == INTEGER_CST); |
668 | |
669 | /* If we've checked only smaller length than we want to check now, |
670 | we can't remove the current stmt. If g is in the same basic block, |
671 | we want to remove it though, as the current stmt is better. */ |
672 | if (tree_int_cst_lt (glen, len)) |
673 | { |
674 | if (gbb == bb) |
675 | { |
676 | to_pop = g; |
677 | cleanup = true; |
678 | } |
679 | continue; |
680 | } |
681 | |
682 | while (last_bb != gbb) |
683 | { |
684 | /* Paths from last_bb to bb have been checked before. |
685 | gbb is necessarily a dominator of last_bb, but not necessarily |
686 | immediate dominator. */ |
687 | if (((sanopt_info *) last_bb->aux)->freeing_call_events) |
688 | break; |
689 | |
690 | basic_block imm = get_immediate_dominator (CDI_DOMINATORS, last_bb); |
691 | gcc_assert (imm); |
692 | if (imm_dom_path_with_freeing_call (last_bb, imm)) |
693 | break; |
694 | |
695 | last_bb = imm; |
696 | } |
697 | if (last_bb != gbb) |
698 | break; |
699 | // In case of base_addr residing in memory we also need to check aliasing |
700 | remove = ! base_addr || same_value_p (g, base_stmt, base_addr); |
701 | break; |
702 | } |
703 | |
704 | if (cleanup) |
705 | { |
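/* Remove TO_POP and any check that has already been followed by a
possibly freeing call in its basic block. */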
706 | unsigned int j = 0, l = v.length (); |
707 | for (i = 0; i < l; i++) |
708 | if (v[i] != to_pop |
709 | && (gimple_uid (v[i]) |
710 | == ((sanopt_info *) |
711 | gimple_bb (v[i])->aux)->freeing_call_events)) |
712 | { |
713 | if (i != j) |
714 | v[j] = v[i]; |
715 | j++; |
716 | } |
717 | v.truncate (j); |
718 | } |
719 | |
720 | return remove; |
721 | } |
722 | |
723 | /* Optimize away redundant ASAN_CHECK calls. */ |
724 | |
725 | static bool |
726 | maybe_optimize_asan_check_ifn (class sanopt_ctx *ctx, gimple *stmt) |
727 | { |
728 | gcc_assert (gimple_call_num_args (stmt) == 4); |
729 | tree ptr = gimple_call_arg (stmt, 1); |
730 | tree len = gimple_call_arg (stmt, 2); |
731 | basic_block bb = gimple_bb (stmt); |
732 | sanopt_info *info = (sanopt_info *) bb->aux; |
733 | |
734 | if (TREE_CODE (len) != INTEGER_CST) |
735 | return false; |
736 | if (integer_zerop (len)) |
737 | return false; |
738 | |
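/* Record in the statement's uid how many possibly freeing calls were seen
in BB before this check; can_remove_asan_check uses it to detect checks
invalidated by a later call. */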
739 | gimple_set_uid (stmt, info->freeing_call_events); |
740 | |
741 | auto_vec<gimple *> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr); |
742 | |
743 | gimple *base_stmt = maybe_get_single_definition (ptr); |
744 | tree base_addr = base_stmt ? gimple_assign_rhs1 (base_stmt) : NULL_TREE; |
745 | auto_vec<gimple *> *base_checks = NULL; |
746 | if (base_addr) |
747 | { |
748 | base_checks = &ctx->asan_check_map.get_or_insert (base_addr); |
749 | /* Original pointer might have been invalidated. */ |
750 | ptr_checks = ctx->asan_check_map.get (ptr); |
751 | } |
752 | |
753 | gimple *g = maybe_get_dominating_check (*ptr_checks); |
754 | gimple *g2 = NULL; |
755 | |
756 | if (base_checks) |
757 | /* Try with base address as well. */ |
758 | g2 = maybe_get_dominating_check (*base_checks); |
759 | |
760 | if (g == NULL && g2 == NULL) |
761 | { |
762 | /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's |
763 | nothing to optimize yet. */ |
764 | ptr_checks->safe_push (stmt); |
765 | if (base_checks) |
766 | base_checks->safe_push (stmt); |
767 | return false; |
768 | } |
769 | |
770 | bool remove = false; |
771 | |
772 | if (ptr_checks) |
773 | remove = can_remove_asan_check (*ptr_checks, len, bb, NULL, NULL); |
774 | |
775 | if (!remove && base_checks) |
776 | /* Try with base address as well. */ |
777 | remove = can_remove_asan_check (*base_checks, len, bb, base_stmt, |
778 | base_addr); |
779 | |
780 | if (!remove) |
781 | { |
782 | ptr_checks->safe_push (stmt); |
783 | if (base_checks) |
784 | base_checks->safe_push (stmt); |
785 | } |
786 | |
787 | return remove; |
788 | } |
789 | |
790 | /* Try to optimize away redundant UBSAN_NULL and ASAN_CHECK calls. |
791 | |
792 | We walk blocks in the CFG via a depth first search of the dominator |
793 | tree; we push unique UBSAN_NULL or ASAN_CHECK statements into a vector |
794 | in the NULL_CHECK_MAP or ASAN_CHECK_MAP hash maps as we enter the |
795 | blocks. When leaving a block, we mark the block as visited; then |
796 | when checking the statements in the vector, we ignore statements that |
797 | are coming from already visited blocks, because these cannot dominate |
798 | anything anymore. CTX is a sanopt context. */ |
799 | |
800 | static void |
801 | sanopt_optimize_walker (basic_block bb, class sanopt_ctx *ctx) |
802 | { |
803 | basic_block son; |
804 | gimple_stmt_iterator gsi; |
805 | sanopt_info *info = (sanopt_info *) bb->aux; |
806 | bool asan_check_optimize |
807 | = ((flag_sanitize & (SANITIZE_ADDRESS | SANITIZE_HWADDRESS)) != 0); |
808 | |
809 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);) |
810 | { |
811 | gimple *stmt = gsi_stmt (gsi); |
812 | bool remove = false; |
813 | |
814 | if (!is_gimple_call (stmt)) |
815 | { |
816 | /* Handle asm volatile or asm with "memory" clobber |
817 | the same as a potentially freeing call. */ |
818 | gasm *asm_stmt = dyn_cast <gasm *> (stmt); |
819 | if (asm_stmt |
820 | && asan_check_optimize |
821 | && (gimple_asm_clobbers_memory_p (asm_stmt) |
822 | || gimple_asm_volatile_p (asm_stmt))) |
823 | info->freeing_call_events++; |
824 | gsi_next (&gsi); |
825 | continue; |
826 | } |
827 | |
828 | if (asan_check_optimize && !nonfreeing_call_p (stmt)) |
829 | info->freeing_call_events++; |
830 | |
831 | /* If __asan_before_dynamic_init ("module"); is followed by |
832 | __asan_after_dynamic_init (); without intervening memory loads/stores, |
833 | there is nothing to guard, so optimize both away. */ |
834 | if (asan_check_optimize |
835 | && gimple_call_builtin_p (stmt, BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT)) |
836 | { |
837 | gcc_assert (!hwasan_sanitize_p ()); |
838 | use_operand_p use; |
839 | gimple *use_stmt; |
840 | if (single_imm_use (gimple_vdef (stmt), &use, &use_stmt)) |
841 | { |
842 | if (is_gimple_call (use_stmt) |
843 | && gimple_call_builtin_p (use_stmt, |
844 | BUILT_IN_ASAN_AFTER_DYNAMIC_INIT)) |
845 | { |
846 | unlink_stmt_vdef (use_stmt); |
847 | gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt); |
848 | gsi_remove (&gsi2, true); |
849 | remove = true; |
850 | } |
851 | } |
852 | } |
853 | |
854 | if (gimple_call_internal_p (stmt)) |
855 | switch (gimple_call_internal_fn (stmt)) |
856 | { |
857 | case IFN_UBSAN_NULL: |
858 | remove = maybe_optimize_ubsan_null_ifn (ctx, stmt); |
859 | break; |
860 | case IFN_UBSAN_VPTR: |
861 | remove = maybe_optimize_ubsan_vptr_ifn (ctx, stmt); |
862 | break; |
863 | case IFN_UBSAN_PTR: |
864 | remove = maybe_optimize_ubsan_ptr_ifn (ctx, stmt); |
865 | break; |
866 | case IFN_HWASAN_CHECK: |
867 | case IFN_ASAN_CHECK: |
868 | if (asan_check_optimize) |
869 | remove = maybe_optimize_asan_check_ifn (ctx, stmt); |
870 | if (!remove) |
871 | ctx->asan_num_accesses++; |
872 | break; |
873 | case IFN_ASAN_MARK: |
874 | ctx->contains_asan_mark = true; |
875 | break; |
876 | default: |
877 | break; |
878 | } |
879 | |
880 | if (remove) |
881 | { |
882 | /* Drop this check. */ |
883 | if (dump_file && (dump_flags & TDF_DETAILS)) |
884 | { |
885 | fprintf (dump_file, "Optimizing out: "); |
886 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
887 | } |
888 | unlink_stmt_vdef (stmt); |
889 | gsi_remove (&gsi, true); |
890 | } |
891 | else |
892 | { |
893 | if (dump_file && (dump_flags & TDF_DETAILS)) |
894 | { |
895 | fprintf (dump_file, "Leaving: "); |
896 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
897 | } |
898 | |
899 | gsi_next (&gsi); |
900 | } |
901 | } |
902 | |
903 | if (asan_check_optimize) |
904 | { |
905 | info->has_freeing_call_p = info->freeing_call_events != 0; |
906 | info->has_freeing_call_computed_p = true; |
907 | } |
908 | |
909 | for (son = first_dom_son (CDI_DOMINATORS, bb); |
910 | son; |
911 | son = next_dom_son (CDI_DOMINATORS, son)) |
912 | sanopt_optimize_walker (son, ctx); |
913 | |
914 | /* We're leaving this BB, so mark it to that effect. */ |
915 | info->visited_p = true; |
916 | } |
917 | |
918 | /* Try to remove redundant sanitizer checks in function FUN. */ |
919 | |
920 | static int |
921 | sanopt_optimize (function *fun, bool *contains_asan_mark) |
922 | { |
923 | class sanopt_ctx ctx; |
924 | ctx.asan_num_accesses = 0; |
925 | ctx.contains_asan_mark = false; |
926 | |
927 | /* Set up block info for each basic block. */ |
928 | alloc_aux_for_blocks (sizeof (sanopt_info)); |
929 | |
930 | /* We're going to do a dominator walk, so ensure that we have |
931 | dominance information. */ |
932 | calculate_dominance_info (CDI_DOMINATORS); |
933 | |
934 | /* Recursively walk the dominator tree optimizing away |
935 | redundant checks. */ |
936 | sanopt_optimize_walker (ENTRY_BLOCK_PTR_FOR_FN (fun), &ctx); |
937 | |
938 | free_aux_for_blocks (); |
939 | |
940 | *contains_asan_mark = ctx.contains_asan_mark; |
941 | return ctx.asan_num_accesses; |
942 | } |
943 | |
944 | /* Perform optimization of sanitize functions. */ |
945 | |
946 | namespace { |
947 | |
948 | const pass_data pass_data_sanopt = |
949 | { |
950 | GIMPLE_PASS, /* type */ |
951 | "sanopt", /* name */ |
952 | OPTGROUP_NONE, /* optinfo_flags */ |
953 | TV_NONE, /* tv_id */ |
954 | ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */ |
955 | 0, /* properties_provided */ |
956 | 0, /* properties_destroyed */ |
957 | 0, /* todo_flags_start */ |
958 | TODO_update_ssa, /* todo_flags_finish */ |
959 | }; |
960 | |
961 | class pass_sanopt : public gimple_opt_pass |
962 | { |
963 | public: |
964 | pass_sanopt (gcc::context *ctxt) |
965 | : gimple_opt_pass (pass_data_sanopt, ctxt) |
966 | {} |
967 | |
968 | /* opt_pass methods: */ |
969 | bool gate (function *) final override |
970 | { |
971 | /* SANITIZE_RETURN is handled in the front-end. When trapping, |
972 | SANITIZE_UNREACHABLE is handled by builtin_decl_unreachable. */ |
973 | unsigned int mask = SANITIZE_RETURN; |
974 | if (flag_sanitize_trap & SANITIZE_UNREACHABLE) |
975 | mask |= SANITIZE_UNREACHABLE; |
976 | return flag_sanitize & ~mask; |
977 | } |
978 | unsigned int execute (function *) final override; |
979 | |
980 | }; // class pass_sanopt |
981 | |
982 | /* Sanitize all ASAN_MARK unpoison calls that are not reachable by a BB |
983 | that contains an ASAN_MARK poison. All these ASAN_MARK unpoison calls |
984 | can be removed as all variables are unpoisoned in a function prologue. */ |
985 | |
986 | static void |
987 | sanitize_asan_mark_unpoison (void) |
988 | { |
989 | /* 1) Find all BBs that contain an ASAN_MARK poison call. */ |
990 | auto_bitmap with_poison; |
991 | basic_block bb; |
992 | |
993 | FOR_EACH_BB_FN (bb, cfun) |
994 | { |
995 | gimple_stmt_iterator gsi; |
996 | for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi)) |
997 | { |
998 | gimple *stmt = gsi_stmt (gsi); |
999 | if (asan_mark_p (stmt, ASAN_MARK_POISON)) |
1000 | { |
1001 | bitmap_set_bit (with_poison, bb->index); |
1002 | break; |
1003 | } |
1004 | } |
1005 | } |
1006 | |
1007 | auto_sbitmap poisoned (last_basic_block_for_fn (cfun) + 1); |
1008 | bitmap_clear (poisoned); |
1009 | /* We now treat with_poison as worklist. */ |
1010 | bitmap worklist = with_poison; |
1011 | |
1012 | /* 2) Propagate the information to all reachable blocks. */ |
1013 | while (!bitmap_empty_p (worklist)) |
1014 | { |
1015 | unsigned i = bitmap_clear_first_set_bit (worklist); |
1016 | basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i); |
1017 | gcc_assert (bb); |
1018 | |
1019 | edge e; |
1020 | edge_iterator ei; |
1021 | FOR_EACH_EDGE (e, ei, bb->succs) |
1022 | if (!bitmap_bit_p (poisoned, e->dest->index)) |
1023 | { |
1024 | bitmap_set_bit (poisoned, e->dest->index); |
1025 | bitmap_set_bit (worklist, e->dest->index); |
1026 | } |
1027 | } |
1028 | |
1029 | /* 3) Iterate all BBs not included in POISONED BBs and remove unpoison |
1030 | ASAN_MARK preceding an ASAN_MARK poison (which can still happen). */ |
1031 | FOR_EACH_BB_FN (bb, cfun) |
1032 | { |
1033 | if (bitmap_bit_p (poisoned, bb->index)) |
1034 | continue; |
1035 | |
1036 | gimple_stmt_iterator gsi; |
1037 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);) |
1038 | { |
1039 | gimple *stmt = gsi_stmt (gsi); |
1040 | if (gimple_call_internal_p (stmt, IFN_ASAN_MARK)) |
1041 | { |
1042 | if (asan_mark_p (stmt, ASAN_MARK_POISON)) |
1043 | break; |
1044 | else |
1045 | { |
1046 | if (dump_file) |
1047 | fprintf (dump_file, "Removing ASAN_MARK unpoison\n"); |
1048 | unlink_stmt_vdef (stmt); |
1049 | release_defs (stmt); |
1050 | gsi_remove (&gsi, true); |
1051 | continue; |
1052 | } |
1053 | } |
1054 | |
1055 | gsi_next (&gsi); |
1056 | } |
1057 | } |
1058 | } |
1059 | |
1060 | /* Return true when STMT is either an ASAN_CHECK call or a call to a function |
1061 | that can contain an ASAN_CHECK. */ |
1062 | |
1063 | static bool |
1064 | maybe_contains_asan_check (gimple *stmt) |
1065 | { |
1066 | if (is_gimple_call (stmt)) |
1067 | { |
1068 | if (gimple_call_internal_p (stmt, IFN_ASAN_MARK)) |
1069 | return false; |
1070 | else |
1071 | return !(gimple_call_flags (stmt) & ECF_CONST); |
1072 | } |
1073 | else if (is_a<gasm *> (stmt)) |
1074 | return true; |
1075 | |
1076 | return false; |
1077 | } |
1078 | |
1079 | /* Sanitize all ASAN_MARK poison calls that are not followed by an ASAN_CHECK |
1080 | call. These calls can be removed. */ |
1081 | |
1082 | static void |
1083 | sanitize_asan_mark_poison (void) |
1084 | { |
1085 | /* 1) Find all BBs that possibly contain an ASAN_CHECK. */ |
1086 | auto_bitmap with_check; |
1087 | basic_block bb; |
1088 | |
1089 | FOR_EACH_BB_FN (bb, cfun) |
1090 | { |
1091 | gimple_stmt_iterator gsi; |
1092 | for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi)) |
1093 | { |
1094 | gimple *stmt = gsi_stmt (gsi); |
1095 | if (maybe_contains_asan_check (stmt)) |
1096 | { |
1097 | bitmap_set_bit (with_check, bb->index); |
1098 | break; |
1099 | } |
1100 | } |
1101 | } |
1102 | |
1103 | auto_sbitmap can_reach_check (last_basic_block_for_fn (cfun) + 1); |
1104 | bitmap_clear (can_reach_check); |
1105 | /* We now treat with_check as worklist. */ |
1106 | bitmap worklist = with_check; |
1107 | |
1108 | /* 2) Propagate the information to all blocks that can reach a check. */ |
1109 | while (!bitmap_empty_p (worklist)) |
1110 | { |
1111 | unsigned i = bitmap_clear_first_set_bit (worklist); |
1112 | basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i); |
1113 | gcc_assert (bb); |
1114 | |
1115 | edge e; |
1116 | edge_iterator ei; |
1117 | FOR_EACH_EDGE (e, ei, bb->preds) |
1118 | if (!bitmap_bit_p (can_reach_check, e->src->index)) |
1119 | { |
1120 | bitmap_set_bit (can_reach_check, e->src->index); |
1121 | bitmap_set_bit (worklist, e->src->index); |
1122 | } |
1123 | } |
1124 | |
1125 | /* 3) Iterate all BBs not included in CAN_REACH_CHECK BBs and remove poison |
1126 | ASAN_MARK not followed by a call to function having an ASAN_CHECK. */ |
1127 | FOR_EACH_BB_FN (bb, cfun) |
1128 | { |
1129 | if (bitmap_bit_p (can_reach_check, bb->index)) |
1130 | continue; |
1131 | |
1132 | gimple_stmt_iterator gsi; |
1133 | for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);) |
1134 | { |
1135 | gimple *stmt = gsi_stmt (gsi); |
1136 | if (maybe_contains_asan_check (stmt)) |
1137 | break; |
1138 | else if (asan_mark_p (stmt, ASAN_MARK_POISON)) |
1139 | { |
1140 | if (dump_file) |
1141 | fprintf (dump_file, "Removing ASAN_MARK poison\n"); |
1142 | unlink_stmt_vdef (stmt); |
1143 | release_defs (stmt); |
1144 | gimple_stmt_iterator gsi2 = gsi; |
1145 | gsi_prev (&gsi); |
1146 | gsi_remove (&gsi2, true); |
1147 | continue; |
1148 | } |
1149 | |
1150 | gsi_prev (&gsi); |
1151 | } |
1152 | } |
1153 | } |
1154 | |
1155 | /* Rewrite all usages of tree OP which is a PARM_DECL with a VAR_DECL |
1156 | that is its DECL_VALUE_EXPR. */ |
1157 | |
1158 | static tree |
1159 | rewrite_usage_of_param (tree *op, int *walk_subtrees, void *) |
1160 | { |
1161 | if (TREE_CODE (*op) == PARM_DECL && DECL_HAS_VALUE_EXPR_P (*op)) |
1162 | { |
1163 | *op = DECL_VALUE_EXPR (*op); |
1164 | *walk_subtrees = 0; |
1165 | } |
1166 | |
1167 | return NULL; |
1168 | } |
1169 | |
1170 | /* For a given function FUN, rewrite all addressable parameters so that |
1171 | a new automatic variable is introduced. Right after function entry |
1172 | a parameter is assigned to the variable. */ |
1173 | |
1174 | static void |
1175 | sanitize_rewrite_addressable_params (function *fun) |
1176 | { |
1177 | gimple *g; |
1178 | gimple_seq stmts = NULL; |
1179 | bool has_any_addressable_param = false; |
1180 | auto_vec<tree> clear_value_expr_list; |
1181 | |
1182 | for (tree arg = DECL_ARGUMENTS (current_function_decl); |
1183 | arg; arg = DECL_CHAIN (arg)) |
1184 | { |
1185 | tree type = TREE_TYPE (arg); |
1186 | if (TREE_ADDRESSABLE (arg) |
1187 | && !TREE_ADDRESSABLE (type) |
1188 | && !TREE_THIS_VOLATILE (arg) |
1189 | && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST) |
1190 | { |
1191 | TREE_ADDRESSABLE (arg) = 0; |
1192 | DECL_NOT_GIMPLE_REG_P (arg) = 0; |
1193 | /* The parameter is no longer addressable. */ |
1194 | has_any_addressable_param = true; |
1195 | |
1196 | /* Create a new automatic variable. */ |
1197 | tree var = build_decl (DECL_SOURCE_LOCATION (arg), |
1198 | VAR_DECL, DECL_NAME (arg), type); |
1199 | TREE_ADDRESSABLE (var) = 1; |
1200 | DECL_IGNORED_P (var) = 1; |
1201 | |
1202 | gimple_add_tmp_var (var); |
1203 | |
1204 | /* We skip parameters that have a DECL_VALUE_EXPR. */ |
1205 | if (DECL_HAS_VALUE_EXPR_P (arg)) |
1206 | continue; |
1207 | |
1208 | if (dump_file) |
1209 | { |
1210 | fprintf (dump_file, |
1211 | "Rewriting parameter whose address is taken: "); |
1212 | print_generic_expr (dump_file, arg, dump_flags); |
1213 | fputc ('\n', dump_file); |
1214 | } |
1215 | |
1216 | SET_DECL_PT_UID (var, DECL_PT_UID (arg)); |
1217 | |
1218 | /* Assign value of parameter to newly created variable. */ |
1219 | if ((TREE_CODE (type) == COMPLEX_TYPE |
1220 | || TREE_CODE (type) == VECTOR_TYPE)) |
1221 | { |
1222 | /* We need to create a SSA name that will be used for the |
1223 | assignment. */ |
1224 | tree tmp = get_or_create_ssa_default_def (cfun, arg); |
1225 | g = gimple_build_assign (var, tmp); |
1226 | gimple_set_location (g, DECL_SOURCE_LOCATION (arg)); |
1227 | gimple_seq_add_stmt (&stmts, g); |
1228 | } |
1229 | else |
1230 | { |
1231 | g = gimple_build_assign (var, arg); |
1232 | gimple_set_location (g, DECL_SOURCE_LOCATION (arg)); |
1233 | gimple_seq_add_stmt (&stmts, g); |
1234 | } |
1235 | |
1236 | if (target_for_debug_bind (arg)) |
1237 | { |
1238 | g = gimple_build_debug_bind (arg, var, NULL); |
1239 | gimple_seq_add_stmt (&stmts, g); |
1240 | clear_value_expr_list.safe_push (arg); |
1241 | } |
1242 | |
1243 | DECL_HAS_VALUE_EXPR_P (arg) = 1; |
1244 | SET_DECL_VALUE_EXPR (arg, var); |
1245 | } |
1246 | } |
1247 | |
1248 | if (!has_any_addressable_param) |
1249 | return; |
1250 | |
1251 | /* Replace all usages of PARM_DECLs with the newly |
1252 | created variable VAR. */ |
1253 | basic_block bb; |
1254 | FOR_EACH_BB_FN (bb, fun) |
1255 | { |
1256 | gimple_stmt_iterator gsi; |
1257 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
1258 | { |
1259 | gimple *stmt = gsi_stmt (gsi); |
1260 | gimple_stmt_iterator it = gsi_for_stmt (stmt); |
1261 | walk_gimple_stmt (&it, NULL, rewrite_usage_of_param, NULL); |
1262 | } |
1263 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
1264 | { |
1265 | gphi *phi = dyn_cast<gphi *> (gsi_stmt (gsi)); |
1266 | for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i) |
1267 | { |
1268 | hash_set<tree> visited_nodes; |
1269 | walk_tree (gimple_phi_arg_def_ptr (phi, i), |
1270 | rewrite_usage_of_param, NULL, &visited_nodes); |
1271 | } |
1272 | } |
1273 | } |
1274 | |
1275 | /* Unset value expr for parameters for which we created debug bind |
1276 | expressions. */ |
1277 | for (tree arg : clear_value_expr_list) |
1278 | { |
1279 | DECL_HAS_VALUE_EXPR_P (arg) = 0; |
1280 | SET_DECL_VALUE_EXPR (arg, NULL_TREE); |
1281 | } |
1282 | |
1283 | /* Insert default assignments at the beginning of a function. */ |
1284 | basic_block entry_bb = ENTRY_BLOCK_PTR_FOR_FN (fun); |
1285 | entry_bb = split_edge (single_succ_edge (entry_bb)); |
1286 | |
1287 | gimple_stmt_iterator gsi = gsi_start_bb (entry_bb); |
1288 | gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT); |
1289 | } |
1290 | |
1291 | unsigned int |
1292 | pass_sanopt::execute (function *fun) |
1293 | { |
1294 | /* n.b. ASAN_MARK is used for both HWASAN and ASAN. |
1295 | asan_num_accesses is hence used to count either HWASAN_CHECK or ASAN_CHECK |
1296 | stuff. This is fine because you can only have one of these active at a |
1297 | time. */ |
1298 | basic_block bb; |
1299 | int asan_num_accesses = 0; |
1300 | bool contains_asan_mark = false; |
1301 | int ret = 0; |
1302 | |
1303 | /* Try to remove redundant checks. */ |
1304 | if (optimize |
1305 | && (flag_sanitize |
1306 | & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_HWADDRESS |
1307 | | SANITIZE_ADDRESS | SANITIZE_VPTR | SANITIZE_POINTER_OVERFLOW))) |
1308 | asan_num_accesses = sanopt_optimize (fun, &contains_asan_mark); |
1309 | else if (flag_sanitize & (SANITIZE_ADDRESS | SANITIZE_HWADDRESS)) |
1310 | { |
1311 | gimple_stmt_iterator gsi; |
1312 | FOR_EACH_BB_FN (bb, fun) |
1313 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
1314 | { |
1315 | gimple *stmt = gsi_stmt (gsi); |
1316 | if (gimple_call_internal_p (stmt, IFN_ASAN_CHECK)) |
1317 | ++asan_num_accesses; |
1318 | else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK)) |
1319 | contains_asan_mark = true; |
1320 | } |
1321 | } |
1322 | |
1323 | if (contains_asan_mark) |
1324 | { |
1325 | sanitize_asan_mark_unpoison (); |
1326 | sanitize_asan_mark_poison (); |
1327 | } |
1328 | |
1329 | if (asan_sanitize_stack_p () || hwasan_sanitize_stack_p ()) |
1330 | sanitize_rewrite_addressable_params (fun); |
1331 | |
1332 | bool use_calls = param_asan_instrumentation_with_call_threshold < INT_MAX |
1333 | && asan_num_accesses >= param_asan_instrumentation_with_call_threshold; |
1334 | |
1335 | hash_map<tree, tree> shadow_vars_mapping; |
1336 | bool need_commit_edge_insert = false; |
1337 | FOR_EACH_BB_FN (bb, fun) |
1338 | { |
1339 | gimple_stmt_iterator gsi; |
1340 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); ) |
1341 | { |
1342 | gimple *stmt = gsi_stmt (gsi); |
1343 | bool no_next = false; |
1344 | |
1345 | if (!is_gimple_call (stmt)) |
1346 | { |
1347 | gsi_next (&gsi); |
1348 | continue; |
1349 | } |
1350 | |
1351 | if (gimple_call_internal_p (stmt)) |
1352 | { |
1353 | enum internal_fn ifn = gimple_call_internal_fn (stmt); |
1354 | int this_ret = TODO_cleanup_cfg; |
1355 | switch (ifn) |
1356 | { |
1357 | case IFN_UBSAN_NULL: |
1358 | no_next = ubsan_expand_null_ifn (&gsi); |
1359 | break; |
1360 | case IFN_UBSAN_BOUNDS: |
1361 | no_next = ubsan_expand_bounds_ifn (&gsi); |
1362 | break; |
1363 | case IFN_UBSAN_OBJECT_SIZE: |
1364 | no_next = ubsan_expand_objsize_ifn (&gsi); |
1365 | break; |
1366 | case IFN_UBSAN_PTR: |
1367 | no_next = ubsan_expand_ptr_ifn (&gsi); |
1368 | break; |
1369 | case IFN_UBSAN_VPTR: |
1370 | no_next = ubsan_expand_vptr_ifn (&gsi); |
1371 | break; |
1372 | case IFN_HWASAN_CHECK: |
1373 | no_next = hwasan_expand_check_ifn (&gsi, use_calls); |
1374 | break; |
1375 | case IFN_ASAN_CHECK: |
1376 | no_next = asan_expand_check_ifn (&gsi, use_calls); |
1377 | break; |
1378 | case IFN_ASAN_MARK: |
1379 | no_next = asan_expand_mark_ifn (&gsi); |
1380 | break; |
1381 | case IFN_ASAN_POISON: |
1382 | no_next = asan_expand_poison_ifn (&gsi, |
1383 | &need_commit_edge_insert, |
1384 | shadow_vars_mapping); |
1385 | break; |
1386 | case IFN_HWASAN_MARK: |
1387 | no_next = hwasan_expand_mark_ifn (&gsi); |
1388 | break; |
1389 | default: |
1390 | this_ret = 0; |
1391 | break; |
1392 | } |
1393 | ret |= this_ret; |
1394 | } |
1395 | else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) |
1396 | { |
1397 | tree callee = gimple_call_fndecl (stmt); |
1398 | switch (DECL_FUNCTION_CODE (callee)) |
1399 | { |
1400 | case BUILT_IN_UNREACHABLE: |
1401 | if (sanitize_flags_p (SANITIZE_UNREACHABLE)) |
1402 | no_next = ubsan_instrument_unreachable (&gsi); |
1403 | break; |
1404 | default: |
1405 | break; |
1406 | } |
1407 | } |
1408 | |
1409 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1410 | { |
1411 | fprintf (dump_file, "Expanded: "); |
1412 | print_gimple_stmt (dump_file, stmt, 0, dump_flags); |
1413 | } |
1414 | |
1415 | if (!no_next) |
1416 | gsi_next (&gsi); |
1417 | } |
1418 | } |
1419 | |
1420 | if (need_commit_edge_insert) |
1421 | gsi_commit_edge_inserts (); |
1422 | |
1423 | return ret; |
1424 | } |
1425 | |
1426 | } // anon namespace |
1427 | |
1428 | gimple_opt_pass * |
1429 | make_pass_sanopt (gcc::context *ctxt) |
1430 | { |
1431 | return new pass_sanopt (ctxt); |
1432 | } |
1433 | |