/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" are those whose
   removal cannot change the output, while "necessary statements" may
   contribute to the output and therefore must be kept.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
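
/* As a minimal illustration (hypothetical GIMPLE, not taken from any
   testcase):

     x_1 = a_2 + b_3;
     y_4 = a_2 * 2;
     return y_4;

   the return is obviously necessary (phase 1), the definition of y_4
   becomes necessary because the return uses y_4 (phase 2), and the
   never-used definition of x_1 is removed as dead (phase 3).  */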

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
#include "tree-ssa.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
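
/* For instance (illustrative example, not tied to any testcase): in a
   diamond "if (q) then A else B; join C", block A is control dependent
   only on the true edge out of the condition block and B only on the
   false edge, while C, which postdominates the condition, is control
   dependent on neither; the bitmap CD records for A therefore has just
   that one edge's bit set.  */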

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL, holds a map from basic block index into the postorder.  */
static int *bb_postorder;


/* True if we should treat any stmt with a vdef as necessary.  */

static inline bool
keep_all_vdefs_p ()
{
  return optimize_debug;
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
192
193
194/* Mark STMT as necessary if it obviously is. Add it to the worklist if
195 it can make other statements necessary.
196
197 If AGGRESSIVE is false, control statements are conservatively marked as
198 necessary. */
199
200static void
201mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
202{
203 /* Statements that are implicitly live. Most function calls, asm
204 and return statements are required. Labels and GIMPLE_BIND nodes
205 are kept because they are control flow, and we have no way of
206 knowing whether they can be removed. DCE can eliminate all the
207 other statements in a block, and CFG can then remove the block
208 and labels. */
209 switch (gimple_code (g: stmt))
210 {
211 case GIMPLE_PREDICT:
212 case GIMPLE_LABEL:
213 mark_stmt_necessary (stmt, add_to_worklist: false);
214 return;
215
216 case GIMPLE_ASM:
217 case GIMPLE_RESX:
218 case GIMPLE_RETURN:
219 mark_stmt_necessary (stmt, add_to_worklist: true);
220 return;
221
222 case GIMPLE_CALL:
223 {
224 /* Never elide a noreturn call we pruned control-flow for. */
225 if ((gimple_call_flags (stmt) & ECF_NORETURN)
226 && gimple_call_ctrl_altering_p (gs: stmt))
227 {
228 mark_stmt_necessary (stmt, add_to_worklist: true);
229 return;
230 }
231
232 tree callee = gimple_call_fndecl (gs: stmt);
233 if (callee != NULL_TREE
234 && fndecl_built_in_p (node: callee, klass: BUILT_IN_NORMAL))
235 switch (DECL_FUNCTION_CODE (decl: callee))
236 {
237 case BUILT_IN_MALLOC:
238 case BUILT_IN_ALIGNED_ALLOC:
239 case BUILT_IN_CALLOC:
240 CASE_BUILT_IN_ALLOCA:
241 case BUILT_IN_STRDUP:
242 case BUILT_IN_STRNDUP:
243 case BUILT_IN_GOMP_ALLOC:
244 return;
245
246 default:;
247 }
248
249 if (callee != NULL_TREE
250 && flag_allocation_dce
251 && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
252 return;
253
	/* IFN_GOACC_LOOP calls are necessary in that they are used to
	   represent the parameters (i.e. step, bound) of a lowered OpenACC
	   partitioned loop.  But such a partitioned loop might not survive
	   aggressive loop removal because it has a loop exit and is assumed
	   to be finite.  Therefore, we need to explicitly mark these calls.
	   (An example is libgomp.oacc-c-c++-common/pr84955.c.)  */
260 if (gimple_call_internal_p (gs: stmt, fn: IFN_GOACC_LOOP))
261 {
262 mark_stmt_necessary (stmt, add_to_worklist: true);
263 return;
264 }
265 break;
266 }
267
268 case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
273 if (gimple_debug_nonbind_marker_p (s: stmt)
274 || !gimple_debug_bind_p (s: stmt)
275 || gimple_debug_bind_has_value_p (dbg: stmt)
276 || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
277 mark_stmt_necessary (stmt, add_to_worklist: false);
278 return;
279
280 case GIMPLE_GOTO:
281 gcc_assert (!simple_goto_p (stmt));
282 mark_stmt_necessary (stmt, add_to_worklist: true);
283 return;
284
285 case GIMPLE_COND:
286 gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
287 /* Fall through. */
288
289 case GIMPLE_SWITCH:
290 if (! aggressive)
291 mark_stmt_necessary (stmt, add_to_worklist: true);
292 break;
293
294 case GIMPLE_ASSIGN:
295 /* Mark indirect CLOBBERs to be lazily removed if their SSA operands
296 do not prevail. That also makes control flow leading to them
297 not necessary in aggressive mode. */
298 if (gimple_clobber_p (s: stmt) && !zero_ssa_operands (stmt, SSA_OP_USE))
299 return;
300 break;
301
302 default:
303 break;
304 }
305
306 /* If the statement has volatile operands, it needs to be preserved.
307 Same for statements that can alter control flow in unpredictable
308 ways. */
309 if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
310 {
311 mark_stmt_necessary (stmt, add_to_worklist: true);
312 return;
313 }
314
315 /* If a statement could throw, it can be deemed necessary unless we
316 are allowed to remove dead EH. Test this after checking for
317 new/delete operators since we always elide their EH. */
318 if (!cfun->can_delete_dead_exceptions
319 && stmt_could_throw_p (cfun, stmt))
320 {
321 mark_stmt_necessary (stmt, add_to_worklist: true);
322 return;
323 }
324
325 if ((gimple_vdef (g: stmt) && keep_all_vdefs_p ())
326 || stmt_may_clobber_global_p (stmt, false))
327 {
328 mark_stmt_necessary (stmt, add_to_worklist: true);
329 return;
330 }
331
332 return;
333}
334
335
336/* Mark the last statement of BB as necessary. */
337
338static bool
339mark_last_stmt_necessary (basic_block bb)
340{
341 if (!bitmap_set_bit (map: last_stmt_necessary, bitno: bb->index))
342 return true;
343
344 bitmap_set_bit (map: bb_contains_live_stmts, bitno: bb->index);
345
346 /* We actually mark the statement only if it is a control statement. */
347 gimple *stmt = *gsi_last_bb (bb);
348 if (stmt && is_ctrl_stmt (stmt))
349 {
350 mark_stmt_necessary (stmt, add_to_worklist: true);
351 return true;
352 }
353 return false;
354}
355
356
357/* Mark control dependent edges of BB as necessary. We have to do this only
358 once for each basic block so we set the appropriate bit after we're done.
359
360 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
361
362static void
363mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
364{
365 bitmap_iterator bi;
366 unsigned edge_number;
367 bool skipped = false;
368
369 gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
370
371 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
372 return;
373
374 EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
375 0, edge_number, bi)
376 {
377 basic_block cd_bb = cd->get_edge_src (edge_number);
378
379 if (ignore_self && cd_bb == bb)
380 {
381 skipped = true;
382 continue;
383 }
384
385 if (!mark_last_stmt_necessary (bb: cd_bb))
386 mark_control_dependent_edges_necessary (bb: cd_bb, ignore_self: false);
387 }
388
389 if (!skipped)
390 bitmap_set_bit (map: visited_control_parents, bitno: bb->index);
391}
392
393
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  Otherwise they are left for the control dependence analysis
   to decide on.  */
400
401static void
402find_obviously_necessary_stmts (bool aggressive)
403{
404 basic_block bb;
405 gimple_stmt_iterator gsi;
406 edge e;
407 gimple *phi, *stmt;
408 int flags;
409
410 FOR_EACH_BB_FN (bb, cfun)
411 {
412 /* PHI nodes are never inherently necessary. */
413 for (gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
414 {
415 phi = gsi_stmt (i: gsi);
416 gimple_set_plf (stmt: phi, STMT_NECESSARY, val_p: false);
417 }
418
419 /* Check all statements in the block. */
420 for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
421 {
422 stmt = gsi_stmt (i: gsi);
423 gimple_set_plf (stmt, STMT_NECESSARY, val_p: false);
424 mark_stmt_if_obviously_necessary (stmt, aggressive);
425 }
426 }
427
428 /* Pure and const functions are finite and thus have no infinite loops in
429 them. */
430 flags = flags_from_decl_or_type (current_function_decl);
431 if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
432 return;
433
434 /* Prevent the empty possibly infinite loops from being removed. This is
435 needed to make the logic in remove_dead_stmt work to identify the
436 correct edge to keep when removing a controlling condition. */
437 if (aggressive)
438 {
439 if (mark_irreducible_loops ())
440 FOR_EACH_BB_FN (bb, cfun)
441 {
442 edge_iterator ei;
443 FOR_EACH_EDGE (e, ei, bb->succs)
444 if ((e->flags & EDGE_DFS_BACK)
445 && (e->flags & EDGE_IRREDUCIBLE_LOOP))
446 {
447 if (dump_file)
448 fprintf (stream: dump_file, format: "Marking back edge of irreducible "
449 "loop %i->%i\n", e->src->index, e->dest->index);
450 mark_control_dependent_edges_necessary (bb: e->dest, ignore_self: false);
451 }
452 }
453
454 for (auto loop : loops_list (cfun, 0))
455 /* For loops without an exit do not mark any condition. */
456 if (loop->exits->next->e && !finite_loop_p (loop))
457 {
458 if (dump_file)
459 fprintf (stream: dump_file, format: "cannot prove finiteness of loop %i\n",
460 loop->num);
461 mark_control_dependent_edges_necessary (bb: loop->latch, ignore_self: false);
462 }
463 }
464}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
	   && !may_be_aliased (ref));
}

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */
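/* As a hypothetical example: if REF is a read of s.a for a non-aliased
   local s, a dominating store that covers all of s (or exactly s.a)
   kills REF and the walk can stop there, while a store to only s.b
   does not, so the walk continues to older may-defs.  */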
494
495static bool
496mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
497{
498 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
499
500 /* All stmts we visit are necessary. */
501 if (! gimple_clobber_p (s: def_stmt))
502 mark_operand_necessary (op: vdef);
503
504 /* If the stmt lhs kills ref, then we can stop walking. */
505 if (gimple_has_lhs (stmt: def_stmt)
506 && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
507 /* The assignment is not necessarily carried out if it can throw
508 and we can catch it in the current function where we could inspect
509 the previous value.
510 ??? We only need to care about the RHS throwing. For aggregate
511 assignments or similar calls and non-call exceptions the LHS
512 might throw as well. */
513 && !stmt_can_throw_internal (cfun, def_stmt))
514 {
515 tree base, lhs = gimple_get_lhs (def_stmt);
516 poly_int64 size, offset, max_size;
517 bool reverse;
518 ao_ref_base (ref);
519 base
520 = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
521 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
522 so base == refd->base does not always hold. */
523 if (base == ref->base)
524 {
525 /* For a must-alias check we need to be able to constrain
526 the accesses properly. */
527 if (known_eq (size, max_size)
528 && known_subrange_p (pos1: ref->offset, size1: ref->max_size, pos2: offset, size2: size))
529 return true;
530 /* Or they need to be exactly the same. */
531 else if (ref->ref
532 /* Make sure there is no induction variable involved
533 in the references (gcc.c-torture/execute/pr42142.c).
534 The simplest way is to check if the kill dominates
535 the use. */
536 /* But when both are in the same block we cannot
537 easily tell whether we came from a backedge
538 unless we decide to compute stmt UIDs
539 (see PR58246). */
540 && (basic_block) data != gimple_bb (g: def_stmt)
541 && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
542 gimple_bb (g: def_stmt))
543 && operand_equal_p (ref->ref, lhs, flags: 0))
544 return true;
545 }
546 }
547
548 /* Otherwise keep walking. */
549 return false;
550}
551
552static void
553mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
554{
555 /* Should have been caught before calling this function. */
556 gcc_checking_assert (!keep_all_vdefs_p ());
557
558 unsigned int chain;
559 ao_ref refd;
560 gcc_assert (!chain_ovfl);
561 ao_ref_init (&refd, ref);
562 chain = walk_aliased_vdefs (&refd, gimple_vuse (g: stmt),
563 mark_aliased_reaching_defs_necessary_1,
564 gimple_bb (g: stmt), NULL);
565 if (chain > longest_chain)
566 longest_chain = chain;
567 total_chain += chain;
568 nr_walks++;
569}
570
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we conservatively
   mark every may-def necessary unless it is itself based on a non-aliased
   decl; the only job of this walker is to skip those non-aliased may-defs,
   which are reached, when needed, by the kill-based walk above.  */
576
577static bool
578mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
579 tree vdef, void *data ATTRIBUTE_UNUSED)
580{
581 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
582
583 /* We have to skip already visited (and thus necessary) statements
584 to make the chaining work after we dropped back to simple mode. */
585 if (chain_ovfl
586 && bitmap_bit_p (map: processed, SSA_NAME_VERSION (vdef)))
587 {
588 gcc_assert (gimple_nop_p (def_stmt)
589 || gimple_plf (def_stmt, STMT_NECESSARY));
590 return false;
591 }
592
593 /* We want to skip stores to non-aliased variables. */
594 if (!chain_ovfl
595 && gimple_assign_single_p (gs: def_stmt))
596 {
597 tree lhs = gimple_assign_lhs (gs: def_stmt);
598 if (!ref_may_be_aliased (ref: lhs))
599 return false;
600 }
601
  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
604 if (gcall *call = dyn_cast <gcall *> (p: def_stmt))
605 {
606 tree callee = gimple_call_fndecl (gs: call);
607 if (callee != NULL_TREE
608 && fndecl_built_in_p (node: callee, klass: BUILT_IN_NORMAL))
609 switch (DECL_FUNCTION_CODE (decl: callee))
610 {
611 case BUILT_IN_MALLOC:
612 case BUILT_IN_ALIGNED_ALLOC:
613 case BUILT_IN_CALLOC:
614 CASE_BUILT_IN_ALLOCA:
615 case BUILT_IN_FREE:
616 case BUILT_IN_GOMP_ALLOC:
617 case BUILT_IN_GOMP_FREE:
618 return false;
619
620 default:;
621 }
622
623 if (callee != NULL_TREE
624 && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
625 || DECL_IS_OPERATOR_DELETE_P (callee))
626 && gimple_call_from_new_or_delete (s: call))
627 return false;
628 }
629
630 if (! gimple_clobber_p (s: def_stmt))
631 mark_operand_necessary (op: vdef);
632
633 return false;
634}
635
636static void
637mark_all_reaching_defs_necessary (gimple *stmt)
638{
639 /* Should have been caught before calling this function. */
640 gcc_checking_assert (!keep_all_vdefs_p ());
641 walk_aliased_vdefs (NULL, gimple_vuse (g: stmt),
642 mark_all_reaching_defs_necessary_1, NULL, &visited);
643}
644
/* Return true if PHI has a single argument or all of its arguments are
   identical, in which case it can be removed.  */
647static bool
648degenerate_phi_p (gimple *phi)
649{
650 unsigned int i;
651 tree op = gimple_phi_arg_def (gs: phi, index: 0);
652 for (i = 1; i < gimple_phi_num_args (gs: phi); i++)
653 if (gimple_phi_arg_def (gs: phi, index: i) != op)
654 return false;
655 return true;
656}
657
/* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
   and delete operators.  */
660
661static bool
662valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
663{
664 tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
665 tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
666 return valid_new_delete_pair_p (new_asm, delete_asm);
667}
668
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, AGGRESSIVE is false and control dependences
   are not used.  */
675
676static void
677propagate_necessity (bool aggressive)
678{
679 gimple *stmt;
680
681 if (dump_file && (dump_flags & TDF_DETAILS))
682 fprintf (stream: dump_file, format: "\nProcessing worklist:\n");
683
684 while (worklist.length () > 0)
685 {
686 /* Take STMT from worklist. */
687 stmt = worklist.pop ();
688
689 if (dump_file && (dump_flags & TDF_DETAILS))
690 {
691 fprintf (stream: dump_file, format: "processing: ");
692 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
693 fprintf (stream: dump_file, format: "\n");
694 }
695
696 if (aggressive)
697 {
698 /* Mark the last statement of the basic blocks on which the block
699 containing STMT is control dependent, but only if we haven't
700 already done so. */
701 basic_block bb = gimple_bb (g: stmt);
702 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
703 && !bitmap_bit_p (map: visited_control_parents, bitno: bb->index))
704 mark_control_dependent_edges_necessary (bb, ignore_self: false);
705 }
706
707 if (gimple_code (g: stmt) == GIMPLE_PHI
708 /* We do not process virtual PHI nodes nor do we track their
709 necessity. */
710 && !virtual_operand_p (op: gimple_phi_result (gs: stmt)))
711 {
712 /* PHI nodes are somewhat special in that each PHI alternative has
713 data and control dependencies. All the statements feeding the
714 PHI node's arguments are always necessary. In aggressive mode,
715 we also consider the control dependent edges leading to the
716 predecessor block associated with each PHI alternative as
717 necessary. */
718 gphi *phi = as_a <gphi *> (p: stmt);
719 size_t k;
720
721 for (k = 0; k < gimple_phi_num_args (gs: stmt); k++)
722 {
723 tree arg = PHI_ARG_DEF (stmt, k);
724 if (TREE_CODE (arg) == SSA_NAME)
725 mark_operand_necessary (op: arg);
726 }
727
	  /* For PHI operands it matters from where the control flow arrives
	     at the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark the control dependences of the empty basic
	     blocks, since they contain the computations of the PHI operands.

	     Doing so is too restrictive when the predecessor block is inside
	     a loop.  Consider:

	      if (b)
		{
		  int i;
		  for (i = 0; i<1000; ++i)
		    ;
		  j = 0;
		}
	      return j;

	     There is a PHI for J in the BB containing the return statement.
	     In this case the control dependences of the predecessor block
	     (which is within the empty loop) also contain the block that
	     determines the number of iterations, which would prevent the
	     removal of the empty loop.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify how the
	     control dependence would look like if the edge were split.

	     Consider the modified CFG created from the current CFG by
	     splitting edge B->C.  In the postdominance tree of the modified
	     CFG, C' is always a child of C.  There are two cases how the
	     children of C' can look like:

	     1) C' is a leaf

		In this case the only basic block C' is control dependent on is B.

	     2) C' has a single child that is B

		In this case the control dependence of C' is the same as the
		control dependence of B in the original CFG except for block B
		itself (since C' postdominates B in the modified CFG).

	     Now how to decide which case happens?  There are two basic options:

	     a) C postdominates B.  Then C immediately postdominates B and
		case 2 happens iff there is no other way from B to C except
		the edge B->C.

		There is another way from B to C iff there is a successor of B
		that is not postdominated by B.  Testing this condition is
		somewhat expensive, because we need to iterate over all
		successors of B.  We are safe to assume that this does not
		happen: we will mark B as needed when processing the other
		path from B to C that is control dependent on B, and marking
		the control dependencies of B itself is harmless because they
		will be processed anyway after processing the control
		statement in B.

	     b) C does not postdominate B.  Case 1 always happens since there
		is a path from C to exit that does not go through B and thus
		also not through C'.  */
796
797 if (aggressive && !degenerate_phi_p (phi: stmt))
798 {
799 for (k = 0; k < gimple_phi_num_args (gs: stmt); k++)
800 {
801 basic_block arg_bb = gimple_phi_arg_edge (phi, i: k)->src;
802
803 if (gimple_bb (g: stmt)
804 != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
805 {
806 if (!mark_last_stmt_necessary (bb: arg_bb))
807 mark_control_dependent_edges_necessary (bb: arg_bb, ignore_self: false);
808 }
809 else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
810 && !bitmap_bit_p (map: visited_control_parents,
811 bitno: arg_bb->index))
812 mark_control_dependent_edges_necessary (bb: arg_bb, ignore_self: true);
813 }
814 }
815 }
816 else
817 {
818 /* Propagate through the operands. Examine all the USE, VUSE and
819 VDEF operands in this statement. Mark all the statements
820 which feed this statement's uses as necessary. */
821 ssa_op_iter iter;
822 tree use;
823
824 /* If this is a call to free which is directly fed by an
825 allocation function do not mark that necessary through
826 processing the argument. */
827 bool is_delete_operator
828 = (is_gimple_call (gs: stmt)
829 && gimple_call_from_new_or_delete (s: as_a <gcall *> (p: stmt))
830 && gimple_call_operator_delete_p (as_a <gcall *> (p: stmt)));
831 if (is_delete_operator
832 || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
833 || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
834 {
835 tree ptr = gimple_call_arg (gs: stmt, index: 0);
836 gcall *def_stmt;
837 tree def_callee;
838 /* If the pointer we free is defined by an allocation
839 function do not add the call to the worklist. */
840 if (TREE_CODE (ptr) == SSA_NAME
841 && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
842 && (def_callee = gimple_call_fndecl (gs: def_stmt))
843 && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
844 && (DECL_FUNCTION_CODE (decl: def_callee) == BUILT_IN_ALIGNED_ALLOC
845 || DECL_FUNCTION_CODE (decl: def_callee) == BUILT_IN_MALLOC
846 || DECL_FUNCTION_CODE (decl: def_callee) == BUILT_IN_CALLOC
847 || DECL_FUNCTION_CODE (decl: def_callee) == BUILT_IN_GOMP_ALLOC))
848 || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)
849 && gimple_call_from_new_or_delete (s: def_stmt))))
850 {
851 if (is_delete_operator
852 && !valid_new_delete_pair_p (new_call: def_stmt, delete_call: stmt))
853 mark_operand_necessary (op: gimple_call_arg (gs: stmt, index: 0));
854
		  /* Delete operators can have alignment and (or) size
		     as their next arguments.  When those are SSA_NAMEs,
		     they must be marked as necessary.  Similarly for
		     GOMP_free.  */
858 if (gimple_call_num_args (gs: stmt) >= 2)
859 for (unsigned i = 1; i < gimple_call_num_args (gs: stmt);
860 i++)
861 {
862 tree arg = gimple_call_arg (gs: stmt, index: i);
863 if (TREE_CODE (arg) == SSA_NAME)
864 mark_operand_necessary (op: arg);
865 }
866
867 continue;
868 }
869 }
870
871 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
872 mark_operand_necessary (op: use);
873
874 use = gimple_vuse (g: stmt);
875 if (!use)
876 continue;
877
	  /* No need to search for vdefs if we intrinsically keep them all.  */
879 if (keep_all_vdefs_p ())
880 continue;
881
882 /* If we dropped to simple mode make all immediately
883 reachable definitions necessary. */
884 if (chain_ovfl)
885 {
886 mark_all_reaching_defs_necessary (stmt);
887 continue;
888 }
889
	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, but skip the non-aliased ones, which
	     are handled in 1).  By keeping a global visited bitmap for
	     the references we walk for 2) we avoid quadratic behavior
	     for those.  */
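	  /* For instance (illustrative only), a load "tmp = local.f" from a
	     non-aliased decl only needs the may-defs up to the closest
	     dominating kill of local.f (case 1), while a load "tmp = *p_1"
	     conservatively makes all reaching may-defs necessary (case 2).  */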
902
903 if (gcall *call = dyn_cast <gcall *> (p: stmt))
904 {
905 tree callee = gimple_call_fndecl (gs: call);
906 unsigned i;
907
908 /* Calls to functions that are merely acting as barriers
909 or that only store to memory do not make any previous
910 stores necessary. */
911 if (callee != NULL_TREE
912 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
913 && (DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_MEMSET
914 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_MEMSET_CHK
915 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_MALLOC
916 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_ALIGNED_ALLOC
917 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_CALLOC
918 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_FREE
919 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_VA_END
920 || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
921 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_STACK_SAVE
922 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_STACK_RESTORE
923 || DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_ASSUME_ALIGNED))
924 continue;
925
926 if (callee != NULL_TREE
927 && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
928 || DECL_IS_OPERATOR_DELETE_P (callee))
929 && gimple_call_from_new_or_delete (s: call))
930 continue;
931
932 /* Calls implicitly load from memory, their arguments
933 in addition may explicitly perform memory loads. */
934 mark_all_reaching_defs_necessary (stmt: call);
935 for (i = 0; i < gimple_call_num_args (gs: call); ++i)
936 {
937 tree arg = gimple_call_arg (gs: call, index: i);
938 if (TREE_CODE (arg) == SSA_NAME
939 || is_gimple_min_invariant (arg))
940 continue;
941 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
942 arg = TREE_OPERAND (arg, 0);
943 if (!ref_may_be_aliased (ref: arg))
944 mark_aliased_reaching_defs_necessary (stmt: call, ref: arg);
945 }
946 }
947 else if (gimple_assign_single_p (gs: stmt))
948 {
949 tree rhs;
950 /* If this is a load mark things necessary. */
951 rhs = gimple_assign_rhs1 (gs: stmt);
952 if (TREE_CODE (rhs) != SSA_NAME
953 && !is_gimple_min_invariant (rhs)
954 && TREE_CODE (rhs) != CONSTRUCTOR)
955 {
956 if (!ref_may_be_aliased (ref: rhs))
957 mark_aliased_reaching_defs_necessary (stmt, ref: rhs);
958 else
959 mark_all_reaching_defs_necessary (stmt);
960 }
961 }
962 else if (greturn *return_stmt = dyn_cast <greturn *> (p: stmt))
963 {
964 tree rhs = gimple_return_retval (gs: return_stmt);
965 /* A return statement may perform a load. */
966 if (rhs
967 && TREE_CODE (rhs) != SSA_NAME
968 && !is_gimple_min_invariant (rhs)
969 && TREE_CODE (rhs) != CONSTRUCTOR)
970 {
971 if (!ref_may_be_aliased (ref: rhs))
972 mark_aliased_reaching_defs_necessary (stmt, ref: rhs);
973 else
974 mark_all_reaching_defs_necessary (stmt);
975 }
976 }
977 else if (gasm *asm_stmt = dyn_cast <gasm *> (p: stmt))
978 {
979 unsigned i;
980 mark_all_reaching_defs_necessary (stmt);
981 /* Inputs may perform loads. */
982 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
983 {
984 tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
985 if (TREE_CODE (op) != SSA_NAME
986 && !is_gimple_min_invariant (op)
987 && TREE_CODE (op) != CONSTRUCTOR
988 && !ref_may_be_aliased (ref: op))
989 mark_aliased_reaching_defs_necessary (stmt, ref: op);
990 }
991 }
992 else if (gimple_code (g: stmt) == GIMPLE_TRANSACTION)
993 {
994 /* The beginning of a transaction is a memory barrier. */
995 /* ??? If we were really cool, we'd only be a barrier
996 for the memories touched within the transaction. */
997 mark_all_reaching_defs_necessary (stmt);
998 }
999 else
1000 gcc_unreachable ();
1001
	  /* If we over-used our alias oracle budget drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back to
	     super-linear complexity.  */
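	  /* Worked example with made-up numbers: after 1000 walks with
	     total_chain == 20000 and longest_chain == 100 we stay in
	     precise mode because 20000 <= 32 * 1000; only once the average
	     walk exceeds 32 may-defs (and total_chain also exceeds
	     128 * 128 as well as 32 * longest_chain) do we drop to the
	     simple mode below.  */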
1007 if (/* Constant but quadratic for small functions. */
1008 total_chain > 128 * 128
1009 /* Linear in the number of may-defs. */
1010 && total_chain > 32 * longest_chain
1011 /* Linear in the number of uses. */
1012 && total_chain > nr_walks * 32)
1013 {
1014 chain_ovfl = true;
1015 if (visited)
1016 bitmap_clear (visited);
1017 }
1018 }
1019 }
1020}
1021
1022/* Remove dead PHI nodes from block BB. */
1023
1024static bool
1025remove_dead_phis (basic_block bb)
1026{
1027 bool something_changed = false;
1028 gphi *phi;
1029 gphi_iterator gsi;
1030
1031 for (gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi);)
1032 {
1033 stats.total_phis++;
1034 phi = gsi.phi ();
1035
1036 /* We do not track necessity of virtual PHI nodes. Instead do
1037 very simple dead PHI removal here. */
1038 if (virtual_operand_p (op: gimple_phi_result (gs: phi)))
1039 {
1040 /* Virtual PHI nodes with one or identical arguments
1041 can be removed. */
1042 if (!loops_state_satisfies_p (flags: LOOP_CLOSED_SSA)
1043 && degenerate_phi_p (phi))
1044 {
1045 tree vdef = gimple_phi_result (gs: phi);
1046 tree vuse = gimple_phi_arg_def (gs: phi, index: 0);
1047
1048 use_operand_p use_p;
1049 imm_use_iterator iter;
1050 gimple *use_stmt;
1051 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1052 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1053 SET_USE (use_p, vuse);
1054 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
1055 && TREE_CODE (vuse) == SSA_NAME)
1056 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
1057 }
1058 else
1059 gimple_set_plf (stmt: phi, STMT_NECESSARY, val_p: true);
1060 }
1061
1062 if (!gimple_plf (stmt: phi, STMT_NECESSARY))
1063 {
1064 something_changed = true;
1065 if (dump_file && (dump_flags & TDF_DETAILS))
1066 {
1067 fprintf (stream: dump_file, format: "Deleting : ");
1068 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
1069 fprintf (stream: dump_file, format: "\n");
1070 }
1071
1072 remove_phi_node (&gsi, true);
1073 stats.removed_phis++;
1074 continue;
1075 }
1076
1077 gsi_next (i: &gsi);
1078 }
1079 return something_changed;
1080}
1081
1082
1083/* Remove dead statement pointed to by iterator I. Receives the basic block BB
1084 containing I so that we don't have to look it up. */
1085
1086static void
1087remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
1088 vec<edge> &to_remove_edges)
1089{
1090 gimple *stmt = gsi_stmt (i: *i);
1091
1092 if (dump_file && (dump_flags & TDF_DETAILS))
1093 {
1094 fprintf (stream: dump_file, format: "Deleting : ");
1095 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1096 fprintf (stream: dump_file, format: "\n");
1097 }
1098
1099 stats.removed++;
1100
  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to update
     the CFG.  We can choose any of the edges out of BB as long as we are sure
     to not close infinite loops.  This is done by always choosing the edge
     closer to exit in inverted_rev_post_order_compute order.  */
1106 if (is_ctrl_stmt (stmt))
1107 {
1108 edge_iterator ei;
1109 edge e = NULL, e2;
1110
1111 /* See if there is only one non-abnormal edge. */
1112 if (single_succ_p (bb))
1113 e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a BB with a live statement in
	 it.  To be able to choose one, we compute an inverted post order
	 starting from all BBs with live statements.  */
1117 if (!e)
1118 {
1119 if (!bb_postorder)
1120 {
1121 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
1122 int n = inverted_rev_post_order_compute (cfun, rpo,
1123 start_points: &bb_contains_live_stmts);
1124 bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
1125 for (int i = 0; i < n; ++i)
1126 bb_postorder[rpo[i]] = i;
1127 free (ptr: rpo);
1128 }
1129 FOR_EACH_EDGE (e2, ei, bb->succs)
1130 if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
1131 || bb_postorder [e->dest->index]
1132 >= bb_postorder [e2->dest->index])
1133 e = e2;
1134 }
1135 gcc_assert (e);
1136 e->probability = profile_probability::always ();
1137
1138 /* The edge is no longer associated with a conditional, so it does
1139 not have TRUE/FALSE flags.
1140 We are also safe to drop EH/ABNORMAL flags and turn them into
1141 normal control flow, because we know that all the destinations (including
1142 those odd edges) are equivalent for program execution. */
1143 e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
1144
1145 /* The lone outgoing edge from BB will be a fallthru edge. */
1146 e->flags |= EDGE_FALLTHRU;
1147
1148 /* Remove the remaining outgoing edges. */
1149 FOR_EACH_EDGE (e2, ei, bb->succs)
1150 if (e != e2)
1151 {
1152 /* If we made a BB unconditionally exit a loop or removed
1153 an entry into an irreducible region, then this transform
1154 alters the set of BBs in the loop. Schedule a fixup. */
1155 if (loop_exit_edge_p (bb->loop_father, e)
1156 || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
1157 loops_state_set (flags: LOOPS_NEED_FIXUP);
1158 to_remove_edges.safe_push (obj: e2);
1159 }
1160 }
1161
1162 /* If this is a store into a variable that is being optimized away,
1163 add a debug bind stmt if possible. */
1164 if (MAY_HAVE_DEBUG_BIND_STMTS
1165 && gimple_assign_single_p (gs: stmt)
1166 && is_gimple_val (gimple_assign_rhs1 (gs: stmt)))
1167 {
1168 tree lhs = gimple_assign_lhs (gs: stmt);
1169 if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
1170 && !DECL_IGNORED_P (lhs)
1171 && is_gimple_reg_type (TREE_TYPE (lhs))
1172 && !is_global_var (t: lhs)
1173 && !DECL_HAS_VALUE_EXPR_P (lhs))
1174 {
1175 tree rhs = gimple_assign_rhs1 (gs: stmt);
1176 gdebug *note
1177 = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1178 gsi_insert_after (i, note, GSI_SAME_STMT);
1179 }
1180 }
1181
1182 unlink_stmt_vdef (stmt);
1183 gsi_remove (i, true);
1184 release_defs (stmt);
1185}
1186
1187/* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1188 uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1189
1190static tree
1191find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1192{
1193 if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1194 *walk_subtrees = 0;
1195 if (*tp == (tree) data)
1196 return *tp;
1197 return NULL_TREE;
1198}
1199
/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */
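/* Illustrative sketch (hypothetical SSA names): given

     _1 = .ADD_OVERFLOW (a_2, b_3);
     r_4 = REALPART_EXPR <_1>;

   with no IMAGPART_EXPR use of _1, the call can be rewritten as a plain
   unsigned addition whose value is wrapped in a COMPLEX_EXPR with a zero
   overflow part, so r_4 still extracts the sum.  */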
1204
1205static void
1206maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
1207 enum tree_code subcode)
1208{
1209 gimple *stmt = gsi_stmt (i: *gsi);
1210 tree lhs = gimple_call_lhs (gs: stmt);
1211
1212 if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
1213 return;
1214
1215 imm_use_iterator imm_iter;
1216 use_operand_p use_p;
1217 bool has_debug_uses = false;
1218 bool has_realpart_uses = false;
1219 bool has_other_uses = false;
1220 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
1221 {
1222 gimple *use_stmt = USE_STMT (use_p);
1223 if (is_gimple_debug (gs: use_stmt))
1224 has_debug_uses = true;
1225 else if (is_gimple_assign (gs: use_stmt)
1226 && gimple_assign_rhs_code (gs: use_stmt) == REALPART_EXPR
1227 && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
1228 has_realpart_uses = true;
1229 else
1230 {
1231 has_other_uses = true;
1232 break;
1233 }
1234 }
1235
1236 if (!has_realpart_uses || has_other_uses)
1237 return;
1238
1239 tree arg0 = gimple_call_arg (gs: stmt, index: 0);
1240 tree arg1 = gimple_call_arg (gs: stmt, index: 1);
1241 location_t loc = gimple_location (g: stmt);
1242 tree type = TREE_TYPE (TREE_TYPE (lhs));
1243 tree utype = type;
1244 if (!TYPE_UNSIGNED (type))
1245 utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
1246 tree result = fold_build2_loc (loc, subcode, utype,
1247 fold_convert_loc (loc, utype, arg0),
1248 fold_convert_loc (loc, utype, arg1));
1249 result = fold_convert_loc (loc, type, result);
1250
1251 if (has_debug_uses)
1252 {
1253 gimple *use_stmt;
1254 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
1255 {
1256 if (!gimple_debug_bind_p (s: use_stmt))
1257 continue;
1258 tree v = gimple_debug_bind_get_value (dbg: use_stmt);
1259 if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
1260 {
1261 gimple_debug_bind_reset_value (dbg: use_stmt);
1262 update_stmt (s: use_stmt);
1263 }
1264 }
1265 }
1266
1267 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
1268 result = drop_tree_overflow (result);
1269 tree overflow = build_zero_cst (type);
1270 tree ctype = build_complex_type (type);
1271 if (TREE_CODE (result) == INTEGER_CST)
1272 result = build_complex (ctype, result, overflow);
1273 else
1274 result = build2_loc (loc: gimple_location (g: stmt), code: COMPLEX_EXPR,
1275 type: ctype, arg0: result, arg1: overflow);
1276
1277 if (dump_file && (dump_flags & TDF_DETAILS))
1278 {
1279 fprintf (stream: dump_file, format: "Transforming call: ");
1280 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1281 fprintf (stream: dump_file, format: "because the overflow result is never used into: ");
1282 print_generic_stmt (dump_file, result, TDF_SLIM);
1283 fprintf (stream: dump_file, format: "\n");
1284 }
1285
1286 gimplify_and_update_call_from_tree (gsi, result);
1287}
1288
1289/* Returns whether the control parents of BB are preserved. */
1290
1291static bool
1292control_parents_preserved_p (basic_block bb)
1293{
1294 /* If we marked the control parents from BB they are preserved. */
1295 if (bitmap_bit_p (map: visited_control_parents, bitno: bb->index))
1296 return true;
1297
1298 /* But they can also end up being marked from elsewhere. */
1299 bitmap_iterator bi;
1300 unsigned edge_number;
1301 EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
1302 0, edge_number, bi)
1303 {
1304 basic_block cd_bb = cd->get_edge_src (edge_number);
1305 if (cd_bb != bb
1306 && !bitmap_bit_p (map: last_stmt_necessary, bitno: cd_bb->index))
1307 return false;
1308 }
1309 /* And cache the result. */
1310 bitmap_set_bit (map: visited_control_parents, bitno: bb->index);
1311 return true;
1312}
1313
1314/* Eliminate unnecessary statements. Any instruction not marked as necessary
1315 contributes nothing to the program, and can be deleted. */
1316
1317static bool
1318eliminate_unnecessary_stmts (bool aggressive)
1319{
1320 bool something_changed = false;
1321 basic_block bb;
1322 gimple_stmt_iterator gsi, psi;
1323 gimple *stmt;
1324 tree call;
1325 auto_vec<edge> to_remove_edges;
1326
1327 if (dump_file && (dump_flags & TDF_DETAILS))
1328 fprintf (stream: dump_file, format: "\nEliminating unnecessary statements:\n");
1329
1330 bool had_setjmp = cfun->calls_setjmp;
1331 clear_special_calls ();
1332
1333 /* Walking basic blocks and statements in reverse order avoids
1334 releasing SSA names before any other DEFs that refer to them are
1335 released. This helps avoid loss of debug information, as we get
1336 a chance to propagate all RHSs of removed SSAs into debug uses,
1337 rather than only the latest ones. E.g., consider:
1338
1339 x_3 = y_1 + z_2;
1340 a_5 = x_3 - b_4;
1341 # DEBUG a => a_5
1342
1343 If we were to release x_3 before a_5, when we reached a_5 and
1344 tried to substitute it into the debug stmt, we'd see x_3 there,
1345 but x_3's DEF, type, etc would have already been disconnected.
1346 By going backwards, the debug stmt first changes to:
1347
1348 # DEBUG a => x_3 - b_4
1349
1350 and then to:
1351
1352 # DEBUG a => y_1 + z_2 - b_4
1353
1354 as desired. */
1355 gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1356 auto_vec<basic_block> h;
1357 h = get_all_dominated_blocks (CDI_DOMINATORS,
1358 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1359
1360 while (h.length ())
1361 {
1362 bb = h.pop ();
1363
1364 /* Remove dead statements. */
1365 auto_bitmap debug_seen;
1366 for (gsi = gsi_last_bb (bb); !gsi_end_p (i: gsi); gsi = psi)
1367 {
1368 stmt = gsi_stmt (i: gsi);
1369
1370 psi = gsi;
1371 gsi_prev (i: &psi);
1372
1373 stats.total++;
1374
1375 /* We can mark a call to free as not necessary if the
1376 defining statement of its argument is not necessary
1377 (and thus is getting removed). */
1378 if (gimple_plf (stmt, STMT_NECESSARY)
1379 && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
1380 || (is_gimple_call (gs: stmt)
1381 && gimple_call_from_new_or_delete (s: as_a <gcall *> (p: stmt))
1382 && gimple_call_operator_delete_p (as_a <gcall *> (p: stmt)))))
1383 {
1384 tree ptr = gimple_call_arg (gs: stmt, index: 0);
1385 if (TREE_CODE (ptr) == SSA_NAME)
1386 {
1387 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1388 if (!gimple_nop_p (g: def_stmt)
1389 && !gimple_plf (stmt: def_stmt, STMT_NECESSARY))
1390 gimple_set_plf (stmt, STMT_NECESSARY, val_p: false);
1391 }
1392 }
1393
1394 /* If GSI is not necessary then remove it. */
1395 if (!gimple_plf (stmt, STMT_NECESSARY))
1396 {
	      /* Keep those clobbers that we can legitimately keep live.  */
1398 if (gimple_clobber_p (s: stmt))
1399 {
1400 ssa_op_iter iter;
1401 use_operand_p use_p;
1402 bool dead = false;
1403 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1404 {
1405 tree name = USE_FROM_PTR (use_p);
1406 if (!SSA_NAME_IS_DEFAULT_DEF (name)
1407 && !bitmap_bit_p (map: processed, SSA_NAME_VERSION (name)))
1408 {
1409 dead = true;
1410 break;
1411 }
1412 }
1413 if (!dead
1414 /* When doing CD-DCE we have to ensure all controls
1415 of the stmt are still live. */
1416 && (!aggressive || control_parents_preserved_p (bb)))
1417 {
1418 bitmap_clear (debug_seen);
1419 continue;
1420 }
1421 }
1422 if (!is_gimple_debug (gs: stmt))
1423 something_changed = true;
1424 remove_dead_stmt (i: &gsi, bb, to_remove_edges);
1425 continue;
1426 }
1427 else if (is_gimple_call (gs: stmt))
1428 {
1429 tree name = gimple_call_lhs (gs: stmt);
1430
1431 notice_special_calls (as_a <gcall *> (p: stmt));
1432
1433 /* When LHS of var = call (); is dead, simplify it into
1434 call (); saving one operand. */
1435 if (name
1436 && TREE_CODE (name) == SSA_NAME
1437 && !bitmap_bit_p (map: processed, SSA_NAME_VERSION (name))
1438 /* Avoid doing so for allocation calls which we
1439 did not mark as necessary, it will confuse the
1440 special logic we apply to malloc/free pair removal. */
1441 && (!(call = gimple_call_fndecl (gs: stmt))
1442 || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1443 || (DECL_FUNCTION_CODE (decl: call) != BUILT_IN_ALIGNED_ALLOC
1444 && DECL_FUNCTION_CODE (decl: call) != BUILT_IN_MALLOC
1445 && DECL_FUNCTION_CODE (decl: call) != BUILT_IN_CALLOC
1446 && !ALLOCA_FUNCTION_CODE_P
1447 (DECL_FUNCTION_CODE (call))))
1448 && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
1449 {
1450 something_changed = true;
1451 if (dump_file && (dump_flags & TDF_DETAILS))
1452 {
1453 fprintf (stream: dump_file, format: "Deleting LHS of call: ");
1454 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1455 fprintf (stream: dump_file, format: "\n");
1456 }
1457
1458 gimple_call_set_lhs (gs: stmt, NULL_TREE);
1459 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1460 update_stmt (s: stmt);
1461 release_ssa_name (name);
1462
	      /* GOMP_SIMD_LANE (unless it is the three-argument form) or
		 ASAN_POISON without a lhs is not needed.  */
1465 if (gimple_call_internal_p (gs: stmt))
1466 switch (gimple_call_internal_fn (gs: stmt))
1467 {
1468 case IFN_GOMP_SIMD_LANE:
1469 if (gimple_call_num_args (gs: stmt) >= 3
1470 && !integer_nonzerop (gimple_call_arg (gs: stmt, index: 2)))
1471 break;
1472 /* FALLTHRU */
1473 case IFN_ASAN_POISON:
1474 remove_dead_stmt (i: &gsi, bb, to_remove_edges);
1475 break;
1476 default:
1477 break;
1478 }
1479 }
1480 else if (gimple_call_internal_p (gs: stmt))
1481 switch (gimple_call_internal_fn (gs: stmt))
1482 {
1483 case IFN_ADD_OVERFLOW:
1484 maybe_optimize_arith_overflow (gsi: &gsi, subcode: PLUS_EXPR);
1485 break;
1486 case IFN_SUB_OVERFLOW:
1487 maybe_optimize_arith_overflow (gsi: &gsi, subcode: MINUS_EXPR);
1488 break;
1489 case IFN_MUL_OVERFLOW:
1490 maybe_optimize_arith_overflow (gsi: &gsi, subcode: MULT_EXPR);
1491 break;
1492 case IFN_UADDC:
1493 if (integer_zerop (gimple_call_arg (gs: stmt, index: 2)))
1494 maybe_optimize_arith_overflow (gsi: &gsi, subcode: PLUS_EXPR);
1495 break;
1496 case IFN_USUBC:
1497 if (integer_zerop (gimple_call_arg (gs: stmt, index: 2)))
1498 maybe_optimize_arith_overflow (gsi: &gsi, subcode: MINUS_EXPR);
1499 break;
1500 default:
1501 break;
1502 }
1503 }
1504 else if (gimple_debug_bind_p (s: stmt))
1505 {
1506 /* We are only keeping the last debug-bind of a
1507 non-DEBUG_EXPR_DECL variable in a series of
1508 debug-bind stmts. */
1509 tree var = gimple_debug_bind_get_var (dbg: stmt);
1510 if (TREE_CODE (var) != DEBUG_EXPR_DECL
1511 && !bitmap_set_bit (debug_seen, DECL_UID (var)))
1512 remove_dead_stmt (i: &gsi, bb, to_remove_edges);
1513 continue;
1514 }
1515 bitmap_clear (debug_seen);
1516 }
1517
1518 /* Remove dead PHI nodes. */
1519 something_changed |= remove_dead_phis (bb);
1520 }
1521
1522 /* First remove queued edges. */
1523 if (!to_remove_edges.is_empty ())
1524 {
1525 /* Remove edges. We've delayed this to not get bogus debug stmts
1526 during PHI node removal. */
1527 for (unsigned i = 0; i < to_remove_edges.length (); ++i)
1528 remove_edge (to_remove_edges[i]);
1529 cfg_altered = true;
1530 }
1531 /* When we cleared calls_setjmp we can purge all abnormal edges. Do so.
1532 ??? We'd like to assert that setjmp calls do not pop out of nothing
1533 but we currently lack a per-stmt way of noting whether a call was
1534 recognized as returns-twice (or rather receives-control). */
1535 if (!cfun->calls_setjmp && had_setjmp)
1536 {
1537 /* Make sure we only remove the edges, not dominated blocks. Using
1538 gimple_purge_dead_abnormal_call_edges would do that and we
1539 cannot free dominators yet. */
1540 FOR_EACH_BB_FN (bb, cfun)
1541 if (gcall *stmt = safe_dyn_cast <gcall *> (p: *gsi_last_bb (bb)))
1542 if (!stmt_can_make_abnormal_goto (stmt))
1543 {
1544 edge_iterator ei;
1545 edge e;
1546 for (ei = ei_start (bb->succs); (e = ei_safe_edge (i: ei)); )
1547 {
1548 if (e->flags & EDGE_ABNORMAL)
1549 {
1550 if (e->flags & EDGE_FALLTHRU)
1551 e->flags &= ~EDGE_ABNORMAL;
1552 else
1553 remove_edge (e);
1554 cfg_altered = true;
1555 }
1556 else
1557 ei_next (i: &ei);
1558 }
1559 }
1560 }
1561
1562 /* Now remove the unreachable blocks. */
1563 if (cfg_altered)
1564 {
1565 basic_block prev_bb;
1566
1567 find_unreachable_blocks ();
1568
1569 /* Delete all unreachable basic blocks in reverse dominator order. */
1570 for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1571 bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1572 {
1573 prev_bb = bb->prev_bb;
1574
1575 if ((bb_contains_live_stmts
1576 && !bitmap_bit_p (map: bb_contains_live_stmts, bitno: bb->index))
1577 || !(bb->flags & BB_REACHABLE))
1578 {
1579 /* Since we don't track liveness of virtual PHI nodes, it is
1580 possible that we rendered some PHI nodes unreachable while
1581 they are still in use. Mark them for renaming. */
1582 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi);
1583 gsi_next (i: &gsi))
1584 if (virtual_operand_p (op: gimple_phi_result (gs: gsi.phi ())))
1585 {
1586 bool found = false;
1587 imm_use_iterator iter;
1588
1589 FOR_EACH_IMM_USE_STMT (stmt, iter,
1590 gimple_phi_result (gsi.phi ()))
1591 {
1592 if (!(gimple_bb (g: stmt)->flags & BB_REACHABLE))
1593 continue;
1594 if (gimple_code (g: stmt) == GIMPLE_PHI
1595 || gimple_plf (stmt, STMT_NECESSARY))
1596 {
1597 found = true;
1598 break;
1599 }
1600 }
1601 if (found)
1602 mark_virtual_phi_result_for_renaming (gsi.phi ());
1603 }
1604
1605 if (!(bb->flags & BB_REACHABLE))
1606 {
1607 /* Speed up the removal of blocks that don't
1608 dominate others. Walking backwards, this should
1609 be the common case. ??? Do we need to recompute
1610 dominators because of cfg_altered? */
1611 if (!first_dom_son (CDI_DOMINATORS, bb))
1612 delete_basic_block (bb);
1613 else
1614 {
1615 h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1616
1617 while (h.length ())
1618 {
1619 bb = h.pop ();
1620 prev_bb = bb->prev_bb;
1621 /* Rearrangements to the CFG may have failed
1622 to update the dominators tree, so that
1623 formerly-dominated blocks are now
1624 otherwise reachable. */
1625 if (!!(bb->flags & BB_REACHABLE))
1626 continue;
1627 delete_basic_block (bb);
1628 }
1629
1630 h.release ();
1631 }
1632 }
1633 }
1634 }
1635 }
1636
1637 if (bb_postorder)
1638 free (ptr: bb_postorder);
1639 bb_postorder = NULL;
1640
1641 return something_changed;
1642}
1643
1644
1645/* Print out removed statement statistics. */
1646
1647static void
1648print_stats (void)
1649{
1650 float percg;
1651
1652 percg = ((float) stats.removed / (float) stats.total) * 100;
1653 fprintf (stream: dump_file, format: "Removed %d of %d statements (%d%%)\n",
1654 stats.removed, stats.total, (int) percg);
1655
1656 if (stats.total_phis == 0)
1657 percg = 0;
1658 else
1659 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1660
1661 fprintf (stream: dump_file, format: "Removed %d of %d PHI nodes (%d%%)\n",
1662 stats.removed_phis, stats.total_phis, (int) percg);
1663}
1664
1665/* Initialization for this pass. Set up the used data structures. */
1666
1667static void
1668tree_dce_init (bool aggressive)
1669{
1670 memset (s: (void *) &stats, c: 0, n: sizeof (stats));
1671
1672 if (aggressive)
1673 {
1674 last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1675 bitmap_clear (last_stmt_necessary);
1676 bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1677 bitmap_clear (bb_contains_live_stmts);
1678 }
1679
1680 processed = sbitmap_alloc (num_ssa_names + 1);
1681 bitmap_clear (processed);
1682
1683 worklist.create (nelems: 64);
1684 cfg_altered = false;
1685}
1686
1687/* Cleanup after this pass. */
1688
1689static void
1690tree_dce_done (bool aggressive)
1691{
1692 if (aggressive)
1693 {
1694 delete cd;
1695 sbitmap_free (map: visited_control_parents);
1696 sbitmap_free (map: last_stmt_necessary);
1697 sbitmap_free (map: bb_contains_live_stmts);
1698 bb_contains_live_stmts = NULL;
1699 }
1700
1701 sbitmap_free (map: processed);
1702
1703 worklist.release ();
1704}
1705
1706/* Sort PHI argument values for make_forwarders_with_degenerate_phis. */
1707
1708static int
1709sort_phi_args (const void *a_, const void *b_)
1710{
1711 auto *a = (const std::pair<edge, hashval_t> *) a_;
1712 auto *b = (const std::pair<edge, hashval_t> *) b_;
1713 hashval_t ha = a->second;
1714 hashval_t hb = b->second;
1715 if (ha < hb)
1716 return -1;
1717 else if (ha > hb)
1718 return 1;
1719 else if (a->first->dest_idx < b->first->dest_idx)
1720 return -1;
1721 else if (a->first->dest_idx > b->first->dest_idx)
1722 return 1;
1723 else
1724 return 0;
1725}
1726
/* Look for non-virtual PHIs and make a forwarder block when all PHIs
   have the same argument on a set of edges.  This way we do not consider
   control dependences of the individual edges for equal values but only
   of the common set.  */
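
/* For example (illustrative): with predecessors A, B and C of a block
   containing
     x_1 = PHI <5(A), 5(B), y_2(C)>
   the edges from A and B can be routed through a new forwarder block F,
   turning the PHI into x_1 = PHI <5(F), y_2(C)>, so control dependences
   are computed for the merged edge rather than for each of A and B.  */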
1731
1732static unsigned
1733make_forwarders_with_degenerate_phis (function *fn)
1734{
1735 unsigned todo = 0;
1736
1737 basic_block bb;
1738 FOR_EACH_BB_FN (bb, fn)
1739 {
1740 /* Only PHIs with three or more arguments have opportunities. */
1741 if (EDGE_COUNT (bb->preds) < 3)
1742 continue;
      /* Do not touch loop headers or blocks with abnormal predecessors.
	 ??? This is to avoid creating valid loops here, see PR103458.
	 We might want to improve things to either explicitly add those
	 loops or at least consider blocks with no backedges.  */
1747 if (bb->loop_father->header == bb
1748 || bb_has_abnormal_pred (bb))
1749 continue;

      /* Take one PHI node as template to look for identical
         arguments. Build a vector of candidates forming sets
         of argument edges with equal values. Note optimality
         depends on the particular choice of the template PHI
         since equal arguments are unordered leaving other PHIs
         with more than one set of equal arguments within this
         argument range unsorted. We'd have to break ties by
         looking at other PHI nodes. */
      gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
      if (gsi_end_p (gsi))
        continue;
      gphi *phi = gsi.phi ();
      auto_vec<std::pair<edge, hashval_t>, 8> args;
      bool need_resort = false;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
        {
          edge e = gimple_phi_arg_edge (phi, i);
          /* Skip abnormal edges since we cannot redirect them. */
          if (e->flags & EDGE_ABNORMAL)
            continue;
          /* Skip loop exit edges when we are in loop-closed SSA form
             since the forwarder we'd create does not have a PHI node. */
          if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
              && loop_exit_edge_p (e->src->loop_father, e))
            continue;

          tree arg = gimple_phi_arg_def (phi, i);
          if (!CONSTANT_CLASS_P (arg) && TREE_CODE (arg) != SSA_NAME)
            need_resort = true;
          args.safe_push (std::make_pair (e, iterative_hash_expr (arg, 0)));
        }
      if (args.length () < 2)
        continue;
      args.qsort (sort_phi_args);
      /* The above sorting can be different between -g and -g0, as e.g. decls
         can have different uids (-g could have bigger gaps in between them).
         So, only use that to determine which args are equal, then change
         second from hash value to smallest dest_idx of the edges which have
         equal argument and sort again. If all the phi arguments are
         constants or SSA_NAMEs, there is no need for the second sort, the
         hash values are stable in that case. */
      hashval_t hash = args[0].second;
      args[0].second = args[0].first->dest_idx;
      bool any_equal = false;
      for (unsigned i = 1; i < args.length (); ++i)
        if (hash == args[i].second
            && operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, args[i - 1].first),
                                PHI_ARG_DEF_FROM_EDGE (phi, args[i].first)))
          {
            args[i].second = args[i - 1].second;
            any_equal = true;
          }
        else
          {
            hash = args[i].second;
            args[i].second = args[i].first->dest_idx;
          }
      if (!any_equal)
        continue;
      if (need_resort)
        args.qsort (sort_phi_args);

      /* From the candidates vector now verify true candidates for
         forwarders and create them. */
      gphi *vphi = get_virtual_phi (bb);
      unsigned start = 0;
      while (start < args.length () - 1)
        {
          unsigned i;
          for (i = start + 1; i < args.length (); ++i)
            if (args[start].second != args[i].second)
              break;
          /* args[start]..args[i-1] are equal. */
          if (start != i - 1)
            {
              /* Check all PHI nodes for argument equality. */
              bool equal = true;
              gphi_iterator gsi2 = gsi;
              gsi_next (&gsi2);
              for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
                {
                  gphi *phi2 = gsi2.phi ();
                  if (virtual_operand_p (gimple_phi_result (phi2)))
                    continue;
                  tree start_arg
                    = PHI_ARG_DEF_FROM_EDGE (phi2, args[start].first);
                  for (unsigned j = start + 1; j < i; ++j)
                    {
                      if (!operand_equal_p (start_arg,
                                            PHI_ARG_DEF_FROM_EDGE
                                              (phi2, args[j].first)))
                        {
                          /* Another PHI might have a shorter set of
                             equivalent args. Go for that. */
                          i = j;
                          if (j == start + 1)
                            equal = false;
                          break;
                        }
                    }
                  if (!equal)
                    break;
                }
              if (equal)
                {
                  /* If we are asked to forward all edges, the block
                     has all degenerate PHIs. Do nothing in that case. */
                  if (start == 0
                      && i == args.length ()
                      && args.length () == gimple_phi_num_args (phi))
                    break;
                  /* Instead of using make_forwarder_block we are
                     rolling our own variant, knowing that the forwarder
                     does not need PHI nodes apart from possibly
                     a virtual one. */
                  auto_vec<tree, 8> vphi_args;
                  if (vphi)
                    {
                      vphi_args.reserve_exact (i - start);
                      for (unsigned j = start; j < i; ++j)
                        vphi_args.quick_push
                          (PHI_ARG_DEF_FROM_EDGE (vphi, args[j].first));
                    }
                  free_dominance_info (fn, CDI_DOMINATORS);
                  basic_block forwarder = split_edge (args[start].first);
                  profile_count count = profile_count::zero ();
                  for (unsigned j = start + 1; j < i; ++j)
                    {
                      edge e = args[j].first;
                      redirect_edge_and_branch_force (e, forwarder);
                      redirect_edge_var_map_clear (e);
                      count += e->count ();
                    }
                  forwarder->count = count;
                  if (vphi)
                    {
                      tree def = copy_ssa_name (vphi_args[0]);
                      gphi *vphi_copy = create_phi_node (def, forwarder);
                      for (unsigned j = start; j < i; ++j)
                        add_phi_arg (vphi_copy, vphi_args[j - start],
                                     args[j].first, UNKNOWN_LOCATION);
                      SET_PHI_ARG_DEF
                        (vphi, single_succ_edge (forwarder)->dest_idx, def);
                    }
                  todo |= TODO_cleanup_cfg;
                }
            }
          /* Continue searching for more opportunities. */
          start = i;
        }
    }
  return todo;
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary. This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1. This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering. */
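
/* Of the two entry points further below, the "dce" pass (tree_ssa_dce)
   requests conservative mode, while the "cddce" pass (tree_ssa_cd_dce)
   requests aggressive mode when optimizing at -O2 or above. */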

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;
  unsigned todo = 0;

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c. */
  bool in_loop_pipeline = scev_initialized_p ();
  if (aggressive && ! in_loop_pipeline)
    {
      loop_optimizer_init (LOOPS_NORMAL
                           | LOOPS_HAVE_RECORDED_EXITS);
      scev_initialize ();
    }

  if (aggressive)
    todo |= make_forwarders_with_degenerate_phis (cfun);

  calculate_dominance_info (CDI_DOMINATORS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence. */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive && ! in_loop_pipeline)
    {
      scev_finalize ();
      loop_optimizer_finalize ();
    }

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts (aggressive);
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally. */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well. I haven't investigated the possibility
     of incrementally updating dominators. */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps. */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (in_loop_pipeline)
        scev_reset ();
      todo |= TODO_update_ssa | TODO_cleanup_cfg;
    }
  return todo;
}

/* Pass entry points. */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt), update_address_taken_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_dce (m_ctxt); }
  void set_pass_param (unsigned n, bool param) final override
    {
      gcc_assert (n == 0);
      update_address_taken_p = param;
    }
  bool gate (function *) final override { return flag_tree_dce != 0; }
  unsigned int execute (function *) final override
    {
      return (tree_ssa_dce ()
              | (update_address_taken_p ? TODO_update_address_taken : 0));
    }

private:
  bool update_address_taken_p;
}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt), update_address_taken_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_cd_dce (m_ctxt); }
  void set_pass_param (unsigned n, bool param) final override
    {
      gcc_assert (n == 0);
      update_address_taken_p = param;
    }
  bool gate (function *) final override { return flag_tree_dce != 0; }
  unsigned int execute (function *) final override
    {
      return (tree_ssa_cd_dce ()
              | (update_address_taken_p ? TODO_update_address_taken : 0));
    }

private:
  bool update_address_taken_p;
}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}


/* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and
   is consumed by this function. The function runs in time linear in the
   number of dead stmts, with a constant factor proportional to the
   average number of SSA use operands per statement. */

void
simple_dce_from_worklist (bitmap worklist, bitmap need_eh_cleanup)
{
  int phiremoved = 0;
  int stmtremoved = 0;
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item. */
      unsigned i = bitmap_clear_first_set_bit (worklist);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.
         Note that a PHI node's use of its own result is not counted
         as still in use. */
      if (!def)
        continue;
      if (!has_zero_uses (def))
        {
          gimple *def_stmt = SSA_NAME_DEF_STMT (def);

          if (gimple_code (def_stmt) != GIMPLE_PHI)
            continue;

          gimple *use_stmt;
          imm_use_iterator use_iter;
          bool canremove = true;

          FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, def)
            {
              /* Ignore debug statements. */
              if (is_gimple_debug (use_stmt))
                continue;
              if (use_stmt != def_stmt)
                {
                  canremove = false;
                  break;
                }
            }
          if (!canremove)
            continue;
        }

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* The defining statement needs to be defining only this name.
         ASM is the only statement that can define more than one
         name. */
      if (is_a <gasm *> (t)
          && !single_ssa_def_operand (t, SSA_OP_ALL_DEFS))
        continue;

      /* Don't remove statements that are needed for non-call
         eh to work. */
      if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
        continue;

      /* Tell the caller that we removed a statement that might
         throw so it could cleanup the cfg for that block. */
      if (need_eh_cleanup && stmt_could_throw_p (cfun, t))
        bitmap_set_bit (need_eh_cleanup, gimple_bb (t)->index);

      /* Add uses to the worklist. */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt. */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        {
          remove_phi_node (&gsi, true);
          phiremoved++;
        }
      else
        {
          unlink_stmt_vdef (t);
          gsi_remove (&gsi, true);
          release_defs (t);
          stmtremoved++;
        }
    }
  statistics_counter_event (cfun, "PHIs removed", phiremoved);
  statistics_counter_event (cfun, "Statements removed", stmtremoved);
}