/* Liveness for SSA trees.
   Copyright (C) 2003-2023 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-iterator.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-ssa-live.h"
#include "debug.h"
#include "tree-ssa.h"
#include "ipa-utils.h"
#include "cfgloop.h"
#include "stringpool.h"
#include "attribs.h"
#include "optinfo.h"
#include "gimple-walk.h"
#include "cfganal.h"
#include "tree-cfg.h"

static void verify_live_on_entry (tree_live_info_p);


/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */

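/* For example (an illustrative sketch, not taken from real dumps): given

     a_5 = b_3;
     # c_9 = PHI <a_5 (bb2), d_7 (bb3)>

   coalescing would try to put a_5 and b_3 into one partition (the copy) and
   a_5, d_7 and c_9 into one partition (the PHI), so that out-of-ssa can
   rewrite all of them as a single compiler variable and the copies
   disappear.  */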

/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}

/* Create a variable partition map of SIZE for region, initialize and return
   it.  Region is a loop if LOOP is non-NULL, otherwise it is the current
   function.  If BITINT is non-NULL, only SSA_NAMEs from that bitmap
   will be coalesced.  */

var_map
init_var_map (int size, class loop *loop, bitmap bitint)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  map->vec_bbs = vNULL;
  if (loop)
    {
      map->bmp_bbs = BITMAP_ALLOC (NULL);
      map->outofssa_p = false;
      basic_block *bbs = get_loop_body_in_dom_order (loop);
      for (unsigned i = 0; i < loop->num_nodes; ++i)
        {
          bitmap_set_bit (map->bmp_bbs, bbs[i]->index);
          map->vec_bbs.safe_push (bbs[i]);
        }
      free (bbs);
    }
  else
    {
      map->bmp_bbs = NULL;
      map->outofssa_p = bitint == NULL;
      map->bitint = bitint;
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
        map->vec_bbs.safe_push (bb);
    }
  return map;
}
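
/* For instance (an illustrative sketch, not a quote from a caller), the
   out-of-ssa path creates a map covering the whole function with

     var_map map = init_var_map (num_ssa_names, NULL, NULL);

   while a client operating on a single loop passes that loop, so that only
   the region's blocks end up registered in vec_bbs and bmp_bbs.  */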


/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  if (map->bmp_bbs)
    BITMAP_FREE (map->bmp_bbs);
  map->vec_bbs.release ();
  free (map);
}


/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}


/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */
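
/* As an illustrative sketch (not from the original sources): if only
   partitions 3, 17 and 42 survive coalescing, a view maps them to the dense
   indices 0, 1 and 2 via partition_to_view/view_to_partition, so a liveness
   bitmap needs only 3 bits instead of 43.  Dropping the view later restores
   the original numbering without changing partition membership.  */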


/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view? Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
          && (!has_zero_uses (ssa_name (tmp))
              || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))
              || (SSA_NAME_VAR (ssa_name (tmp))
                  && !VAR_P (SSA_NAME_VAR (ssa_name (tmp))))))
        bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}


/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
        {
          map->partition_to_view[x] = i;
          map->view_to_partition[i] = x;
          i++;
        }
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}


/* Create a partition view which includes all the used partitions in MAP.  */

void
partition_view_normal (var_map map)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  var_map_base_fini (map);
}


/* Create a partition view in MAP which includes just the partitions which
   occur in the bitmap ONLY.  */

void
partition_view_bitmap (var_map map, bitmap only)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);
  /* partition_view_fini only frees NEW_PARTITIONS; free the bitmap from
     partition_view_init here to avoid leaking it.  */
  BITMAP_FREE (used);

  var_map_base_fini (map);
}


static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
        return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (VAR_P (t))
    {
      /* When a global var becomes used for the first time also walk its
         initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
          && DECL_CONTEXT (t) == current_function_decl)
        mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks as unused when they can
   possibly be eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Check whether the block is dead (by possibly eliminating its dead
   subblocks) and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead, or there is precisely one subblock and the
        block has the same abstract origin as the outer block and declares
        no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead
   variables out of scope blocks to let them be recycled by GGC and to save
   copying work done by the inliner.  */
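
/* For instance (an illustrative sketch, not taken from a real testcase), in

     void f (void)
     {
       {
         int i;
         ...
       }
     }

   once I becomes unused and the inner scope contains no statements, that
   inner block meets conditions 1) and 2) above and is removed, while the
   outermost function-level scope is always kept.  */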

static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.cc purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with block_ultimate_origin
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block)
    {
      tree fn = block_ultimate_origin (scope);
      if (fn && TREE_CODE (fn) == FUNCTION_DECL)
        {
          in_ctor_dtor_block = false;
          unused = false;
        }
    }

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested functions refers to the block of the
         function.  We might still call it even if all statements
         of the function it was nested into were eliminated.

         TODO: We can actually look into cgraph to see if the function
         will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
        unused = false;

      /* If a decl has a value expr, we need to instantiate it
         regardless of debug info generation, to avoid codegen
         differences in memory overlap tests.  update_equiv_regs() may
         indirectly call validate_equiv_mem() to test whether a
         SET_DEST overlaps with others, and if the value expr changes
         by virtual register instantiation, we may end up with
         different results.  */
      else if (VAR_P (*t) && DECL_HAS_VALUE_EXPR_P (*t))
        unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }

      /* When we are outputting debug info, we usually want to output
         info about optimized-out variables in the scope blocks.
         Exceptions are the scope blocks not containing any instructions
         at all, so the user can't get into the scopes in the first place.  */
      else if (is_used_p (*t))
        unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
        /* For labels that are still used in the IL, the decision to
           preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
           risk having different ordering in debug vs. non-debug builds
           during inlining or versioning.
           A label appearing here (we have already checked DECL_IGNORED_P)
           should not be used in the IL unless it has been explicitly used
           before, so we use TREE_USED as an approximation.  */
        /* In principle, we should do the same here as for the debug case
           below; however, when debugging, there might be additional nested
           levels that keep an upper level with a label live, so we have to
           force this block to be considered used, too.  */
        unused = false;

      /* When we are not doing full debug info, we can, however, keep around
         only the used variables for cfgexpand's memory packing, saving quite
         a lot of memory.

         For the sake of -g3, we keep around those vars but we don't count
         this as a use of the block, so an innermost block with no used vars
         and no instructions can be considered dead.  We only want to keep
         around blocks the user can breakpoint into and ask about values of
         optimized out variables.

         Similarly we need to keep around types at least until all
         variables of all nested blocks are gone.  We track no
         information on whether a given type is used or not, so we have
         to keep them even when not emitting debug information,
         otherwise we may end up remapping variables and their (local)
         types in different orders depending on whether debug
         information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
               || debug_info_level == DINFO_LEVEL_NORMAL
               || debug_info_level == DINFO_LEVEL_VERBOSE)
        ;
      else
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
        if (BLOCK_SUBBLOCKS (*t))
          {
            tree next = BLOCK_CHAIN (*t);
            tree supercontext = BLOCK_SUPERCONTEXT (*t);

            *t = BLOCK_SUBBLOCKS (*t);
            while (BLOCK_CHAIN (*t))
              {
                BLOCK_SUPERCONTEXT (*t) = supercontext;
                t = &BLOCK_CHAIN (*t);
              }
            BLOCK_CHAIN (*t) = next;
            BLOCK_SUPERCONTEXT (*t) = supercontext;
            t = &BLOCK_CHAIN (*t);
            nsubblocks ++;
          }
        else
          *t = BLOCK_CHAIN (*t);
      }
    else
      {
        t = &BLOCK_CHAIN (*t);
        nsubblocks ++;
      }


  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
           || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can always be
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile
           && debug_info_level == DINFO_LEVEL_NONE
           && !optinfo_wants_inlining_info_p ())
    {
      /* Even for -g0 don't prune outer scopes from inlined functions,
         otherwise late diagnostics from such functions will not be
         emitted or suppressed properly.  */
      if (inlined_function_outer_scope_p (scope))
        {
          gcc_assert (TREE_CODE (BLOCK_ORIGIN (scope)) == FUNCTION_DECL);
          unused = false;
        }
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined
     functions.  Inlined functions are always represented by a block
     with block_ultimate_origin being set to a FUNCTION_DECL and
     DECL_SOURCE_LOCATION set, unless they expand to nothing...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
                == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}

/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}

/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        unsigned i;
        tree b;
        gimple *stmt;

      next:
        stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
          continue;
        b = gimple_block (stmt);
        if (b && !TREE_USED (b))
          {
            /* Elide debug marker stmts that have an associated BLOCK from an
               inline instance removed with also the outermost scope BLOCK of
               said inline instance removed.  If the outermost scope BLOCK of
               said inline instance is preserved use that in place of the
               removed BLOCK.  That keeps the marker associated to the correct
               inline instance (or no inline instance in case it was not from
               an inline instance).  */
            if (gimple_debug_nonbind_marker_p (stmt)
                && BLOCK_ABSTRACT_ORIGIN (b))
              {
                while (TREE_CODE (b) == BLOCK
                       && !inlined_function_outer_scope_p (b))
                  b = BLOCK_SUPERCONTEXT (b);
                if (TREE_CODE (b) == BLOCK)
                  {
                    if (TREE_USED (b))
                      {
                        gimple_set_block (stmt, b);
                        continue;
                      }
                    gsi_remove (&gsi, true);
                    if (gsi_end_p (gsi))
                      break;
                    goto next;
                  }
              }
            gimple_set_block (stmt, NULL);
          }
        for (i = 0; i < gimple_num_ops (stmt); i++)
          walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
                     NULL, NULL);
      }
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, dump_flags_t flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s", indent, "", BLOCK_NUMBER (scope),
           TREE_USED (scope) ? "" : " (unused)");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
        {
          fprintf (file, " Originating from :");
          if (DECL_P (origin))
            print_generic_decl (file, origin, flags);
          else
            fprintf (file, "#%i", BLOCK_NUMBER (origin));
        }
    }
  if (BLOCK_FRAGMENT_ORIGIN (scope))
    fprintf (file, " Fragment of : #%i",
             BLOCK_NUMBER (BLOCK_FRAGMENT_ORIGIN (scope)));
  else if (BLOCK_FRAGMENT_CHAIN (scope))
    {
      fprintf (file, " Fragment chain :");
      for (t = BLOCK_FRAGMENT_CHAIN (scope); t ;
           t = BLOCK_FRAGMENT_CHAIN (t))
        fprintf (file, " #%i", BLOCK_NUMBER (t));
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
                          flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, dump_flags_t flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}


/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, dump_flags_t flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}


/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (dump_flags_t flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);
  auto_bitmap useddebug;

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree b = gimple_block (stmt);

          /* If we wanted to mark the block referenced by the inline
             entry point marker as used, this would be a good spot to
             do it.  If the block is not otherwise used, the stmt will
             be cleaned up in clear_unused_block_pointer.  */
          if (is_gimple_debug (stmt))
            {
              if (gimple_debug_bind_p (stmt))
                {
                  tree var = gimple_debug_bind_get_var (stmt);
                  if (VAR_P (var))
                    {
                      if (!gimple_debug_bind_get_value (stmt))
                        /* Run the 2nd phase.  */
                        have_local_clobbers = true;
                      else
                        bitmap_set_bit (useddebug, DECL_UID (var));
                    }
                }
              continue;
            }

          if (gimple_clobber_p (stmt))
            {
              have_local_clobbers = true;
              continue;
            }

          if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
            {
              have_local_clobbers = true;
              continue;
            }

          if (b)
            TREE_USED (b) = true;

          for (i = 0; i < gimple_num_ops (stmt); i++)
            mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
        }

      for (gphi_iterator gpi = gsi_start_phis (bb);
           !gsi_end_p (gpi);
           gsi_next (&gpi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
          tree def;
          gphi *phi = gpi.phi ();

          if (virtual_operand_p (gimple_phi_result (phi)))
            continue;

          def = gimple_phi_result (phi);
          mark_all_vars_used (&def);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
              tree arg = USE_FROM_PTR (arg_p);
              int index = PHI_ARG_INDEX_FROM_USE (arg_p);
              tree block =
                LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
              if (block != NULL)
                TREE_USED (block) = true;
              mark_all_vars_used (&arg);
            }
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (LOCATION_BLOCK (e->goto_locus) != NULL)
          TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We do a two-pass approach for the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other reference.  So the
     first pass ignores them, and the second pass (if there were any) tries
     to remove them.  We do the same for .DEFERRED_INIT.  */
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple *stmt = gsi_stmt (gsi);
            tree b = gimple_block (stmt);

            if (gimple_clobber_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree base = get_base_address (lhs);
                /* Remove clobbers referencing unused vars, or clobbers
                   with MEM_REF lhs referencing uninitialized pointers.  */
                if ((VAR_P (base) && !is_used_p (base))
                    || (TREE_CODE (lhs) == MEM_REF
                        && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
                        && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
                        && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
                            != PARM_DECL)))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }
                if (b)
                  TREE_USED (b) = true;
              }
            else if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
              {
                tree lhs = gimple_call_lhs (stmt);
                tree base = get_base_address (lhs);
                if (DECL_P (base) && !is_used_p (base))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }
                if (b)
                  TREE_USED (b) = true;
              }
            else if (gimple_debug_bind_p (stmt))
              {
                tree var = gimple_debug_bind_get_var (stmt);
                if (VAR_P (var)
                    && !bitmap_bit_p (useddebug, DECL_UID (var))
                    && !is_used_p (var))
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Dead debug bind reset to %u\n",
                               DECL_UID (var));
                    gsi_remove (&gsi, true);
                    continue;
                  }
              }
            gsi_next (&gsi);
          }
      }

  if (cfun->has_simduid_loops)
    {
      for (auto loop : loops_list (cfun, 0))
        if (loop->simduid && !is_used_p (loop->simduid))
          loop->simduid = NULL_TREE;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (VAR_P (var))
        {
          if (!is_used_p (var))
            {
              tree def;
              if (cfun->nonlocal_goto_save_area
                  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
                cfun->nonlocal_goto_save_area = NULL;
              /* Release any default def associated with var.  */
              if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
                {
                  set_ssa_default_def (cfun, var, NULL_TREE);
                  release_ssa_name (def);
                }
              continue;
            }
        }
      if (VAR_P (var) && DECL_HARD_REGISTER (var) && !is_global_var (var))
        cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
        (*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl),
                               polymorphic_ctor_dtor_p (current_function_decl,
                                                        true) != NULL_TREE);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);

  live->livein = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  live->liveout = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  for (unsigned i = 0; map->vec_bbs.iterate (i, &bb); ++i)
    {
      bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);
      bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);
    }

  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}


/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  if (live->livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
    }
  if (live->liveout)
    {
      bitmap_obstack_release (&live->liveout_obstack);
      free (live->liveout);
    }
  BITMAP_FREE (live->global);
  free (live->work_stack);
  free (live);
}


/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (!region_contains_p (live->map, pred_bb))
        continue;
      /* Variables live-on-entry from BB that aren't defined in the
         predecessor block.  These should be the live on entry vars to pred.
         Note that liveout holds the DEFs in a block while live on entry is
         being calculated.
         Add these bits to live-on-entry for the pred.  If there are any
         changes and pred_bb has already been visited, add it to the
         revisit stack.  */
      change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
                                          loe, &live->liveout[pred_bb->index]);
      if (change
          && bitmap_bit_p (visited, pred_bb->index))
        {
          bitmap_clear_bit (visited, pred_bb->index);
          *(live->stack_top)++ = pred_bb->index;
        }
    }
}


/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */
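
/* Conceptually this solves the classic backward liveness dataflow problem
   over partitions (an illustrative formulation matching the code above, not
   a quote from it).  With livein[B] seeded by the upward-exposed uses and
   liveout[B] temporarily holding the DEFs of B (both set up by
   set_var_live_on_entry), the worklist iterates

     livein (B) = seeds (B)
                  union ((union over successors S of livein (S))
                         minus DEF (B))

   until no livein set changes.  */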

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit the region's blocks in reverse order and propagate live on entry
     values into the predecessor blocks.  */
  for (unsigned i = live->map->vec_bbs.length () - 1;
       live->map->vec_bbs.iterate (i, &bb); --i)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }
}


/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple *stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in the liveout bitmap temporarily.  */
      if (def_bb && region_contains_p (live->map, def_bb))
        bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple *use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
        {
          /* Uses in PHIs are considered to be live at exit of the SRC block
             as this is where a copy would be inserted.  Check to see if it is
             defined in that block, or whether it's live on entry.  */
          int index = PHI_ARG_INDEX_FROM_USE (use);
          edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
          if (e->src != def_bb && region_contains_p (live->map, e->src))
            add_block = e->src;
        }
      else if (is_gimple_debug (use_stmt))
        continue;
      else
        {
          /* If it's not defined in this block, it's live on entry.  */
          basic_block use_bb = gimple_bb (use_stmt);
          if (use_bb != def_bb && region_contains_p (live->map, use_bb))
            add_block = use_bb;
        }

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
        {
          global = true;
          bitmap_set_bit (&live->livein[add_block->index], p);
        }
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}


/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Live on entry calculations used the liveout vectors for defs; clear
     them.  */
  for (unsigned i = 0; liveinfo->map->vec_bbs.iterate (i, &bb); ++i)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          if (virtual_operand_p (gimple_phi_result (phi)))
            continue;
          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree t = PHI_ARG_DEF (phi, i);
              int p;

              if (TREE_CODE (t) != SSA_NAME)
                continue;

              p = var_to_partition (liveinfo->map, t);
              if (p == NO_PARTITION)
                continue;
              e = gimple_phi_arg_edge (phi, i);
              if (region_contains_p (liveinfo->map, e->src))
                bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
            }
        }

      if (!region_contains_p (liveinfo->map, bb))
        continue;

      /* Add each successor's live-on-entry to this block's live-on-exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (region_contains_p (liveinfo->map, e->dest))
          bitmap_ior_into (&liveinfo->liveout[bb->index],
                           live_on_entry (liveinfo, e->dest));
    }
}


/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map, bool want_livein)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
        set_var_live_on_entry (var, live);
    }

  live_worklist (live);

  if (flag_checking)
    verify_live_on_entry (live);

  calculate_live_on_exit (live);

  if (!want_livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
      live->livein = NULL;
    }

  return live;
}
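
/* A typical use (an illustrative sketch; the SSA coalescing code is the real
   client) pairs this with delete_tree_live_info:

     var_map map = init_var_map (num_ssa_names, NULL, NULL);
     tree_live_info_p live = calculate_live_ranges (map, true);
     ... query live_on_entry (live, bb) / live_on_exit (live, bb) ...
     delete_tree_live_info (live);
     delete_var_map (map);
*/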

/* Data structure for compute_live_vars* functions.  */

struct compute_live_vars_data {
  /* Vector of bitmaps for live vars indices at the end of basic blocks,
     indexed by bb->index.  ACTIVE[ENTRY_BLOCK] must be an empty bitmap,
     ACTIVE[EXIT_BLOCK] is used for STOP_AFTER.  */
  vec<bitmap_head> active;
  /* Work bitmap of currently live variables.  */
  bitmap work;
  /* Set of interesting variables.  Variables with uids not in this
     hash_map are not tracked.  */
  live_vars_map *vars;
};

/* Callback for walk_stmt_load_store_addr_ops.  If OP is a VAR_DECL with
   uid set in DATA->vars, enter its corresponding index into bitmap
   DATA->work.  */

static bool
compute_live_vars_visit (gimple *, tree op, tree, void *pdata)
{
  compute_live_vars_data *data = (compute_live_vars_data *) pdata;
  op = get_base_address (op);
  if (op && VAR_P (op))
    if (unsigned int *v = data->vars->get (DECL_UID (op)))
      bitmap_set_bit (data->work, *v);
  return false;
}

/* Helper routine for compute_live_vars, calculating the sets of live
   variables at the end of BB, leaving the result in DATA->work.
   If STOP_AFTER is non-NULL, stop processing after that stmt.  */

static void
compute_live_vars_1 (basic_block bb, compute_live_vars_data *data,
                     gimple *stop_after)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit = compute_live_vars_visit;

  bitmap_clear (data->work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (data->work, &data->active[e->src->index]);

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), data, NULL, NULL, visit);
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (VAR_P (lhs))
            if (unsigned int *v = data->vars->get (DECL_UID (lhs)))
              bitmap_clear_bit (data->work, *v);
        }
      else if (!is_gimple_debug (stmt))
        walk_stmt_load_store_addr_ops (stmt, data, visit, visit, visit);
      if (stmt == stop_after)
        break;
    }
}

/* For function FN and live_vars_map VARS (a hash map from DECL_UIDs to a
   dense set of indexes of automatic variables), compute which of those
   variables are (might be) live at the end of each basic block.  */

vec<bitmap_head>
compute_live_vars (struct function *fn, live_vars_map *vars)
{
  vec<bitmap_head> active;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This over-approximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     to it upwards.  But it's conservatively correct as a variable can
     never hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  active.create (last_basic_block_for_fn (fn));
  active.quick_grow_cleared (last_basic_block_for_fn (fn));
  for (int i = 0; i < last_basic_block_for_fn (fn); i++)
    bitmap_initialize (&active[i], &bitmap_default_obstack);

  bitmap work = BITMAP_ALLOC (NULL);

  int *rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  int n_bbs = pre_and_rev_post_order_compute_fn (fn, NULL, rpo, false);

  bool changed = true;
  compute_live_vars_data data = { active, work, vars };
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          compute_live_vars_1 (bb, &data, NULL);
          if (bitmap_ior_into (&active[bb->index], work))
            changed = true;
        }
    }

  free (rpo);
  BITMAP_FREE (work);

  return active;
}

/* For ACTIVE computed by compute_live_vars, compute a bitmap of variables
   live after the STOP_AFTER statement and return that bitmap.  */

bitmap
live_vars_at_stmt (vec<bitmap_head> &active, live_vars_map *vars,
                   gimple *stop_after)
{
  bitmap work = BITMAP_ALLOC (NULL);
  compute_live_vars_data data = { active, work, vars };
  basic_block bb = gimple_bb (stop_after);
  compute_live_vars_1 (bb, &data, stop_after);
  return work;
}

/* Destroy what compute_live_vars has returned when it is no longer needed.  */

void
destroy_live_vars (vec<bitmap_head> &active)
{
  unsigned len = active.length ();
  for (unsigned i = 0; i < len; i++)
    bitmap_clear (&active[i]);

  active.release ();
}
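
/* The expected lifecycle (an illustrative sketch; VAR_A and VAR_B stand for
   arbitrary automatic variables, and the dense indices are chosen by the
   caller) is:

     live_vars_map vars;
     vars.put (DECL_UID (var_a), 0);
     vars.put (DECL_UID (var_b), 1);
     vec<bitmap_head> active = compute_live_vars (cfun, &vars);
     bitmap live = live_vars_at_stmt (active, &vars, stmt);
     ... test bitmap_bit_p (live, 0) etc. ...
     BITMAP_FREE (live);
     destroy_live_vars (active);

   Note that live_vars_at_stmt returns a freshly allocated bitmap which the
   caller must free.  */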

/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
        p = map->view_to_partition[x];
      else
        p = x;

      if (ssa_name (p) == NULL_TREE
          || virtual_operand_p (ssa_name (p)))
        continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
        {
          p = partition_find (map->var_partition, y);
          if (map->partition_to_view)
            p = map->partition_to_view[p];
          if (p == (int)x)
            {
              if (t++ == 0)
                {
                  fprintf (f, "Partition %d (", x);
                  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
                  fprintf (f, " - ");
                }
              fprintf (f, "%d ", y);
            }
        }
      if (t != 0)
        fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}


/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
        {
          fprintf (f, "\nLive on entry to BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, " ");
            }
          fprintf (f, "\n");
        }
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
        {
          fprintf (f, "\nLive on exit from BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, " ");
            }
          fprintf (f, "\n");
        }
    }
}


/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple *stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (!region_contains_p (live->map, e->dest))
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
        {
          basic_block tmp;
          tree d = NULL_TREE;
          bitmap loe;
          var = partition_to_var (map, i);
          stmt = SSA_NAME_DEF_STMT (var);
          tmp = gimple_bb (stmt);
          if (SSA_NAME_VAR (var))
            d = ssa_default_def (cfun, SSA_NAME_VAR (var));

          loe = live_on_entry (live, e->dest);
          if (loe && bitmap_bit_p (loe, i))
            {
              if (!gimple_nop_p (stmt))
                {
                  num++;
                  print_generic_expr (stderr, var, TDF_SLIM);
                  fprintf (stderr, " is defined ");
                  if (tmp)
                    fprintf (stderr, " in BB%d, ", tmp->index);
                  fprintf (stderr, "by:\n");
                  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
                  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
                           entry_block);
                  fprintf (stderr, " So it appears to have multiple defs.\n");
                }
              else
                {
                  if (d != var)
                    {
                      num++;
                      print_generic_expr (stderr, var, TDF_SLIM);
                      fprintf (stderr, " is live-on-entry to BB%d ",
                               entry_block);
                      if (d)
                        {
                          fprintf (stderr, " but is not the default def of ");
                          print_generic_expr (stderr, d, TDF_SLIM);
                          fprintf (stderr, "\n");
                        }
                      else
                        fprintf (stderr, " and there is no default def.\n");
                    }
                }
            }
          else
            if (d == var)
              {
                /* An undefined local variable does not need to be very
                   alive.  */
                if (ssa_undefined_value_p (var, false))
                  continue;

                /* The only way this var shouldn't be marked live on entry is
                   if it occurs in a PHI argument of the block.  */
                size_t z;
                bool ok = false;
                gphi_iterator gsi;
                for (gsi = gsi_start_phis (e->dest);
                     !gsi_end_p (gsi) && !ok;
                     gsi_next (&gsi))
                  {
                    gphi *phi = gsi.phi ();
                    if (virtual_operand_p (gimple_phi_result (phi)))
                      continue;
                    for (z = 0; z < gimple_phi_num_args (phi); z++)
                      if (var == gimple_phi_arg_def (phi, z))
                        {
                          ok = true;
                          break;
                        }
                  }
                if (ok)
                  continue;
                /* Expand adds unused default defs for PARM_DECLs and
                   RESULT_DECLs.  They're ok.  */
                if (has_zero_uses (var)
                    && SSA_NAME_VAR (var)
                    && !VAR_P (SSA_NAME_VAR (var)))
                  continue;
                num++;
                print_generic_expr (stderr, var, TDF_SLIM);
                fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
                         entry_block);
                fprintf (stderr, "but it is a default def so it should be.\n");
              }
        }
    }
  gcc_assert (num <= 0);
}


/* Virtual operand liveness analysis data init.  */

void
virtual_operand_live::init ()
{
  liveout = XCNEWVEC (tree, last_basic_block_for_fn (cfun) + 1);
  liveout[ENTRY_BLOCK] = ssa_default_def (cfun, gimple_vop (cfun));
}

/* Compute live-in of BB from cached live-out.  */

tree
virtual_operand_live::get_live_in (basic_block bb)
{
  /* A virtual PHI is a convenient cache for live-in.  */
  gphi *phi = get_virtual_phi (bb);
  if (phi)
    return gimple_phi_result (phi);

  if (!liveout)
    init ();

  /* Since we don't have a virtual PHI and we don't know whether there's
     a downstream virtual use (and thus PHIs are inserted where necessary)
     we now have to check each incoming edge live-out.  */
  edge_iterator ei;
  edge e;
  tree livein = NULL_TREE;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_DFS_BACK)
      /* We can ignore backedges since if there's a def there it would
         have forced a PHI in the source because it also acts as a use
         downstream.  */
      continue;
    else if (!livein)
      livein = get_live_out (e->src);
    else if (get_live_out (e->src) != livein)
      /* When there's no virtual use downstream this indicates a point
         where we'd insert a PHI merging the different live virtual
         operands.  */
      return NULL_TREE;

  return livein;
}

/* Compute live-out of BB.  */

tree
virtual_operand_live::get_live_out (basic_block bb)
{
  if (!liveout)
    init ();

  if (liveout[bb->index])
    return liveout[bb->index];

  tree lo = NULL_TREE;
  for (auto gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_vdef (stmt))
        {
          lo = gimple_vdef (stmt);
          break;
        }
      if (gimple_vuse (stmt))
        {
          lo = gimple_vuse (stmt);
          break;
        }
    }
  if (!lo)
    lo = get_live_in (bb);
  liveout[bb->index] = lo;
  return lo;
}
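
/* Example use (an illustrative sketch): get_live_in consults EDGE_DFS_BACK,
   so back edges should be marked beforehand, e.g. by mark_dfs_back_edges ().
   The live virtual operand after each block can then be queried with

     virtual_operand_live vop_live;
     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       if (tree vop = vop_live.get_live_out (bb))
         ... use VOP, the virtual operand live after BB ...
*/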