1/* Data flow functions for trees.
2 Copyright (C) 2001-2023 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "backend.h"
25#include "rtl.h"
26#include "tree.h"
27#include "gimple.h"
28#include "tree-pass.h"
29#include "ssa.h"
30#include "tree-pretty-print.h"
31#include "fold-const.h"
32#include "stor-layout.h"
33#include "langhooks.h"
34#include "gimple-iterator.h"
35#include "gimple-walk.h"
36#include "tree-dfa.h"
37#include "gimple-range.h"
38
39/* Build and maintain data flow information for trees. */
40
41/* Counters used to display DFA and SSA statistics. */
struct dfa_stats_d
{
  long num_defs;		/* Real DEF operands seen.  */
  long num_uses;		/* Real USE operands seen.  */
  long num_phis;		/* PHI nodes in the function.  */
  long num_phi_args;		/* Sum of argument counts over all PHIs.  */
  size_t max_num_phi_args;	/* Largest argument count of any single PHI.  */
  long num_vdefs;		/* Virtual definitions (VDEFs).  */
  long num_vuses;		/* Virtual uses (VUSEs).  */
};
52
53
54/* Local functions. */
55static void collect_dfa_stats (struct dfa_stats_d *);
56
57
58/*---------------------------------------------------------------------------
59 Dataflow analysis (DFA) routines
60---------------------------------------------------------------------------*/
61
62/* Renumber the gimple stmt uids in one block. The caller is responsible
63 of calling set_gimple_stmt_max_uid (fun, 0) at some point. */
64
void
renumber_gimple_stmt_uids_in_block (struct function *fun, basic_block bb)
{
  gimple_stmt_iterator bsi;
  /* PHI nodes are walked first so they receive the lowest UIDs in the
     block, followed by the ordinary statements in order.  */
  for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);
      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
    }
  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple *stmt = gsi_stmt (bsi);
      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
    }
}
80
81/* Renumber all of the gimple stmt uids. */
82
void
renumber_gimple_stmt_uids (struct function *fun)
{
  basic_block bb;

  /* Restart numbering from zero, then renumber every block, including
     the entry and exit blocks (FOR_ALL_BB_FN).  */
  set_gimple_stmt_max_uid (fun, 0);
  FOR_ALL_BB_FN (bb, fun)
    renumber_gimple_stmt_uids_in_block (fun, bb);
}
92
93/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
94 in BLOCKS, of which there are N_BLOCKS. Also renumbers PHIs. */
95
96void
97renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
98{
99 int i;
100
101 set_gimple_stmt_max_uid (cfun, maxid: 0);
102 for (i = 0; i < n_blocks; i++)
103 renumber_gimple_stmt_uids_in_block (cfun, bb: blocks[i]);
104}
105
106
107
108/*---------------------------------------------------------------------------
109 Debugging functions
110---------------------------------------------------------------------------*/
111
112/* Dump variable VAR and its may-aliases to FILE. */
113
void
dump_variable (FILE *file, tree var)
{
  /* For an SSA name, dump its points-to information (pointers only)
     and then dump the underlying variable instead.  */
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  /* Anonymous SSA names have no underlying variable.  */
  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  /* Only mention the points-to UID when it differs from the DECL UID.  */
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  /* The default-definition SSA name, if there is a current function.  */
  if (cfun && ssa_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}
162
163
164/* Dump variable VAR and its may-aliases to stderr. */
165
/* Dump VAR to stderr; convenience wrapper for use from a debugger.  */
DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}
171
172
173/* Dump various DFA statistics to FILE. */
174
void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  /* Column formats: header rows, per-counter rows, and the total row.  */
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu" PRsa (11) "\n";
  const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* For each counter, estimate the memory footprint and accumulate it.  */
  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SIZE_AMOUNT (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data",
	   SIZE_AMOUNT (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  /* Guard against division by zero when there are no PHI nodes.  */
  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}
239
240
241/* Dump DFA statistics on stderr. */
242
/* Dump DFA statistics to stderr; convenience wrapper for a debugger.  */
DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}
248
249
250/* Collect DFA statistics and store them in the structure pointed to by
251 DFA_STATS_P. */
252
253static void
254collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
255{
256 basic_block bb;
257
258 gcc_assert (dfa_stats_p);
259
260 memset (s: (void *)dfa_stats_p, c: 0, n: sizeof (struct dfa_stats_d));
261
262 /* Walk all the statements in the function counting references. */
263 FOR_EACH_BB_FN (bb, cfun)
264 {
265 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (i: si);
266 gsi_next (i: &si))
267 {
268 gphi *phi = si.phi ();
269 dfa_stats_p->num_phis++;
270 dfa_stats_p->num_phi_args += gimple_phi_num_args (gs: phi);
271 if (gimple_phi_num_args (gs: phi) > dfa_stats_p->max_num_phi_args)
272 dfa_stats_p->max_num_phi_args = gimple_phi_num_args (gs: phi);
273 }
274
275 for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (i: si);
276 gsi_next (i: &si))
277 {
278 gimple *stmt = gsi_stmt (i: si);
279 dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
280 dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
281 dfa_stats_p->num_vdefs += gimple_vdef (g: stmt) ? 1 : 0;
282 dfa_stats_p->num_vuses += gimple_vuse (g: stmt) ? 1 : 0;
283 }
284 }
285}
286
287
288/*---------------------------------------------------------------------------
289 Miscellaneous helpers
290---------------------------------------------------------------------------*/
291
292/* Lookup VAR UID in the default_defs hashtable and return the associated
293 variable. */
294
tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  /* Build a stack-local dummy SSA_NAME/decl pair carrying only VAR's
     UID; the DEFAULT_DEFS hash table hashes and compares entries by
     the UID of the SSA name's variable, so this suffices as a key.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}
312
313/* Insert the pair VAR's UID, DEF into the default_defs hashtable
314 of function FN. */
315
void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  /* Dummy lookup key carrying only VAR's UID (see ssa_default_def).  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  /* A NULL DEF removes any existing default definition of VAR.  */
  if (!def)
    {
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  /* Clear the flag on the old name before dropping the entry.  */
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
351
352/* Retrieve or create a default definition for VAR. */
353
354tree
355get_or_create_ssa_default_def (struct function *fn, tree var)
356{
357 tree ddef = ssa_default_def (fn, var);
358 if (ddef == NULL_TREE)
359 {
360 ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
361 set_ssa_default_def (fn, var, def: ddef);
362 }
363 return ddef;
364}
365
366
367/* If EXP is a handled component reference for a structure, return the
368 base variable. The access range is delimited by bit positions *POFFSET and
369 *POFFSET + *PMAX_SIZE. The access size is *PSIZE bits. If either
370 *PSIZE or *PMAX_SIZE is -1, they could not be determined. If *PSIZE
371 and *PMAX_SIZE are equal, the access is non-variable. If *PREVERSE is
372 true, the storage order of the reference is reversed. */
373
tree
get_ref_base_and_extent (tree exp, poly_int64 *poffset,
			 poly_int64 *psize,
			 poly_int64 *pmax_size,
			 bool *preverse)
{
  /* -1 throughout means "unknown" for both bitsize and maxsize.  */
  poly_offset_int bitsize = -1;
  poly_offset_int maxsize;
  tree size_tree = NULL_TREE;
  poly_offset_int bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    {
      size_tree = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
    }
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    bitsize = wi::to_poly_offset (size_tree);

  *preverse = reverse_storage_order_for_component_p (exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && poly_int_tree_p (this_offset))
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE (field);
			tree ssize = TYPE_SIZE (stype);
			if (fsize == NULL
			    || !poly_int_tree_p (fsize)
			    || ssize == NULL
			    || !poly_int_tree_p (ssize))
			  maxsize = -1;
			else if (known_size_p (maxsize))
			  {
			    /* Grow maxsize by the tail padding:
			       containing size minus field size minus the
			       field's offset within the container.  */
			    poly_offset_int tem
			      = (wi::to_poly_offset (ssize)
				 - wi::to_poly_offset (fsize));
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		    /* An component ref with an adjacent field up in the
		       structure hierarchy constrains the size of any variable
		       array ref lower in the access hierarchy.  */
		    else
		      seen_variable_array_ref = false;
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (known_size_p (maxsize)
		    && csize
		    && poly_int_tree_p (csize))
		  maxsize = wi::to_poly_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		poly_offset_int woffset
		  = wi::sext (wi::to_poly_offset (index)
			      - wi::to_poly_offset (low_bound),
			      TYPE_PRECISION (sizetype));
		woffset *= wi::to_offset (unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (known_size_p (maxsize)
		    && asize
		    && poly_int_tree_p (asize))
		  maxsize = wi::to_poly_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;

		value_range vr;
		range_query *query;
		query = get_range_query (cfun);

		/* Ask the range machinery for bounds on the variable
		   index; a known range can tighten maxsize and offset.  */
		if (TREE_CODE (index) == SSA_NAME
		    && (low_bound = array_ref_low_bound (exp),
			poly_int_tree_p (low_bound))
		    && (unit_size = array_ref_element_size (exp),
			TREE_CODE (unit_size) == INTEGER_CST)
		    && query->range_of_expr (vr, index)
		    && !vr.varying_p ()
		    && !vr.undefined_p ())
		  {
		    wide_int min = vr.lower_bound ();
		    wide_int max = vr.upper_bound ();
		    poly_offset_int lbound = wi::to_poly_offset (low_bound);
		    /* Try to constrain maxsize with range information.  */
		    offset_int omax
		      = offset_int::from (max, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_lt (lbound, omax))
		      {
			poly_offset_int rmaxsize;
			rmaxsize = (omax - lbound + 1)
			    * wi::to_offset (unit_size) << LOG2_BITS_PER_UNIT;
			if (!known_size_p (maxsize)
			    || known_lt (rmaxsize, maxsize))
			  {
			    /* If we know an upper bound below the declared
			       one this is no longer variable.  */
			    if (known_size_p (maxsize))
			      seen_variable_array_ref = false;
			    maxsize = rmaxsize;
			  }
		      }
		    /* Try to adjust bit_offset with range information.  */
		    offset_int omin
		      = offset_int::from (min, TYPE_SIGN (TREE_TYPE (index)));
		    if (known_le (lbound, omin))
		      {
			poly_offset_int woffset
			  = wi::sext (omin - lbound,
				      TYPE_PRECISION (sizetype));
			woffset *= wi::to_offset (unit_size);
			woffset <<= LOG2_BITS_PER_UNIT;
			bit_offset += woffset;
			if (known_size_p (maxsize))
			  maxsize -= woffset;
		      }
		  }
	      }
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part lives one element past the real part.  */
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	     struct { int length; int a[1]; } x;           x.a[d]
	     struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	     struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	     struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && known_size_p (maxsize)
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		  || (maybe_eq
		      (bit_offset + maxsize,
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  poly_offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  poly_int64 off_hwi;
		  if (off.to_shwi (&off_hwi))
		    {
		      bit_offset = off_hwi;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  /* Punt if the accessed size is unknown or negative.  */
  if (!bitsize.to_shwi (psize) || maybe_lt (*psize, 0))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  if (!bit_offset.to_shwi (poffset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (VAR_P (exp)
	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || !poly_int_tree_p (sz_tree)
		      || maybe_eq (bit_offset + maxsize,
				   wi::to_poly_offset (sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
         base decl.  */
      else if (!known_size_p (maxsize)
	       && DECL_SIZE (exp)
	       && poly_int_tree_p (DECL_SIZE (exp)))
	maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base type constant.  */
      if (!known_size_p (maxsize)
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
	maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  if (!maxsize.to_shwi (pmax_size)
      || maybe_lt (*pmax_size, 0)
      || !endpoint_representable_p (*poffset, *pmax_size))
    *pmax_size = -1;

  /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
     check for such overflows individually and assume it works.  */
  if (!endpoint_representable_p (*poffset, *psize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  return exp;
}
730
731/* Like get_ref_base_and_extent, but for cases in which we only care
732 about constant-width accesses at constant offsets. Return null
733 if the access is anything else. */
734
tree
get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
			     HOST_WIDE_INT *psize, bool *preverse)
{
  poly_int64 offset, size, max_size;
  HOST_WIDE_INT const_offset, const_size;
  bool reverse;
  tree decl = get_ref_base_and_extent (exp, &offset, &size, &max_size,
				       &reverse);
  /* Reject variable offsets/sizes, negative offsets, and accesses whose
     maximum size is unknown or differs from the actual size.  */
  if (!offset.is_constant (&const_offset)
      || !size.is_constant (&const_size)
      || const_offset < 0
      || !known_size_p (max_size)
      || maybe_ne (max_size, const_size))
    return NULL_TREE;

  *poffset = const_offset;
  *psize = const_size;
  *preverse = reverse;
  return decl;
}
756
757/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
758 denotes the starting address of the memory access EXP.
759 Returns NULL_TREE if the offset is not constant or any component
760 is not BITS_PER_UNIT-aligned.
761 VALUEIZE if non-NULL is used to valueize SSA names. It should return
762 its argument or a constant if the argument is known to be constant. */
763
tree
get_addr_base_and_unit_offset_1 (tree exp, poly_int64 *poffset,
				 tree (*valueize) (tree))
{
  poly_int64 byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  Any component that is not a
     whole number of bytes, or whose offset is not constant, makes the whole
     walk fail with NULL_TREE.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    poly_int64 this_byte_offset;
	    poly_uint64 this_bit_offset;
	    /* Only byte-aligned bit-field positions are representable.  */
	    if (!poly_int_tree_p (TREE_OPERAND (exp, 2), &this_bit_offset)
		|| !multiple_p (this_bit_offset, BITS_PER_UNIT,
				&this_byte_offset))
	      return NULL_TREE;
	    byte_offset += this_byte_offset;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    poly_int64 hthis_offset;

	    /* Fail for variable offsets and non-byte-aligned fields.  */
	    if (!this_offset
		|| !poly_int_tree_p (this_offset, &hthis_offset)
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* Let VALUEIZE turn SSA names into constants if it can.  */
	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);
	    if (!poly_int_tree_p (index))
	      return NULL_TREE;
	    low_bound = array_ref_low_bound (exp);
	    if (valueize
		&& TREE_CODE (low_bound) == SSA_NAME)
	      low_bound = (*valueize) (low_bound);
	    if (!poly_int_tree_p (low_bound))
	      return NULL_TREE;
	    unit_size = array_ref_element_size (exp);
	    if (TREE_CODE (unit_size) != INTEGER_CST)
	      return NULL_TREE;

	    /* If the resulting bit-offset is constant, track it.  */
	    poly_offset_int woffset
	      = wi::sext (wi::to_poly_offset (index)
			  - wi::to_poly_offset (low_bound),
			  TYPE_PRECISION (sizetype));
	    woffset *= wi::to_offset (unit_size);
	    byte_offset += woffset.force_shwi ();
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part is one element past the real part.  */
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		/* Variable index components make the offset unknown.  */
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
900
901/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
902 denotes the starting address of the memory access EXP.
903 Returns NULL_TREE if the offset is not constant or any component
904 is not BITS_PER_UNIT-aligned. */
905
/* Convenience wrapper around get_addr_base_and_unit_offset_1 with no
   SSA-name valueization callback.  */
tree
get_addr_base_and_unit_offset (tree exp, poly_int64 *poffset)
{
  return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}
911
912/* Returns true if STMT references an SSA_NAME that has
913 SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
914
915bool
916stmt_references_abnormal_ssa_name (gimple *stmt)
917{
918 ssa_op_iter oi;
919 use_operand_p use_p;
920
921 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
922 {
923 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
924 return true;
925 }
926
927 return false;
928}
929
930/* If STMT takes any abnormal PHI values as input, replace them with
931 local copies. */
932
void
replace_abnormal_ssa_names (gimple *stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      tree op = USE_FROM_PTR (use_p);
      if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
	{
	  /* Insert NEW_NAME = OP immediately before STMT and make STMT
	     use NEW_NAME, so STMT itself no longer references the
	     abnormal SSA name directly.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  tree new_name = make_ssa_name (TREE_TYPE (op));
	  gassign *assign = gimple_build_assign (new_name, op);
	  gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
	  SET_USE (use_p, new_name);
	}
    }
}
952
953/* Pair of tree and a sorting index, for dump_enumerated_decls. */
struct GTY(()) numbered_tree
{
  tree t;	/* The declaration.  */
  int num;	/* Sequence number: order of first discovery in the walk.  */
};
959
960
961/* Compare two declarations references by their DECL_UID / sequence number.
962 Called via qsort. */
963
964static int
965compare_decls_by_uid (const void *pa, const void *pb)
966{
967 const numbered_tree *nt_a = ((const numbered_tree *)pa);
968 const numbered_tree *nt_b = ((const numbered_tree *)pb);
969
970 if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
971 return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
972 return nt_a->num - nt_b->num;
973}
974
975/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls. */
/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls:
   record each declaration reference, tagged with its discovery order.  */
static tree
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
  numbered_tree nt;

  if (!DECL_P (*tp))
    return NULL_TREE;
  nt.t = *tp;
  /* The current list length serves as the discovery sequence number.  */
  nt.num = list->length ();
  list->safe_push (nt);
  /* Do not walk into the decl itself; only the reference is recorded.  */
  *walk_subtrees = 0;
  return NULL_TREE;
}
991
992/* Find all the declarations used by the current function, sort them by uid,
993 and emit the sorted list. Each declaration is tagged with a sequence
994 number indicating when it was found during statement / tree walking,
995 so that TDF_NOUID comparisons of anonymous declarations are still
996 meaningful. Where a declaration was encountered more than once, we
997 emit only the sequence number of the first encounter.
998 FILE is the dump file where to output the list and FLAGS is as in
999 print_generic_expr. */
void
dump_enumerated_decls (FILE *file, dump_flags_t flags)
{
  /* Nothing to walk without a CFG (e.g. during RTL dumps).  */
  if (!cfun->cfg)
    return;

  basic_block bb;
  struct walk_stmt_info wi;
  auto_vec<numbered_tree, 40> decl_list;

  memset (&wi, '\0', sizeof (wi));
  wi.info = (void *) &decl_list;
  /* Collect every decl referenced by a non-debug statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	if (!is_gimple_debug (gsi_stmt (gsi)))
	  walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
    }
  decl_list.qsort (compare_decls_by_uid);
  if (decl_list.length ())
    {
      unsigned ix;
      numbered_tree *ntp;
      tree last = NULL_TREE;

      fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
	       current_function_name ());
      FOR_EACH_VEC_ELT (decl_list, ix, ntp)
	{
	  /* Sorting put duplicates adjacent; emit only the first
	     (lowest sequence number) occurrence of each decl.  */
	  if (ntp->t == last)
	    continue;
	  fprintf (file, "%d: ", ntp->num);
	  print_generic_decl (file, ntp->t, flags);
	  fprintf (file, "\n");
	  last = ntp->t;
	}
    }
}
1040

/* Source: gcc/tree-dfa.cc.  */