/* Alias analysis for trees.
   Copyright (C) 2004-2023 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "attr-fnspec.h"
#include "errors.h"
#include "dbgcnt.h"
#include "gimple-pretty-print.h"
#include "print-tree.h"
#include "tree-ssa-alias-compare.h"
#include "builtins.h"
#include "internal-fn.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree, bool)

     This function queries if dereferencing a pointer variable may
     alias global memory.  If the bool argument is true, global memory
     is considered to also include function-local memory that escaped.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.cc.  */

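/* For example, a pass that wants to know whether a statement STMT can
   interfere with a memory reference REF may combine the two main entry
   points above (a hypothetical sketch; STMT and REF are caller-supplied):

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       ... STMT neither writes nor reads (parts of) the memory in REF ...  */
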
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
  unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
  unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
  unsigned HOST_WIDE_INT modref_use_may_alias;
  unsigned HOST_WIDE_INT modref_use_no_alias;
  unsigned HOST_WIDE_INT modref_clobber_may_alias;
  unsigned HOST_WIDE_INT modref_clobber_no_alias;
  unsigned HOST_WIDE_INT modref_kill_no;
  unsigned HOST_WIDE_INT modref_kill_yes;
  unsigned HOST_WIDE_INT modref_tests;
  unsigned HOST_WIDE_INT modref_baseptr_tests;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  stmt_kills_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" kills, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
	   alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
	   + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
  fprintf (s, "\nModref stats:\n");
  fprintf (s, "  modref kill: "
	   HOST_WIDE_INT_PRINT_DEC" kills, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_kill_yes,
	   alias_stats.modref_kill_yes
	   + alias_stats.modref_kill_no);
  fprintf (s, "  modref use: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_use_no_alias,
	   alias_stats.modref_use_no_alias
	   + alias_stats.modref_use_may_alias);
  fprintf (s, "  modref clobber: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
	   alias_stats.modref_clobber_no_alias,
	   alias_stats.modref_clobber_no_alias
	   + alias_stats.modref_clobber_may_alias,
	   alias_stats.modref_tests,
	   ((double)alias_stats.modref_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias),
	   alias_stats.modref_baseptr_tests,
	   ((double)alias_stats.modref_baseptr_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias));
}


/* Return true if dereferencing PTR may alias with a global variable.
   When ESCAPED_LOCAL_P is true escaped local memory is also considered
   global.  */

bool
ptr_deref_may_alias_global_p (tree ptr, bool escaped_local_p)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt, escaped_local_p);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      /* Try ptr2 when ptr1 points to a constant.  */
      else if (base
	       && !CONSTANT_CLASS_P (base))
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same pointer.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to an SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
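
/* As an illustration of the above: given

     int x;
     int *p = foo ();
     ... &x != p ...

   the inequality is provable when the points-to set of p does not contain
   x and none of the restrict/interposition conditions checked above apply.
   This sketch is explanatory only; foo is a placeholder.  */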

/* Returns whether reference REF to BASE may refer to global memory.
   When ESCAPED_LOCAL_P is true escaped local memory is also considered
   global.  */

static bool
ref_may_alias_global_p_1 (tree base, bool escaped_local_p)
{
  if (DECL_P (base))
    return (is_global_var (base)
	    || (escaped_local_p
		&& pt_solution_includes (&cfun->gimple_df->escaped, base)));
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0),
					 escaped_local_p);
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref, bool escaped_local_p)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base, escaped_local_p);
}

bool
ref_may_alias_global_p (tree ref, bool escaped_local_p)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base, escaped_local_p);
}
/* Return true if STMT may clobber global memory.
   When ESCAPED_LOCAL_P is true escaped local memory is also considered
   global.  */

bool
stmt_may_clobber_global_p (gimple *stmt, bool escaped_local_p)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address-taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs, escaped_local_p));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
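
/* For example, in

     static int g;
     void foo (int *p) { g = 1; *p = 2; }

   the store to g clobbers global memory directly, while for *p = 2 the
   answer depends on whether the points-to set of p may include global
   (or, with ESCAPED_LOCAL_P, escaped local) memory.  Illustrative
   sketch only.  */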


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
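
/* An ao_ref is typically used as a short-lived stack object, e.g.
   (hypothetical sketch):

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);
     alias_set_type set = ao_ref_alias_set (&r);

   The base, extent and alias sets are computed lazily by the accessors
   below and cached in *R.  */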

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
    base_ref = TREE_OPERAND (base_ref, 0);
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_base_alias_set (REF).  */

tree
ao_ref_base_alias_ptr_type (ao_ref *ref)
{
  tree base_ref;

  if (!ref->ref)
    return NULL_TREE;
  base_ref = ref->ref;
  if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
    base_ref = TREE_OPERAND (base_ref, 0);
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  tree ret = reference_alias_ptr_type (base_ref);
  return ret;
}

/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_alias_set (REF).  */

tree
ao_ref_alias_ptr_type (ao_ref *ref)
{
  if (!ref->ref)
    return NULL_TREE;
  tree ret = reference_alias_ptr_type (ref->ref);
  return ret;
}

/* Return the alignment of the access *REF and store it in the *ALIGN
   and *BITPOS pairs.  Returns false if no alignment could be determined.
   See get_object_alignment_2 for details.  */

bool
ao_ref_alignment (ao_ref *ref, unsigned int *align,
		  unsigned HOST_WIDE_INT *bitpos)
{
  if (ref->ref)
    return get_object_alignment_1 (ref->ref, align, bitpos);

  /* When we just have ref->base we cannot use get_object_alignment since
     that will eventually use the type of the apparent access while for
     example ao_ref_init_from_ptr_and_range is not careful to adjust that.  */
  *align = BITS_PER_UNIT;
  HOST_WIDE_INT offset;
  if (!ref->offset.is_constant (&offset)
      || !get_object_alignment_2 (ref->base, align, bitpos, true))
    return false;
  *bitpos += (unsigned HOST_WIDE_INT)offset * BITS_PER_UNIT;
  *bitpos = *bitpos & (*align - 1);
  return true;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the
   assumption that RANGE_KNOWN is set.

   The access is assumed to be only to or after the pointer target adjusted
   by the offset, never before it (even when RANGE_KNOWN is false).  */

void
ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
				bool range_known,
				poly_int64 offset,
				poly_int64 size,
				poly_int64 max_size)
{
  poly_int64 t, extra_offset = 0;

  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  range_known = false;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset + offset;
  if (range_known)
    {
      ref->max_size = max_size;
      ref->size = size;
    }
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 size_hwi;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      size_hwi = size_hwi * BITS_PER_UNIT;
      ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
    }
  else
    ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
}
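
/* For example, the destination of a memcpy-style call CALL could be
   described as (hypothetical sketch):

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));

   which degrades to an unknown-extent reference when the length argument
   is not a compile-time constant.  */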

/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their sizes.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || VECTOR_TYPE_P (type1))
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || VECTOR_TYPE_P (type2))
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  alias_set_type set1 = get_alias_set (type1);
  alias_set_type set2 = get_alias_set (type2);
  if (set1 == set2)
    return -1;

  /* Pointers to void are considered compatible with all other pointers,
     so for two pointers see what the alias set resolution thinks.  */
  if (POINTER_TYPE_P (type1)
      && POINTER_TYPE_P (type2)
      && alias_sets_conflict_p (set1, set2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

/* Return true if TYPE is a composite type (i.e. we may apply one of the
   handled components to it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}

/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that the outermost arrays
   may just partly overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}

/* Return true if REF is a reference to a zero sized trailing array.  I.e.
     struct foo {int bar; int array[0];} *fooptr;
     fooptr->array.  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_ref_flexible_size_p (ref));
}

/* Worker for aliasing_component_refs_p.  Most parameters match the
   parameters of aliasing_component_refs_p.

   Walk the access path REF2 and try to find a type matching TYPE1
   (which is the start of the possibly aliasing access path REF1).
   If a match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if a match was found but disambiguation failed
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to false if there is no type matching
   TYPE1 in the access path REF2 and to true if we are not sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from the inner type to the outer types.  If the type we
	 see is already too large to be a part of TYPE1, terminate the
	 search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we cannot decide whether the types are the same try to
	     continue looking for the exact match.
	     Remember however that we possibly saw a match
	     to bypass the access path continuation tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by a multiple of their element
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers the
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}

/* Consider the access path base1....ref1 and the access path base2...ref2.
   Return true if they can be composed into the single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of BASE2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of BASE2.  */

bool
access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
			    alias_set_type ref1_alias_set,
			    tree base_type2, tree end_struct_ref2,
			    alias_set_type base2_alias_set)
{
  /* Access paths cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends with a type too small to hold the base
     of the second access path, the paths typically cannot continue.

     Punt if END_STRUCT_PAST_END1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to a union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
  if (!end_struct_past_end1)
    {
      if (compare_type_sizes (ref_type1, base_type2) < 0)
	return false;
      /* If path2 contains a trailing array access we can strengthen the
	 check and verify that the size of the trailing array element fits
	 as well.  In fact we could check offset + type_size, but we do not
	 track offsets and this is quite a corner case.  */
      if (end_struct_ref2
	  && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
	return false;
    }
  return (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set));
}

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers try to find
     a common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  bool end_struct_past_end1 = false;
  bool end_struct_past_end2 = false;

  /* Choose bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a
     zero sized or trailing array.

     We generally discard the segment after end_of_tbaa_ref; however
     we need to be careful in case it contains a zero sized or trailing
     array.  These may happen after a reference to a union and in this
     case we need to not disambiguate type punning scenarios.

     We set:
	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists
	in the range base....end_of_tbaa_ref)

	end_struct_past_end1 is true if this trailing reference occurs in
	end_of_tbaa_ref...actual_ref.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotone in the size of the object.
	 The exception are trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such a struct has size 0 but accesses to a.array may have non-zero
	 size.  In this case the size of TREE_TYPE (base1) is smaller than
	 the size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (ends_tbaa_access_path_p (base1))
	{
	  ref1 = TREE_OPERAND (base1, 0);
	  if (end_struct_ref1)
	    {
	      end_struct_past_end1 = true;
	      end_struct_ref1 = NULL;
	    }
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (ends_tbaa_access_path_p (base2))
	{
	  ref2 = TREE_OPERAND (base2, 0);
	  if (end_struct_ref2)
	    {
	      end_struct_past_end2 = true;
	      end_struct_ref2 = NULL;
	    }
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of the structure.  If this occurs in the TBAA part of
     the access path, we need to consider the increased type as well.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type2) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of the other.  If we were not able to decide about
     equivalence, we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
				  ref1_alias_set,
				  type2, end_struct_ref2,
				  base2_alias_set)
      || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
				     ref2_alias_set,
				     type1, end_struct_ref1,
				     base1_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that the bases of both component refs are either equivalent or
   nonoverlapping.  We do not assume that the containers of FIELD1 and
   FIELD2 are of the same type or size.

   Return 0 if, in the case the base addresses of the component refs are
   the same, FIELD1 and FIELD2 have the same address too.  Note that
   FIELD1 and FIELD2 may not be of the same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   The main difference between 0 and -1 is to let
   nonoverlapping_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save the hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of the access path.  */
  return -1;
}
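
/* To illustrate the return-value convention above: for

     struct S { int a; int b; } *p, *q;

   the FIELD_DECLs behind p->a and q->b live in the same RECORD_TYPE and
   differ, so the result is 1 (disjoint), while p->a and q->a yield 0
   (same offset from the base).  Illustration only, no extra logic.  */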

/* Return the low bound of an array.  Do not produce new trees
   and thus do not care about the particular type of integer constant
   and placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE
     of the domain type, or it is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}

/* REF1 and REF2 are ARRAY_REFs which either have the same base address or
   are completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts at the same address.
   Return -1 otherwise.  */

int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes the size in alignment units.
     Otherwise the size is TYPE_SIZE of the element type.
     Handle only common cases where types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle the integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}
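
/* E.g. for

     int a[10];

   a[1] vs. a[2] yields 1 (disjoint), a[1] vs. a[1] yields 0 (same
   address) and a[i] vs. a[j] yields 0 since any overlap starts at the
   same address.  An illustration of the convention above, not extra
   logic.  */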

/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively, or NULL in the case we established equivalence of
   bases.
   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
   overlap by an exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA, thus it is safe for !flag_strict_aliasing if
   the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. there is a must-alias, assuming MATCH1 and MATCH2
   must alias).

   Return -1 if we cannot determine 0 or 1; this happens when a
   non-matching type was met in the path.
   In this case it may make sense to continue with other disambiguation
   oracles.  */

1572static int
1573nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1574 tree match2, tree ref2,
1575 bool partial_overlap)
1576{
1577 int ntbaa1 = 0, ntbaa2 = 0;
1578 /* Early return if there are no references to match, we do not need
1579 to walk the access paths.
1580
1581 Do not consider this as may-alias for stats - it is more useful
1582 to have information how many disambiguations happened provided that
1583 the query was meaningful. */
1584
1585 if (match1 == ref1 || !handled_component_p (t: ref1)
1586 || match2 == ref2 || !handled_component_p (t: ref2))
1587 return -1;
1588
1589 auto_vec<tree, 16> component_refs1;
1590 auto_vec<tree, 16> component_refs2;
1591
1592 /* Create the stack of handled components for REF1. */
1593 while (handled_component_p (t: ref1) && ref1 != match1)
1594 {
1595 /* We use TBAA only to re-synchronize after mismatched refs. So we
1596 do not need to truncate access path after TBAA part ends. */
1597 if (ends_tbaa_access_path_p (ref1))
1598 ntbaa1 = 0;
1599 else
1600 ntbaa1++;
1601 component_refs1.safe_push (obj: ref1);
1602 ref1 = TREE_OPERAND (ref1, 0);
1603 }
1604
1605 /* Create the stack of handled components for REF2. */
1606 while (handled_component_p (t: ref2) && ref2 != match2)
1607 {
1608 if (ends_tbaa_access_path_p (ref2))
1609 ntbaa2 = 0;
1610 else
1611 ntbaa2++;
1612 component_refs2.safe_push (obj: ref2);
1613 ref2 = TREE_OPERAND (ref2, 0);
1614 }
1615
1616 if (!flag_strict_aliasing)
1617 {
1618 ntbaa1 = 0;
1619 ntbaa2 = 0;
1620 }
1621
1622 bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1623 bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1624
1625 /* If only one of access path starts with MEM_REF check that offset is 0
1626 so the addresses stays the same after stripping it.
1627 TODO: In this case we may walk the other access path until we get same
1628 offset.
1629
1630 If both starts with MEM_REF, offset has to be same. */
1631 if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1632 || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1633 || (mem_ref1 && mem_ref2
1634 && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1635 TREE_OPERAND (ref2, 1))))
1636 {
1637 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1638 return -1;
1639 }
1640
1641 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1642 to handle them here at all. */
1643 gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1644 && TREE_CODE (ref2) != TARGET_MEM_REF);
1645
1646 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1647 rank. This is sufficient because we start from the same DECL and you
1648 cannot reference several fields at a time with COMPONENT_REFs (unlike
1649 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1650 of them to access a sub-component, unless you're in a union, in which
1651 case the return value will precisely be false. */
1652 while (true)
1653 {
1654 /* Track if we seen unmatched ref with non-zero offset. In this case
1655 we must look for partial overlaps. */
1656 bool seen_unmatched_ref_p = false;
1657
1658 /* First match ARRAY_REFs an try to disambiguate. */
1659 if (!component_refs1.is_empty ()
1660 && !component_refs2.is_empty ())
1661 {
1662 unsigned int narray_refs1=0, narray_refs2=0;
1663
1664 /* We generally assume that both access paths starts by same sequence
1665 of refs. However if number of array refs is not in sync, try
1666 to recover and pop elts until number match. This helps the case
1667 where one access path starts by array and other by element. */
1668 for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1669 narray_refs1++)
1670 if (TREE_CODE (component_refs1[component_refs1.length ()
1671 - 1 - narray_refs1]) != ARRAY_REF)
1672 break;
1673
1674 for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1675 narray_refs2++)
1676 if (TREE_CODE (component_refs2[component_refs2.length ()
1677 - 1 - narray_refs2]) != ARRAY_REF)
1678 break;
1679 for (; narray_refs1 > narray_refs2; narray_refs1--)
1680 {
1681 ref1 = component_refs1.pop ();
1682 ntbaa1--;
1683
1684 /* If the index is non-zero we need to check that the reference
1685 does not break the main invariant that bases are either
1686 disjoint or equal. Consider the example:
1687
1688 unsigned char out[][1];
1689 out[1]="a";
1690 out[i][0];
1691
1692 Here the bases out and out are the same, but after removing the
1693 [i] index the invariant no longer holds, because
1694 out[i] points into the middle of the array out.
1695
1696 TODO: If the size of the type of the skipped reference is an integer
1697 multiple of the size of the type of the other reference this
1698 invariant can be verified, but even then it is not completely
1699 safe with !flag_strict_aliasing if the other reference contains
1700 unbounded array accesses.
1701 See */
1702
1703 if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1704 cheap_array_ref_low_bound (ref: ref1), flags: 0))
1705 return 0;
1706 }
1707 for (; narray_refs2 > narray_refs1; narray_refs2--)
1708 {
1709 ref2 = component_refs2.pop ();
1710 ntbaa2--;
1711 if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1712 cheap_array_ref_low_bound (ref: ref2), flags: 0))
1713 return 0;
1714 }
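  /* For example (illustrative only): with

       char a[8][8];

     one access path may end with the single ARRAY_REF a[1] (a whole
     row) while the other ends with the two ARRAY_REFs of a[i][j]. The
     loops above pop the extra ARRAY_REF so that both stacks again hold
     the same number of them; if a popped index is not the array's low
     bound we had to give up, since the skipped ref may point into the
     middle of the outer array. */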
1715 /* Try to disambiguate matched arrays. */
1716 for (unsigned int i = 0; i < narray_refs1; i++)
1717 {
1718 int cmp = nonoverlapping_array_refs_p (ref1: component_refs1.pop (),
1719 ref2: component_refs2.pop ());
1720 ntbaa1--;
1721 ntbaa2--;
1722 if (cmp == 1 && !partial_overlap)
1723 {
1724 ++alias_stats
1725 .nonoverlapping_refs_since_match_p_no_alias;
1726 return 1;
1727 }
1728 if (cmp == -1)
1729 {
1730 seen_unmatched_ref_p = true;
1731 /* We cannot maintain the invariant that bases are either
1732 the same or completely disjoint. However we can still recover
1733 via type-based alias analysis if we reach references of the
1734 same size. We do not attempt to match array sizes, so
1735 just finish walking the arrays and look for component refs. */
1736 if (ntbaa1 < 0 || ntbaa2 < 0)
1737 {
1738 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1739 return -1;
1740 }
1741 for (i++; i < narray_refs1; i++)
1742 {
1743 component_refs1.pop ();
1744 component_refs2.pop ();
1745 ntbaa1--;
1746 ntbaa2--;
1747 }
1748 break;
1749 }
1750 partial_overlap = false;
1751 }
1752 }
1753
1754 /* Next look for COMPONENT_REFs. */
1755 do
1756 {
1757 if (component_refs1.is_empty ())
1758 {
1759 ++alias_stats
1760 .nonoverlapping_refs_since_match_p_must_overlap;
1761 return 0;
1762 }
1763 ref1 = component_refs1.pop ();
1764 ntbaa1--;
1765 if (TREE_CODE (ref1) != COMPONENT_REF)
1766 {
1767 seen_unmatched_ref_p = true;
1768 if (ntbaa1 < 0 || ntbaa2 < 0)
1769 {
1770 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1771 return -1;
1772 }
1773 }
1774 }
1775 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1776
1777 do
1778 {
1779 if (component_refs2.is_empty ())
1780 {
1781 ++alias_stats
1782 .nonoverlapping_refs_since_match_p_must_overlap;
1783 return 0;
1784 }
1785 ref2 = component_refs2.pop ();
1786 ntbaa2--;
1787 if (TREE_CODE (ref2) != COMPONENT_REF)
1788 {
1789 if (ntbaa1 < 0 || ntbaa2 < 0)
1790 {
1791 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1792 return -1;
1793 }
1794 seen_unmatched_ref_p = true;
1795 }
1796 }
1797 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1798
1799 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1800 earlier. */
1801 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1802 && TREE_CODE (ref2) == COMPONENT_REF);
1803
1804 tree field1 = TREE_OPERAND (ref1, 1);
1805 tree field2 = TREE_OPERAND (ref2, 1);
1806
1807 /* ??? We cannot simply use the type of operand #0 of the refs here
1808 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1809 for common blocks instead of using unions like everyone else. */
1810 tree type1 = DECL_CONTEXT (field1);
1811 tree type2 = DECL_CONTEXT (field2);
1812
1813 partial_overlap = false;
1814
1815 /* If we skipped array refs on types of different sizes, we can
1816 no longer be sure that there are no partial overlaps. */
1817 if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1818 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), flags: 0))
1819 {
1820 ++alias_stats
1821 .nonoverlapping_refs_since_match_p_may_alias;
1822 return -1;
1823 }
1824
1825 int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1826 if (cmp == -1)
1827 {
1828 ++alias_stats
1829 .nonoverlapping_refs_since_match_p_may_alias;
1830 return -1;
1831 }
1832 else if (cmp == 1)
1833 {
1834 ++alias_stats
1835 .nonoverlapping_refs_since_match_p_no_alias;
1836 return 1;
1837 }
1838 }
1839}
1840
1841/* Return a TYPE_UID which can be used to match record types we consider
1842 the same for TBAA purposes. */
1843
1844static inline int
1845ncr_type_uid (const_tree field)
1846{
1847 /* ??? We cannot simply use the type of operand #0 of the refs here
1848 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1849 for common blocks instead of using unions like everyone else. */
1850 tree type = DECL_FIELD_CONTEXT (field);
1851 /* With LTO, types from different translation units that are
1852 considered same_type_for_tbaa_p may not have the same main
1853 variant. They do, however, have the same TYPE_CANONICAL. */
1854 if (TYPE_CANONICAL (type))
1855 return TYPE_UID (TYPE_CANONICAL (type));
1856 return TYPE_UID (type);
1857}
1858
1859/* qsort compare function to sort FIELD_DECLs by the TYPE_UID of
1860 their DECL_FIELD_CONTEXT. */
1861
1862static inline int
1863ncr_compar (const void *field1_, const void *field2_)
1864{
1865 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1866 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1867 unsigned int uid1 = ncr_type_uid (field: field1);
1868 unsigned int uid2 = ncr_type_uid (field: field2);
1869
1870 if (uid1 < uid2)
1871 return -1;
1872 else if (uid1 > uid2)
1873 return 1;
1874 return 0;
1875}
1876
1877/* Return true if we can determine that the fields referenced cannot
1878 overlap for any pair of objects. This relies on TBAA. */
1879
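/* A minimal example (illustrative only) of what this catches:

     struct S { int i; int j; } *p, *q;
     p->i = 1;
     ... = q->j;

   Both access paths contain a COMPONENT_REF of the same RECORD_TYPE S
   but of different, non-overlapping fields, so whatever P and Q point
   to, the two accesses cannot overlap. */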
1880static bool
1881nonoverlapping_component_refs_p (const_tree x, const_tree y)
1882{
1883 /* Early return if we have nothing to do.
1884
1885 Do not count this as may-alias for the stats - it is more useful
1886 to know how many disambiguations happened provided that the query
1887 was meaningful. */
1888 if (!flag_strict_aliasing
1889 || !x || !y
1890 || !handled_component_p (t: x)
1891 || !handled_component_p (t: y))
1892 return false;
1893
1894 auto_vec<const_tree, 16> fieldsx;
1895 while (handled_component_p (t: x))
1896 {
1897 if (TREE_CODE (x) == COMPONENT_REF)
1898 {
1899 tree field = TREE_OPERAND (x, 1);
1900 tree type = DECL_FIELD_CONTEXT (field);
1901 if (TREE_CODE (type) == RECORD_TYPE)
1902 fieldsx.safe_push (obj: field);
1903 }
1904 else if (ends_tbaa_access_path_p (x))
1905 fieldsx.truncate (size: 0);
1906 x = TREE_OPERAND (x, 0);
1907 }
1908 if (fieldsx.length () == 0)
1909 return false;
1910 auto_vec<const_tree, 16> fieldsy;
1911 while (handled_component_p (t: y))
1912 {
1913 if (TREE_CODE (y) == COMPONENT_REF)
1914 {
1915 tree field = TREE_OPERAND (y, 1);
1916 tree type = DECL_FIELD_CONTEXT (field);
1917 if (TREE_CODE (type) == RECORD_TYPE)
1918 fieldsy.safe_push (TREE_OPERAND (y, 1));
1919 }
1920 else if (ends_tbaa_access_path_p (y))
1921 fieldsy.truncate (size: 0);
1922 y = TREE_OPERAND (y, 0);
1923 }
1924 if (fieldsy.length () == 0)
1925 {
1926 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1927 return false;
1928 }
1929
1930 /* Most common case first. */
1931 if (fieldsx.length () == 1
1932 && fieldsy.length () == 1)
1933 {
1934 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1935 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1936 && nonoverlapping_component_refs_p_1 (field1: fieldsx[0], field2: fieldsy[0]) == 1)
1937 {
1938 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1939 return true;
1940 }
1941 else
1942 {
1943 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1944 return false;
1945 }
1946 }
1947
1948 if (fieldsx.length () == 2)
1949 {
1950 if (ncr_compar (field1_: &fieldsx[0], field2_: &fieldsx[1]) == 1)
1951 std::swap (a&: fieldsx[0], b&: fieldsx[1]);
1952 }
1953 else
1954 fieldsx.qsort (ncr_compar);
1955
1956 if (fieldsy.length () == 2)
1957 {
1958 if (ncr_compar (field1_: &fieldsy[0], field2_: &fieldsy[1]) == 1)
1959 std::swap (a&: fieldsy[0], b&: fieldsy[1]);
1960 }
1961 else
1962 fieldsy.qsort (ncr_compar);
1963
1964 unsigned i = 0, j = 0;
1965 do
1966 {
1967 const_tree fieldx = fieldsx[i];
1968 const_tree fieldy = fieldsy[j];
1969
1970 /* If we're left with accessing different fields of the same
1971 structure, there is no possible overlap. */
1972 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1973 DECL_FIELD_CONTEXT (fieldy)) == 1
1974 && nonoverlapping_component_refs_p_1 (field1: fieldx, field2: fieldy) == 1)
1975 {
1976 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1977 return true;
1978 }
1979
1980 if (ncr_type_uid (field: fieldx) < ncr_type_uid (field: fieldy))
1981 {
1982 i++;
1983 if (i == fieldsx.length ())
1984 break;
1985 }
1986 else
1987 {
1988 j++;
1989 if (j == fieldsy.length ())
1990 break;
1991 }
1992 }
1993 while (1);
1994
1995 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1996 return false;
1997}
1998
1999
2000/* Return true if two memory references based on the variables BASE1
2001 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2002 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
2003 if non-NULL are the complete memory reference trees. */
2004
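/* For illustration (example only): for

     int a[4], b[4];
     a[1] = ...;  ... = b[1];

   the bases A and B are distinct decls, so the refs cannot alias. For
   a[1] vs. a[2] the bases compare equal but the bit ranges [32, 64)
   and [64, 96) do not overlap, so they cannot alias either. */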
2005static bool
2006decl_refs_may_alias_p (tree ref1, tree base1,
2007 poly_int64 offset1, poly_int64 max_size1,
2008 poly_int64 size1,
2009 tree ref2, tree base2,
2010 poly_int64 offset2, poly_int64 max_size2,
2011 poly_int64 size2)
2012{
2013 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
2014
2015 /* If the references are based on different variables, they cannot alias. */
2016 if (compare_base_decls (base1, base2) == 0)
2017 return false;
2018
2019 /* If both references are based on the same variable, they cannot alias if
2020 the accesses do not overlap. */
2021 if (!ranges_maybe_overlap_p (pos1: offset1, size1: max_size1, pos2: offset2, size2: max_size2))
2022 return false;
2023
2024 /* If there is a must-alias, there is no point disambiguating further. */
2025 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2026 return true;
2027
2028 /* For components with variable position, the above test isn't sufficient,
2029 so we disambiguate component references manually. */
2030 if (ref1 && ref2
2031 && handled_component_p (t: ref1) && handled_component_p (t: ref2)
2032 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, partial_overlap: false) == 1)
2033 return false;
2034
2035 return true;
2036}
2037
2038/* Return true if the access with base BASE is view-converted.
2039 BASE must not have had its inner MEM_REF (&decl) stripped,
2040 as ao_ref_base does, and thus one extra walk
2041 of handled components is needed. */
2042
2043static bool
2044view_converted_memref_p (tree base)
2045{
2046 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2047 return false;
2048 return same_type_for_tbaa (TREE_TYPE (base),
2049 TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1)))) != 1;
2050}
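/* E.g. (illustrative): an access like

     MEM <int> [(float *)&f]

   (as produced by folding a VIEW_CONVERT_EXPR) has access type int but
   pointer type float *, so it is view-converted; TBAA based on the
   access type alone would be wrong for it. */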
2051
2052/* Return true if an indirect reference based on *PTR1 constrained
2053 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2054 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2055 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2056 in which case they are computed on-demand. REF1 and REF2
2057 if non-NULL are the complete memory reference trees. */
2058
2059static bool
2060indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2061 poly_int64 offset1, poly_int64 max_size1,
2062 poly_int64 size1,
2063 alias_set_type ref1_alias_set,
2064 alias_set_type base1_alias_set,
2065 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2066 poly_int64 offset2, poly_int64 max_size2,
2067 poly_int64 size2,
2068 alias_set_type ref2_alias_set,
2069 alias_set_type base2_alias_set, bool tbaa_p)
2070{
2071 tree ptr1;
2072 tree ptrtype1, dbase2;
2073
2074 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2075 || TREE_CODE (base1) == TARGET_MEM_REF)
2076 && DECL_P (base2));
2077
2078 ptr1 = TREE_OPERAND (base1, 0);
2079 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2080
2081 /* If only one reference is based on a variable, they cannot alias if
2082 the pointer access is beyond the extent of the variable access.
2083 (the pointer base cannot validly point to an offset less than zero
2084 of the variable).
2085 ??? IVOPTs creates bases that do not honor this restriction,
2086 so do not apply this optimization for TARGET_MEM_REFs. */
2087 if (TREE_CODE (base1) != TARGET_MEM_REF
2088 && !ranges_maybe_overlap_p (pos1: offset1 + moff, size1: -1, pos2: offset2, size2: max_size2))
2089 return false;
2090
2091 /* If the pointer-based access is bigger than the variable, they cannot
2092 alias. This is similar to the check below where we use TBAA to
2093 increase the size of the pointer-based access based on the dynamic
2094 type of a containing object we can infer from it. */
2095 poly_int64 dsize2;
2096 if (known_size_p (a: size1)
2097 && poly_int_tree_p (DECL_SIZE (base2), value: &dsize2)
2098 && known_lt (dsize2, size1))
2099 return false;
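  /* E.g. (illustrative): with

       char c;
       ... = *(int *) p;

     the known 32-bit load cannot be a load from the 8-bit decl C, no
     matter where P points. */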
2100
2101 /* They also cannot alias if the pointer may not point to the decl. */
2102 if (!ptr_deref_may_alias_decl_p (ptr: ptr1, decl: base2))
2103 return false;
2104
2105 /* Disambiguations that rely on strict aliasing rules follow. */
2106 if (!flag_strict_aliasing || !tbaa_p)
2107 return true;
2108
2109 /* If the alias set for a pointer access is zero all bets are off. */
2110 if (base1_alias_set == 0 || base2_alias_set == 0)
2111 return true;
2112
2113 /* When we are trying to disambiguate an access with a pointer dereference
2114 as base versus one with a decl as base we can use both the size
2115 of the decl and its dynamic type for extra disambiguation.
2116 ??? We do not know anything about the dynamic type of the decl
2117 other than that its alias-set contains base2_alias_set as a subset
2118 which does not help us here. */
2119 /* As we know nothing useful about the dynamic type of the decl just
2120 use the usual conflict check rather than a subset test.
2121 ??? We could introduce -fvery-strict-aliasing when the language
2122 does not allow decls to have a dynamic type that differs from their
2123 static type. Then we can check
2124 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2125 if (base1_alias_set != base2_alias_set
2126 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2127 return false;
2128
2129 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2130
2131 /* If the size of the access relevant for TBAA through the pointer
2132 is bigger than the size of the decl we can't possibly access the
2133 decl via that pointer. */
2134 if (/* ??? This in turn may run afoul of the case where a decl of type T
2135 which is a member of union type U is accessed through a pointer to
2136 type U and sizeof T is smaller than sizeof U. */
2137 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2138 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2139 && compare_sizes (DECL_SIZE (base2),
2140 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2141 return false;
2142
2143 if (!ref2)
2144 return true;
2145
2146 /* If the decl is accessed via a MEM_REF, reconstruct the base
2147 we can use for TBAA and an appropriately adjusted offset. */
2148 dbase2 = ref2;
2149 while (handled_component_p (t: dbase2))
2150 dbase2 = TREE_OPERAND (dbase2, 0);
2151 poly_int64 doffset1 = offset1;
2152 poly_offset_int doffset2 = offset2;
2153 if (TREE_CODE (dbase2) == MEM_REF
2154 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2155 {
2156 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2157 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2158 /* If second reference is view-converted, give up now. */
2159 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2160 return true;
2161 }
2162
2163 /* If first reference is view-converted, give up now. */
2164 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2165 return true;
2166
2167 /* If both references are through the same type, they do not alias
2168 if the accesses do not overlap. This does extra disambiguation
2169 for mixed/pointer accesses but requires strict aliasing.
2170 For MEM_REFs we require that the component-ref offset we computed
2171 is relative to the start of the type which we ensure by
2172 comparing rvalue and access type and disregarding the constant
2173 pointer offset.
2174
2175 But avoid treating variable-length arrays as "objects"; instead assume they
2176 can overlap by an exact multiple of their element size.
2177 See gcc.dg/torture/alias-2.c. */
2178 if (((TREE_CODE (base1) != TARGET_MEM_REF
2179 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2180 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2181 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2182 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2183 {
2184 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2185 && (TYPE_SIZE (TREE_TYPE (base1))
2186 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2187 != INTEGER_CST));
2188 if (!partial_overlap
2189 && !ranges_maybe_overlap_p (pos1: doffset1, size1: max_size1, pos2: doffset2, size2: max_size2))
2190 return false;
2191 if (!ref1 || !ref2
2192 /* If there is a must-alias, there is no point disambiguating further. */
2193 || (!partial_overlap
2194 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2195 return true;
2196 int res = nonoverlapping_refs_since_match_p (match1: base1, ref1, match2: base2, ref2,
2197 partial_overlap);
2198 if (res == -1)
2199 return !nonoverlapping_component_refs_p (x: ref1, y: ref2);
2200 return !res;
2201 }
2202
2203 /* Do access-path based disambiguation. */
2204 if (ref1 && ref2
2205 && (handled_component_p (t: ref1) || handled_component_p (t: ref2)))
2206 return aliasing_component_refs_p (ref1,
2207 ref1_alias_set, base1_alias_set,
2208 offset1, max_size1,
2209 ref2,
2210 ref2_alias_set, base2_alias_set,
2211 offset2, max_size2);
2212
2213 return true;
2214}
2215
2216/* Return true if two indirect references based on *PTR1
2217 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2218 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2219 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2220 in which case they are computed on-demand. REF1 and REF2
2221 if non-NULL are the complete memory reference trees. */
2222
2223static bool
2224indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2225 poly_int64 offset1, poly_int64 max_size1,
2226 poly_int64 size1,
2227 alias_set_type ref1_alias_set,
2228 alias_set_type base1_alias_set,
2229 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2230 poly_int64 offset2, poly_int64 max_size2,
2231 poly_int64 size2,
2232 alias_set_type ref2_alias_set,
2233 alias_set_type base2_alias_set, bool tbaa_p)
2234{
2235 tree ptr1;
2236 tree ptr2;
2237 tree ptrtype1, ptrtype2;
2238
2239 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2240 || TREE_CODE (base1) == TARGET_MEM_REF)
2241 && (TREE_CODE (base2) == MEM_REF
2242 || TREE_CODE (base2) == TARGET_MEM_REF));
2243
2244 ptr1 = TREE_OPERAND (base1, 0);
2245 ptr2 = TREE_OPERAND (base2, 0);
2246
2247 /* If both bases are based on pointers they cannot alias if they may not
2248 point to the same memory object or if they point to the same object
2249 and the accesses do not overlap. */
2250 if ((!cfun || gimple_in_ssa_p (cfun))
2251 && operand_equal_p (ptr1, ptr2, flags: 0)
2252 && (((TREE_CODE (base1) != TARGET_MEM_REF
2253 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2254 && (TREE_CODE (base2) != TARGET_MEM_REF
2255 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2256 || (TREE_CODE (base1) == TARGET_MEM_REF
2257 && TREE_CODE (base2) == TARGET_MEM_REF
2258 && (TMR_STEP (base1) == TMR_STEP (base2)
2259 || (TMR_STEP (base1) && TMR_STEP (base2)
2260 && operand_equal_p (TMR_STEP (base1),
2261 TMR_STEP (base2), flags: 0)))
2262 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2263 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2264 && operand_equal_p (TMR_INDEX (base1),
2265 TMR_INDEX (base2), flags: 0)))
2266 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2267 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2268 && operand_equal_p (TMR_INDEX2 (base1),
2269 TMR_INDEX2 (base2), flags: 0))))))
2270 {
2271 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2272 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2273 if (!ranges_maybe_overlap_p (pos1: offset1 + moff1, size1: max_size1,
2274 pos2: offset2 + moff2, size2: max_size2))
2275 return false;
2276 /* If there is a must-alias, there is no point disambiguating further. */
2277 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2278 return true;
2279 if (ref1 && ref2)
2280 {
2281 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2282 partial_overlap: false);
2283 if (res != -1)
2284 return !res;
2285 }
2286 }
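  /* E.g. (illustrative): for the accesses

       MEM[p + 0]  vs.  MEM[p + 4]

     of 32 bits each, the bases use the same pointer P and the adjusted
     bit ranges [0, 32) and [32, 64) do not overlap, so the refs cannot
     alias regardless of where P points. */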
2287 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2288 return false;
2289
2290 /* Disambiguations that rely on strict aliasing rules follow. */
2291 if (!flag_strict_aliasing || !tbaa_p)
2292 return true;
2293
2294 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2295 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2296
2297 /* If the alias set for a pointer access is zero all bets are off. */
2298 if (base1_alias_set == 0
2299 || base2_alias_set == 0)
2300 return true;
2301
2302 /* Do type-based disambiguation. */
2303 if (base1_alias_set != base2_alias_set
2304 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2305 return false;
2306
2307 /* If either reference is view-converted, give up now. */
2308 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2309 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2310 return true;
2311
2312 /* If both references are through the same type, they do not alias
2313 if the accesses do not overlap. This does extra disambiguation
2314 for mixed/pointer accesses but requires strict aliasing. */
2315 if ((TREE_CODE (base1) != TARGET_MEM_REF
2316 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2317 && (TREE_CODE (base2) != TARGET_MEM_REF
2318 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2319 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2320 TREE_TYPE (ptrtype2)) == 1)
2321 {
2322 /* But avoid treating arrays as "objects"; instead assume they
2323 can overlap by an exact multiple of their element size.
2324 See gcc.dg/torture/alias-2.c. */
2325 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2326
2327 if (!partial_overlap
2328 && !ranges_maybe_overlap_p (pos1: offset1, size1: max_size1, pos2: offset2, size2: max_size2))
2329 return false;
2330 if (!ref1 || !ref2
2331 || (!partial_overlap
2332 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2333 return true;
2334 int res = nonoverlapping_refs_since_match_p (match1: base1, ref1, match2: base2, ref2,
2335 partial_overlap);
2336 if (res == -1)
2337 return !nonoverlapping_component_refs_p (x: ref1, y: ref2);
2338 return !res;
2339 }
2340
2341 /* Do access-path based disambiguation. */
2342 if (ref1 && ref2
2343 && (handled_component_p (t: ref1) || handled_component_p (t: ref2)))
2344 return aliasing_component_refs_p (ref1,
2345 ref1_alias_set, base1_alias_set,
2346 offset1, max_size1,
2347 ref2,
2348 ref2_alias_set, base2_alias_set,
2349 offset2, max_size2);
2350
2351 return true;
2352}
2353
2354/* Return true if the two memory references REF1 and REF2 may alias. */
2355
2356static bool
2357refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2358{
2359 tree base1, base2;
2360 poly_int64 offset1 = 0, offset2 = 0;
2361 poly_int64 max_size1 = -1, max_size2 = -1;
2362 bool var1_p, var2_p, ind1_p, ind2_p;
2363
2364 gcc_checking_assert ((!ref1->ref
2365 || TREE_CODE (ref1->ref) == SSA_NAME
2366 || DECL_P (ref1->ref)
2367 || TREE_CODE (ref1->ref) == STRING_CST
2368 || handled_component_p (ref1->ref)
2369 || TREE_CODE (ref1->ref) == MEM_REF
2370 || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2371 || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2372 && (!ref2->ref
2373 || TREE_CODE (ref2->ref) == SSA_NAME
2374 || DECL_P (ref2->ref)
2375 || TREE_CODE (ref2->ref) == STRING_CST
2376 || handled_component_p (ref2->ref)
2377 || TREE_CODE (ref2->ref) == MEM_REF
2378 || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2379 || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2380
2381 /* Decompose the references into their base objects and the access. */
2382 base1 = ao_ref_base (ref: ref1);
2383 offset1 = ref1->offset;
2384 max_size1 = ref1->max_size;
2385 base2 = ao_ref_base (ref: ref2);
2386 offset2 = ref2->offset;
2387 max_size2 = ref2->max_size;
2388
2389 /* We can end up with registers or constants as bases for example from
2390 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2391 which is seen as a struct copy. */
2392 if (TREE_CODE (base1) == SSA_NAME
2393 || TREE_CODE (base1) == CONST_DECL
2394 || TREE_CODE (base1) == CONSTRUCTOR
2395 || TREE_CODE (base1) == ADDR_EXPR
2396 || CONSTANT_CLASS_P (base1)
2397 || TREE_CODE (base2) == SSA_NAME
2398 || TREE_CODE (base2) == CONST_DECL
2399 || TREE_CODE (base2) == CONSTRUCTOR
2400 || TREE_CODE (base2) == ADDR_EXPR
2401 || CONSTANT_CLASS_P (base2))
2402 return false;
2403
2404 /* Two volatile accesses always conflict. */
2405 if (ref1->volatile_p
2406 && ref2->volatile_p)
2407 return true;
2408
2409 /* refN->ref may convey size information; do not confuse our workers
2410 with that but strip it - ao_ref_base took it into account already. */
2411 tree ref1ref = ref1->ref;
2412 if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2413 ref1ref = TREE_OPERAND (ref1ref, 0);
2414 tree ref2ref = ref2->ref;
2415 if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2416 ref2ref = TREE_OPERAND (ref2ref, 0);
2417
2418 /* Defer to simple offset-based disambiguation if we have
2419 references based on two decls. Do this before deferring to
2420 TBAA to handle must-alias cases in conformance with the
2421 GCC extension of allowing type-punning through unions. */
2422 var1_p = DECL_P (base1);
2423 var2_p = DECL_P (base2);
2424 if (var1_p && var2_p)
2425 return decl_refs_may_alias_p (ref1: ref1ref, base1, offset1, max_size1,
2426 size1: ref1->size,
2427 ref2: ref2ref, base2, offset2, max_size2,
2428 size2: ref2->size);
2429
2430 /* We can end up referring to code via function and label decls.
2431 As we likely do not properly track code aliases, conservatively
2432 bail out. */
2433 if (TREE_CODE (base1) == FUNCTION_DECL
2434 || TREE_CODE (base1) == LABEL_DECL
2435 || TREE_CODE (base2) == FUNCTION_DECL
2436 || TREE_CODE (base2) == LABEL_DECL)
2437 return true;
2438
2439 /* Handle restrict-based accesses.
2440 ??? ao_ref_base strips the inner MEM_REF [&decl]; recover from that
2441 here. */
2442 tree rbase1 = base1;
2443 tree rbase2 = base2;
2444 if (var1_p)
2445 {
2446 rbase1 = ref1ref;
2447 if (rbase1)
2448 while (handled_component_p (t: rbase1))
2449 rbase1 = TREE_OPERAND (rbase1, 0);
2450 }
2451 if (var2_p)
2452 {
2453 rbase2 = ref2ref;
2454 if (rbase2)
2455 while (handled_component_p (t: rbase2))
2456 rbase2 = TREE_OPERAND (rbase2, 0);
2457 }
2458 if (rbase1 && rbase2
2459 && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
2460 && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
2461 /* If the accesses are in the same restrict clique... */
2462 && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
2463 /* But based on different pointers they do not alias. */
2464 && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
2465 return false;
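  /* E.g. (illustrative): in

       void f (int *restrict p, int *restrict q)
       { *p = 1; *q = 2; }

     both MEM_REFs are members of the same dependence clique but have
     different dependence bases, so the two stores are disambiguated
     here without consulting points-to information. */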
2466
2467 ind1_p = (TREE_CODE (base1) == MEM_REF
2468 || TREE_CODE (base1) == TARGET_MEM_REF);
2469 ind2_p = (TREE_CODE (base2) == MEM_REF
2470 || TREE_CODE (base2) == TARGET_MEM_REF);
2471
2472 /* Canonicalize the pointer-vs-decl case. */
2473 if (ind1_p && var2_p)
2474 {
2475 std::swap (a&: offset1, b&: offset2);
2476 std::swap (a&: max_size1, b&: max_size2);
2477 std::swap (a&: base1, b&: base2);
2478 std::swap (a&: ref1, b&: ref2);
2479 std::swap (a&: ref1ref, b&: ref2ref);
2480 var1_p = true;
2481 ind1_p = false;
2482 var2_p = false;
2483 ind2_p = true;
2484 }
2485
2486 /* First defer to TBAA if possible. */
2487 if (tbaa_p
2488 && flag_strict_aliasing
2489 && !alias_sets_conflict_p (ao_ref_alias_set (ref: ref1),
2490 ao_ref_alias_set (ref: ref2)))
2491 return false;
2492
2493 /* If the reference is based on a pointer that points to memory
2494 that may not be written to then the other reference cannot possibly
2495 clobber it. */
2496 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2497 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2498 || (ind1_p
2499 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2500 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2501 return false;
2502
2503 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2504 if (var1_p && ind2_p)
2505 return indirect_ref_may_alias_decl_p (ref1: ref2ref, base1: base2,
2506 offset1: offset2, max_size1: max_size2, size1: ref2->size,
2507 ref1_alias_set: ao_ref_alias_set (ref: ref2),
2508 base1_alias_set: ao_ref_base_alias_set (ref: ref2),
2509 ref2: ref1ref, base2: base1,
2510 offset2: offset1, max_size2: max_size1, size2: ref1->size,
2511 ref2_alias_set: ao_ref_alias_set (ref: ref1),
2512 base2_alias_set: ao_ref_base_alias_set (ref: ref1),
2513 tbaa_p);
2514 else if (ind1_p && ind2_p)
2515 return indirect_refs_may_alias_p (ref1: ref1ref, base1,
2516 offset1, max_size1, size1: ref1->size,
2517 ref1_alias_set: ao_ref_alias_set (ref: ref1),
2518 base1_alias_set: ao_ref_base_alias_set (ref: ref1),
2519 ref2: ref2ref, base2,
2520 offset2, max_size2, size2: ref2->size,
2521 ref2_alias_set: ao_ref_alias_set (ref: ref2),
2522 base2_alias_set: ao_ref_base_alias_set (ref: ref2),
2523 tbaa_p);
2524
2525 gcc_unreachable ();
2526}
2527
2528/* Return true if the two memory references REF1 and REF2 may alias,
2529 and update statistics. */
2530
2531bool
2532refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2533{
2534 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2535 if (res)
2536 ++alias_stats.refs_may_alias_p_may_alias;
2537 else
2538 ++alias_stats.refs_may_alias_p_no_alias;
2539 return res;
2540}
2541
2542static bool
2543refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2544{
2545 ao_ref r1;
2546 ao_ref_init (r: &r1, ref: ref1);
2547 return refs_may_alias_p_1 (ref1: &r1, ref2, tbaa_p);
2548}
2549
2550bool
2551refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2552{
2553 ao_ref r1, r2;
2554 ao_ref_init (r: &r1, ref: ref1);
2555 ao_ref_init (r: &r2, ref: ref2);
2556 return refs_may_alias_p_1 (ref1: &r1, ref2: &r2, tbaa_p);
2557}
2558
2559/* Returns true if there is an anti-dependence for the STORE that
2560 executes after the LOAD. */
2561
2562bool
2563refs_anti_dependent_p (tree load, tree store)
2564{
2565 ao_ref r1, r2;
2566 ao_ref_init (r: &r1, ref: load);
2567 ao_ref_init (r: &r2, ref: store);
2568 return refs_may_alias_p_1 (ref1: &r1, ref2: &r2, tbaa_p: false);
2569}
2570
2571/* Returns true if there is an output dependence for the stores
2572 STORE1 and STORE2. */
2573
2574bool
2575refs_output_dependent_p (tree store1, tree store2)
2576{
2577 ao_ref r1, r2;
2578 ao_ref_init (r: &r1, ref: store1);
2579 ao_ref_init (r: &r2, ref: store2);
2580 return refs_may_alias_p_1 (ref1: &r1, ref2: &r2, tbaa_p: false);
2581}
2582
2583/* Returns true if and only if REF may alias any access stored in TT.
2584 If TBAA_P is true, use the TBAA oracle. */
2585
2586static bool
2587modref_may_conflict (const gcall *stmt,
2588 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2589{
2590 alias_set_type base_set, ref_set;
2591 bool global_memory_ok = false;
2592
2593 if (tt->every_base)
2594 return true;
2595
2596 if (!dbg_cnt (index: ipa_mod_ref))
2597 return true;
2598
2599 base_set = ao_ref_base_alias_set (ref);
2600
2601 ref_set = ao_ref_alias_set (ref);
2602
2603 int num_tests = 0, max_tests = param_modref_max_tests;
2604 for (auto base_node : tt->bases)
2605 {
2606 if (tbaa_p && flag_strict_aliasing)
2607 {
2608 if (num_tests >= max_tests)
2609 return true;
2610 alias_stats.modref_tests++;
2611 if (!alias_sets_conflict_p (base_set, base_node->base))
2612 continue;
2613 num_tests++;
2614 }
2615
2616 if (base_node->every_ref)
2617 return true;
2618
2619 for (auto ref_node : base_node->refs)
2620 {
2621 /* Do not repeat the same test as before. */
2622 if ((ref_set != base_set || base_node->base != ref_node->ref)
2623 && tbaa_p && flag_strict_aliasing)
2624 {
2625 if (num_tests >= max_tests)
2626 return true;
2627 alias_stats.modref_tests++;
2628 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2629 continue;
2630 num_tests++;
2631 }
2632
2633 if (ref_node->every_access)
2634 return true;
2635
2636 /* TBAA checks did not disambiguate; try individual accesses. */
2637 for (auto access_node : ref_node->accesses)
2638 {
2639 if (num_tests >= max_tests)
2640 return true;
2641
2642 if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
2643 {
2644 if (global_memory_ok)
2645 continue;
2646 if (ref_may_alias_global_p (ref, escaped_local_p: true))
2647 return true;
2648 global_memory_ok = true;
2649 num_tests++;
2650 continue;
2651 }
2652
2653 tree arg = access_node.get_call_arg (stmt);
2654 if (!arg)
2655 return true;
2656
2657 alias_stats.modref_baseptr_tests++;
2658
2659 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2660 continue;
2661
2662 /* The PTA oracle will be unhappy if ARG is not a pointer. */
2663 if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2664 return true;
2665
2666 /* If we don't have a base pointer, give up. */
2667 if (!ref->ref && !ref->base)
2668 continue;
2669
2670 ao_ref ref2;
2671 if (access_node.get_ao_ref (stmt, ref: &ref2))
2672 {
2673 ref2.ref_alias_set = ref_node->ref;
2674 ref2.base_alias_set = base_node->base;
2675 if (refs_may_alias_p_1 (ref1: &ref2, ref2: ref, tbaa_p))
2676 return true;
2677 }
2678 else if (ptr_deref_may_alias_ref_p_1 (ptr: arg, ref))
2679 return true;
2680
2681 num_tests++;
2682 }
2683 }
2684 }
2685 return false;
2686}
2687
2688/* Check if REF conflicts with a call using the "fn spec" attribute.
2689 If CLOBBER is true we are checking for writes, otherwise for loads.
2690
2691 Return 0 if there are no conflicts (except for possible function call
2692 argument reads), 1 if there are conflicts and -1 if we cannot decide
2693 from the fnspec. */
2694
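/* E.g. (illustrative; see attr-fnspec.h for the exact encoding): for

     memset (p, 0, n);

   the fnspec says that global memory is neither read nor written and
   that argument 0 is only written, with the maximum access size given
   by argument 2. For a clobber query we therefore build DREF covering
   N bytes from P and the answer reduces to
   refs_may_alias_p_1 (&dref, ref, false). */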
2695static int
2696check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2697{
2698 attr_fnspec fnspec = gimple_call_fnspec (stmt: call);
2699 if (fnspec.known_p ())
2700 {
2701 if (clobber
2702 ? !fnspec.global_memory_written_p ()
2703 : !fnspec.global_memory_read_p ())
2704 {
2705 for (unsigned int i = 0; i < gimple_call_num_args (gs: call); i++)
2706 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2707 && (!fnspec.arg_specified_p (i)
2708 || (clobber ? fnspec.arg_maybe_written_p (i)
2709 : fnspec.arg_maybe_read_p (i))))
2710 {
2711 ao_ref dref;
2712 tree size = NULL_TREE;
2713 unsigned int size_arg;
2714
2715 if (!fnspec.arg_specified_p (i))
2716 ;
2717 else if (fnspec.arg_max_access_size_given_by_arg_p
2718 (i, arg: &size_arg))
2719 size = gimple_call_arg (gs: call, index: size_arg);
2720 else if (fnspec.arg_access_size_given_by_type_p (i))
2721 {
2722 tree callee = gimple_call_fndecl (gs: call);
2723 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2724
2725 for (unsigned int p = 0; p < i; p++)
2726 t = TREE_CHAIN (t);
2727 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2728 }
2729 poly_int64 size_hwi;
2730 if (size
2731 && poly_int_tree_p (t: size, value: &size_hwi)
2732 && coeffs_in_range_p (a: size_hwi, b: 0,
2733 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
2734 {
2735 size_hwi = size_hwi * BITS_PER_UNIT;
2736 ao_ref_init_from_ptr_and_range (ref: &dref,
2737 ptr: gimple_call_arg (gs: call, index: i),
2738 range_known: true, offset: 0, size: -1, max_size: size_hwi);
2739 }
2740 else
2741 ao_ref_init_from_ptr_and_range (ref: &dref,
2742 ptr: gimple_call_arg (gs: call, index: i),
2743 range_known: false, offset: 0, size: -1, max_size: -1);
2744 if (refs_may_alias_p_1 (ref1: &dref, ref2: ref, tbaa_p: false))
2745 return 1;
2746 }
2747 if (clobber
2748 && fnspec.errno_maybe_written_p ()
2749 && flag_errno_math
2750 && targetm.ref_may_alias_errno (ref))
2751 return 1;
2752 return 0;
2753 }
2754 }
2755
2756 /* FIXME: we should handle barriers more consistently, but for now leave the
2757 check here. */
2758 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2759 switch (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: call)))
2760 {
2761 /* __sync_* builtins and some OpenMP builtins act as threading
2762 barriers. */
2763#undef DEF_SYNC_BUILTIN
2764#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2765#include "sync-builtins.def"
2766#undef DEF_SYNC_BUILTIN
2767 case BUILT_IN_GOMP_ATOMIC_START:
2768 case BUILT_IN_GOMP_ATOMIC_END:
2769 case BUILT_IN_GOMP_BARRIER:
2770 case BUILT_IN_GOMP_BARRIER_CANCEL:
2771 case BUILT_IN_GOMP_TASKWAIT:
2772 case BUILT_IN_GOMP_TASKGROUP_END:
2773 case BUILT_IN_GOMP_CRITICAL_START:
2774 case BUILT_IN_GOMP_CRITICAL_END:
2775 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2776 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2777 case BUILT_IN_GOMP_LOOP_END:
2778 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2779 case BUILT_IN_GOMP_ORDERED_START:
2780 case BUILT_IN_GOMP_ORDERED_END:
2781 case BUILT_IN_GOMP_SECTIONS_END:
2782 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2783 case BUILT_IN_GOMP_SINGLE_COPY_START:
2784 case BUILT_IN_GOMP_SINGLE_COPY_END:
2785 return 1;
2786
2787 default:
2788 return -1;
2789 }
2790 return -1;
2791}
2792
2793/* If the call CALL may use the memory reference REF return true,
2794 otherwise return false. */
2795
2796static bool
2797ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2798{
2799 tree base, callee;
2800 unsigned i;
2801 int flags = gimple_call_flags (call);
2802
2803 if (flags & (ECF_CONST|ECF_NOVOPS))
2804 goto process_args;
2805
2806 /* A call that is not without side-effects might involve volatile
2807 accesses and thus conflicts with all other volatile accesses. */
2808 if (ref->volatile_p)
2809 return true;
2810
2811 if (gimple_call_internal_p (gs: call))
2812 switch (gimple_call_internal_fn (gs: call))
2813 {
2814 case IFN_MASK_STORE:
2815 case IFN_SCATTER_STORE:
2816 case IFN_MASK_SCATTER_STORE:
2817 case IFN_LEN_STORE:
2818 case IFN_MASK_LEN_STORE:
2819 return false;
2820 case IFN_MASK_STORE_LANES:
2821 case IFN_MASK_LEN_STORE_LANES:
2822 goto process_args;
2823 case IFN_MASK_LOAD:
2824 case IFN_LEN_LOAD:
2825 case IFN_MASK_LEN_LOAD:
2826 case IFN_MASK_LOAD_LANES:
2827 case IFN_MASK_LEN_LOAD_LANES:
2828 {
2829 ao_ref rhs_ref;
2830 tree lhs = gimple_call_lhs (gs: call);
2831 if (lhs)
2832 {
2833 ao_ref_init_from_ptr_and_size (ref: &rhs_ref,
2834 ptr: gimple_call_arg (gs: call, index: 0),
2835 TYPE_SIZE_UNIT (TREE_TYPE (lhs)));
2836 /* We cannot make this a known-size access since otherwise
2837 we disambiguate against refs to decls that are smaller. */
2838 rhs_ref.size = -1;
2839 rhs_ref.ref_alias_set = rhs_ref.base_alias_set
2840 = tbaa_p ? get_deref_alias_set (TREE_TYPE
2841 (gimple_call_arg (call, 1))) : 0;
2842 return refs_may_alias_p_1 (ref1: ref, ref2: &rhs_ref, tbaa_p);
2843 }
2844 break;
2845 }
2846 default:;
2847 }
2848
2849 callee = gimple_call_fndecl (gs: call);
2850 if (callee != NULL_TREE)
2851 {
2852 struct cgraph_node *node = cgraph_node::get (decl: callee);
2853 /* We cannot safely optimize based on the summary of the callee if it
2854 does not always bind to the current def: it is possible that a memory
2855 load was optimized out earlier and the interposed variant may not be
2856 optimized this way. */
2857 if (node && node->binds_to_current_def_p ())
2858 {
2859 modref_summary *summary = get_modref_function_summary (func: node);
2860 if (summary && !summary->calls_interposable)
2861 {
2862 if (!modref_may_conflict (stmt: call, tt: summary->loads, ref, tbaa_p))
2863 {
2864 alias_stats.modref_use_no_alias++;
2865 if (dump_file && (dump_flags & TDF_DETAILS))
2866 {
2867 fprintf (stream: dump_file,
2868 format: "ipa-modref: call stmt ");
2869 print_gimple_stmt (dump_file, call, 0);
2870 fprintf (stream: dump_file,
2871 format: "ipa-modref: call to %s does not use ",
2872 node->dump_name ());
2873 if (!ref->ref && ref->base)
2874 {
2875 fprintf (stream: dump_file, format: "base: ");
2876 print_generic_expr (dump_file, ref->base);
2877 }
2878 else if (ref->ref)
2879 {
2880 fprintf (stream: dump_file, format: "ref: ");
2881 print_generic_expr (dump_file, ref->ref);
2882 }
2883 fprintf (stream: dump_file, format: " alias sets: %i->%i\n",
2884 ao_ref_base_alias_set (ref),
2885 ao_ref_alias_set (ref));
2886 }
2887 goto process_args;
2888 }
2889 alias_stats.modref_use_may_alias++;
2890 }
2891 }
2892 }
2893
2894 base = ao_ref_base (ref);
2895 if (!base)
2896 return true;
2897
2898 /* If the reference is based on a decl that is not aliased the call
2899 cannot possibly use it. */
2900 if (DECL_P (base)
2901 && !may_be_aliased (var: base)
2902 /* But local statics can be used through recursion. */
2903 && !is_global_var (t: base))
2904 goto process_args;
2905
2906 if (int res = check_fnspec (call, ref, clobber: false))
2907 {
2908 if (res == 1)
2909 return true;
2910 }
2911 else
2912 goto process_args;
2913
2914 /* Check if base is a global static variable that is not read
2915 by the function. */
2916 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2917 {
2918 struct cgraph_node *node = cgraph_node::get (decl: callee);
2919 bitmap read;
2920 int id;
2921
2922 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2923 node yet. We should enforce that there are nodes for all decls in the
2924 IL and remove this check instead. */
2925 if (node
2926 && (id = ipa_reference_var_uid (t: base)) != -1
2927 && (read = ipa_reference_get_read_global (fn: node))
2928 && !bitmap_bit_p (read, id))
2929 goto process_args;
2930 }
2931
2932 /* Check if the base variable is call-used. */
2933 if (DECL_P (base))
2934 {
2935 if (pt_solution_includes (gimple_call_use_set (call_stmt: call), base))
2936 return true;
2937 }
2938 else if ((TREE_CODE (base) == MEM_REF
2939 || TREE_CODE (base) == TARGET_MEM_REF)
2940 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2941 {
2942 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2943 if (!pi)
2944 return true;
2945
2946 if (pt_solutions_intersect (gimple_call_use_set (call_stmt: call), &pi->pt))
2947 return true;
2948 }
2949 else
2950 return true;
2951
2952 /* Inspect call arguments for passed-by-value aliases. */
2953process_args:
2954 for (i = 0; i < gimple_call_num_args (gs: call); ++i)
2955 {
2956 tree op = gimple_call_arg (gs: call, index: i);
2957 int flags = gimple_call_arg_flags (call, i);
2958
2959 if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
2960 continue;
2961
2962 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2963 op = TREE_OPERAND (op, 0);
2964
2965 if (TREE_CODE (op) != SSA_NAME
2966 && !is_gimple_min_invariant (op))
2967 {
2968 ao_ref r;
2969 ao_ref_init (r: &r, ref: op);
2970 if (refs_may_alias_p_1 (ref1: &r, ref2: ref, tbaa_p))
2971 return true;
2972 }
2973 }
2974
2975 return false;
2976}
2977
2978static bool
2979ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2980{
2981 bool res;
2982 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2983 if (res)
2984 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2985 else
2986 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2987 return res;
2988}
2989
2990
2991/* If the statement STMT may use the memory reference REF return
2992 true, otherwise return false. */
2993
2994bool
2995ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2996{
2997 if (is_gimple_assign (gs: stmt))
2998 {
2999 tree rhs;
3000
3001 /* All assign statements that touch memory are single assignments. */
3002 if (!gimple_assign_single_p (gs: stmt))
3003 return false;
3004
3005 rhs = gimple_assign_rhs1 (gs: stmt);
3006 if (is_gimple_reg (rhs)
3007 || is_gimple_min_invariant (rhs)
3008 || gimple_assign_rhs_code (gs: stmt) == CONSTRUCTOR)
3009 return false;
3010
3011 return refs_may_alias_p (ref1: rhs, ref2: ref, tbaa_p);
3012 }
3013 else if (is_gimple_call (gs: stmt))
3014 return ref_maybe_used_by_call_p (call: as_a <gcall *> (p: stmt), ref, tbaa_p);
3015 else if (greturn *return_stmt = dyn_cast <greturn *> (p: stmt))
3016 {
3017 tree retval = gimple_return_retval (gs: return_stmt);
3018 if (retval
3019 && TREE_CODE (retval) != SSA_NAME
3020 && !is_gimple_min_invariant (retval)
3021 && refs_may_alias_p (ref1: retval, ref2: ref, tbaa_p))
3022 return true;
3023 /* If ref escapes the function then the return acts as a use. */
3024 tree base = ao_ref_base (ref);
3025 if (!base)
3026 ;
3027 else if (DECL_P (base))
3028 return is_global_var (t: base);
3029 else if (TREE_CODE (base) == MEM_REF
3030 || TREE_CODE (base) == TARGET_MEM_REF)
3031 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0), escaped_local_p: false);
3032 return false;
3033 }
3034
3035 return true;
3036}
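/* E.g. (illustrative): in

     struct S f () { struct S s; ...; return s; }

   the GIMPLE return value is a memory reference (not an SSA name), so
   any REF that may alias it is used by the return statement.
   Independently, a REF whose base is a global variable (or a
   dereference of a pointer that may point to global memory) is treated
   as used, because it escapes the function and the caller may inspect
   it after the return. */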
3037
3038bool
3039ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
3040{
3041 ao_ref r;
3042 ao_ref_init (r: &r, ref);
3043 return ref_maybe_used_by_stmt_p (stmt, ref: &r, tbaa_p);
3044}
3045
3046/* If the call in statement CALL may clobber the memory reference REF
3047 return true, otherwise return false. */
3048
3049bool
3050call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
3051{
3052 tree base;
3053 tree callee;
3054
3055 /* If the call is pure or const it cannot clobber anything. */
3056 if (gimple_call_flags (call)
3057 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
3058 return false;
3059 if (gimple_call_internal_p (gs: call))
3060 switch (auto fn = gimple_call_internal_fn (gs: call))
3061 {
3062 /* Treat these internal calls like ECF_PURE for aliasing;
3063 they don't write to any memory the program should care about.
3064 They have other important side-effects, and read memory,
3065 so they can't be ECF_NOVOPS. */
3066 case IFN_UBSAN_NULL:
3067 case IFN_UBSAN_BOUNDS:
3068 case IFN_UBSAN_VPTR:
3069 case IFN_UBSAN_OBJECT_SIZE:
3070 case IFN_UBSAN_PTR:
3071 case IFN_ASAN_CHECK:
3072 return false;
3073 case IFN_MASK_STORE:
3074 case IFN_LEN_STORE:
3075 case IFN_MASK_LEN_STORE:
3076 case IFN_MASK_STORE_LANES:
3077 case IFN_MASK_LEN_STORE_LANES:
3078 {
3079 tree rhs = gimple_call_arg (gs: call,
3080 index: internal_fn_stored_value_index (fn));
3081 ao_ref lhs_ref;
3082 ao_ref_init_from_ptr_and_size (ref: &lhs_ref, ptr: gimple_call_arg (gs: call, index: 0),
3083 TYPE_SIZE_UNIT (TREE_TYPE (rhs)));
3084 /* We cannot make this a known-size access since otherwise
3085 we disambiguate against refs to decls that are smaller. */
3086 lhs_ref.size = -1;
3087 lhs_ref.ref_alias_set = lhs_ref.base_alias_set
3088 = tbaa_p ? get_deref_alias_set
3089 (TREE_TYPE (gimple_call_arg (call, 1))) : 0;
3090 return refs_may_alias_p_1 (ref1: ref, ref2: &lhs_ref, tbaa_p);
3091 }
3092 default:
3093 break;
3094 }
3095
3096 callee = gimple_call_fndecl (gs: call);
3097
3098 if (callee != NULL_TREE && !ref->volatile_p)
3099 {
3100 struct cgraph_node *node = cgraph_node::get (decl: callee);
3101 if (node)
3102 {
3103 modref_summary *summary = get_modref_function_summary (func: node);
3104 if (summary)
3105 {
3106 if (!modref_may_conflict (stmt: call, tt: summary->stores, ref, tbaa_p)
3107 && (!summary->writes_errno
3108 || !targetm.ref_may_alias_errno (ref)))
3109 {
3110 alias_stats.modref_clobber_no_alias++;
3111 if (dump_file && (dump_flags & TDF_DETAILS))
3112 {
3113 fprintf (stream: dump_file,
3114 format: "ipa-modref: call stmt ");
3115 print_gimple_stmt (dump_file, call, 0);
3116 fprintf (stream: dump_file,
3117 format: "ipa-modref: call to %s does not clobber ",
3118 node->dump_name ());
3119 if (!ref->ref && ref->base)
3120 {
3121 fprintf (stream: dump_file, format: "base: ");
3122 print_generic_expr (dump_file, ref->base);
3123 }
3124 else if (ref->ref)
3125 {
3126 fprintf (stream: dump_file, format: "ref: ");
3127 print_generic_expr (dump_file, ref->ref);
3128 }
3129 fprintf (stream: dump_file, format: " alias sets: %i->%i\n",
3130 ao_ref_base_alias_set (ref),
3131 ao_ref_alias_set (ref));
3132 }
3133 return false;
3134 }
3135 alias_stats.modref_clobber_may_alias++;
3136 }
3137 }
3138 }
3139
3140 base = ao_ref_base (ref);
3141 if (!base)
3142 return true;
3143
3144 if (TREE_CODE (base) == SSA_NAME
3145 || CONSTANT_CLASS_P (base))
3146 return false;
3147
3148 /* A call that is not without side-effects might involve volatile
3149 accesses and thus conflicts with all other volatile accesses. */
3150 if (ref->volatile_p)
3151 return true;
3152
3153 /* If the reference is based on a decl that is not aliased the call
3154 cannot possibly clobber it. */
3155 if (DECL_P (base)
3156 && !may_be_aliased (var: base)
3157 /* But local non-readonly statics can be modified through recursion
3158 or the call may implement a threading barrier which we must
3159 treat as may-def. */
3160 && (TREE_READONLY (base)
3161 || !is_global_var (t: base)))
3162 return false;
3163
3164 /* If the reference is based on a pointer that points to memory
3165 that may not be written to then the call cannot possibly clobber it. */
3166 if ((TREE_CODE (base) == MEM_REF
3167 || TREE_CODE (base) == TARGET_MEM_REF)
3168 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3169 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3170 return false;
3171
3172 if (int res = check_fnspec (call, ref, clobber: true))
3173 {
3174 if (res == 1)
3175 return true;
3176 }
3177 else
3178 return false;
3179
3180 /* Check if base is a global static variable that is not written
3181 by the function. */
3182 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3183 {
3184 struct cgraph_node *node = cgraph_node::get (decl: callee);
3185 bitmap written;
3186 int id;
3187
3188 if (node
3189 && (id = ipa_reference_var_uid (t: base)) != -1
3190 && (written = ipa_reference_get_written_global (fn: node))
3191 && !bitmap_bit_p (written, id))
3192 return false;
3193 }
3194
3195 /* Check if the base variable is call-clobbered. */
3196 if (DECL_P (base))
3197 return pt_solution_includes (gimple_call_clobber_set (call_stmt: call), base);
3198 else if ((TREE_CODE (base) == MEM_REF
3199 || TREE_CODE (base) == TARGET_MEM_REF)
3200 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3201 {
3202 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3203 if (!pi)
3204 return true;
3205
3206 return pt_solutions_intersect (gimple_call_clobber_set (call_stmt: call), &pi->pt);
3207 }
3208
3209 return true;
3210}
3211
3212/* If the call in statement CALL may clobber the memory reference REF
3213 return true, otherwise return false. */
3214
3215bool
3216call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3217{
3218 bool res;
3219 ao_ref r;
3220 ao_ref_init (r: &r, ref);
3221 res = call_may_clobber_ref_p_1 (call, ref: &r, tbaa_p);
3222 if (res)
3223 ++alias_stats.call_may_clobber_ref_p_may_alias;
3224 else
3225 ++alias_stats.call_may_clobber_ref_p_no_alias;
3226 return res;
3227}
3228
3229
3230/* If the statement STMT may clobber the memory reference REF return true,
3231 otherwise return false. */
3232
3233bool
3234stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3235{
3236 if (is_gimple_call (gs: stmt))
3237 {
3238 tree lhs = gimple_call_lhs (gs: stmt);
3239 if (lhs
3240 && TREE_CODE (lhs) != SSA_NAME)
3241 {
3242 ao_ref r;
3243 ao_ref_init (r: &r, ref: lhs);
3244 if (refs_may_alias_p_1 (ref1: ref, ref2: &r, tbaa_p))
3245 return true;
3246 }
3247
3248 return call_may_clobber_ref_p_1 (call: as_a <gcall *> (p: stmt), ref, tbaa_p);
3249 }
3250 else if (gimple_assign_single_p (gs: stmt))
3251 {
3252 tree lhs = gimple_assign_lhs (gs: stmt);
3253 if (TREE_CODE (lhs) != SSA_NAME)
3254 {
3255 ao_ref r;
3256 ao_ref_init (r: &r, ref: lhs);
3257 return refs_may_alias_p_1 (ref1: ref, ref2: &r, tbaa_p);
3258 }
3259 }
3260 else if (gimple_code (g: stmt) == GIMPLE_ASM)
3261 return true;
3262
3263 return false;
3264}
3265
3266bool
3267stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3268{
3269 ao_ref r;
3270 ao_ref_init (r: &r, ref);
3271 return stmt_may_clobber_ref_p_1 (stmt, ref: &r, tbaa_p);
3272}
3273
3274/* Return true if store1 and store2, described by the corresponding tuples
3275 <BASE, OFFSET, SIZE, MAX_SIZE>, have the same size and store to the same
3276 address. */
3277
3278static bool
3279same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3280 poly_int64 max_size1,
3281 tree base2, poly_int64 offset2, poly_int64 size2,
3282 poly_int64 max_size2)
3283{
3284 /* Offsets need to be 0. */
3285 if (maybe_ne (a: offset1, b: 0)
3286 || maybe_ne (a: offset2, b: 0))
3287 return false;
3288
3289 bool base1_obj_p = SSA_VAR_P (base1);
3290 bool base2_obj_p = SSA_VAR_P (base2);
3291
3292 /* We need exactly one object. */
3293 if (base1_obj_p == base2_obj_p)
3294 return false;
3295 tree obj = base1_obj_p ? base1 : base2;
3296
3297 /* And exactly one MEM_REF. */
3298 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3299 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3300 if (base1_memref_p == base2_memref_p)
3301 return false;
3302 tree memref = base1_memref_p ? base1 : base2;
3303
3304 /* Sizes need to be valid. */
3305 if (!known_size_p (a: max_size1)
3306 || !known_size_p (a: max_size2)
3307 || !known_size_p (a: size1)
3308 || !known_size_p (a: size2))
3309 return false;
3310
3311 /* Max_size needs to match size. */
3312 if (maybe_ne (a: max_size1, b: size1)
3313 || maybe_ne (a: max_size2, b: size2))
3314 return false;
3315
3316 /* Sizes need to match. */
3317 if (maybe_ne (a: size1, b: size2))
3318 return false;
3319
3320
3321 /* Check that MEMREF is a store through a pointer with singleton points-to info. */
3322 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3323 return false;
3324 tree ptr = TREE_OPERAND (memref, 0);
3325 if (TREE_CODE (ptr) != SSA_NAME)
3326 return false;
3327 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3328 unsigned int pt_uid;
3329 if (pi == NULL
3330 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3331 return false;
3332
3333 /* Be conservative with non-call exceptions when the address might
3334 be NULL. */
3335 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3336 return false;
3337
3338 /* Check that ptr points relative to obj. */
3339 unsigned int obj_uid = DECL_PT_UID (obj);
3340 if (obj_uid != pt_uid)
3341 return false;
3342
3343 /* Check that the object size is the same as the store size. That ensures
3344 that PTR points to the start of OBJ. */
3345 return (DECL_SIZE (obj)
3346 && poly_int_tree_p (DECL_SIZE (obj))
3347 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3348}
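/* E.g. (illustrative): given

     struct S s;
     struct S *p = &s;

   where the points-to set of P is the singleton { s }, the stores
   *p = x and s = y both have offset 0 and size sizeof (struct S), and
   P provably points at the start of S, so the two stores write exactly
   the same bytes. */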
3349
3350/* Return true if REF is killed by a store described by
3351 BASE, OFFSET, SIZE and MAX_SIZE. */
3352
3353static bool
3354store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3355 poly_int64 max_size, ao_ref *ref)
3356{
3357 poly_int64 ref_offset = ref->offset;
3358 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3359 so base == ref->base does not always hold. */
3360 if (base != ref->base)
3361 {
3362 /* Try using points-to info. */
3363 if (same_addr_size_stores_p (base1: base, offset1: offset, size1: size, max_size1: max_size, base2: ref->base,
3364 offset2: ref->offset, size2: ref->size, max_size2: ref->max_size))
3365 return true;
3366
3367 /* If both BASE and ref->base are MEM_REFs, only compare the
3368 first operands, and if the second operands aren't equal constants,
3369 try to add the offsets into OFFSET and REF_OFFSET. */
3370 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3371 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3372 {
3373 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3374 TREE_OPERAND (ref->base, 1)))
3375 {
3376 poly_offset_int off1 = mem_ref_offset (base);
3377 off1 <<= LOG2_BITS_PER_UNIT;
3378 off1 += offset;
3379 poly_offset_int off2 = mem_ref_offset (ref->base);
3380 off2 <<= LOG2_BITS_PER_UNIT;
3381 off2 += ref_offset;
3382 if (!off1.to_shwi (r: &offset) || !off2.to_shwi (r: &ref_offset))
3383 size = -1;
3384 }
3385 }
3386 else
3387 size = -1;
3388 }
3389 /* For a must-alias check we need to be able to constrain
3390 the access properly. */
3391 return (known_eq (size, max_size)
3392 && known_subrange_p (pos1: ref_offset, size1: ref->max_size, pos2: offset, size2: size));
3393}
3394
3395/* If STMT kills the memory reference REF return true, otherwise
3396 return false. */
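
/* For illustration (a sketch of how a client such as dead store
   elimination uses this query):

     s.x = 1;		earlier store, REF describes s.x
     ...		no intervening uses of s.x
     s.x = 2;		stmt_kills_ref_p (this stmt, REF) is true

   the earlier store is dead because the later statement overwrites
   all of REF before it can be read.  */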

bool
stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.  Similarly if the function can throw externally
	 and the ref does not die on the function return.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.
	 ???  We also should care about possible longjmp, but since we
	 do not understand that longjmp is not using global memory we will
	 not consider a kill here since the function call will be considered
	 as possibly using REF.  */
      && !stmt_can_throw_internal (cfun, stmt)
      && (!stmt_can_throw_external (cfun, stmt)
	  || !ref_may_alias_global_p (ref, false)))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  tree innermost_dropped_array_ref = NULL_TREE;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Remember if we drop an array-ref that we need to
		     double-check not being at struct end.  */
		  if (TREE_CODE (base) == ARRAY_REF
		      || TREE_CODE (base) == ARRAY_RANGE_REF)
		    innermost_dropped_array_ref = base;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if the lhs has the same address and size as the
	     base candidate of the access.  Watch out if we have dropped
	     an array-ref that might have flexible size, this means ref->ref
	     may be outside of the TYPE_SIZE of its base.  */
	  if ((! innermost_dropped_array_ref
	       || ! array_ref_flexible_size_p (innermost_dropped_array_ref))
	      && (lhs == base
		  || (((TYPE_SIZE (TREE_TYPE (lhs))
			== TYPE_SIZE (TREE_TYPE (base)))
		       || (TYPE_SIZE (TREE_TYPE (lhs))
			   && TYPE_SIZE (TREE_TYPE (base))
			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
					       TYPE_SIZE (TREE_TYPE (base)),
					       0)))
		      && operand_equal_p (lhs, base,
					  OEP_ADDRESS_OF
					  | OEP_MATCH_SIDE_EFFECTS))))
	    {
	      ++alias_stats.stmt_kills_ref_p_yes;
	      return true;
	    }
	}

      /* Now look for non-literal equal bases with the restriction of
	 handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (!ref->max_size_known_p ())
	{
	  ++alias_stats.stmt_kills_ref_p_no;
	  return false;
	}
      poly_int64 size, offset, max_size;
      bool reverse;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
					   &reverse);
      if (store_kills_ref_p (base, offset, size, max_size, ref))
	{
	  ++alias_stats.stmt_kills_ref_p_yes;
	  return true;
	}
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      struct cgraph_node *node;
      modref_summary *summary;

      /* Try to disambiguate using modref summary.  Modref records a vector
	 of stores with known offsets relative to function parameters that
	 must happen on every execution of the function.  Find out if we
	 have a matching store and verify that the function cannot use the
	 value.  */
      if (callee != NULL_TREE
	  && (node = cgraph_node::get (callee)) != NULL
	  && node->binds_to_current_def_p ()
	  && (summary = get_modref_function_summary (node)) != NULL
	  && summary->kills.length ()
	  /* Check that we cannot trap while evaluating the function
	     parameters.  This check is overly conservative.  */
	  && (!cfun->can_throw_non_call_exceptions
	      || (!stmt_can_throw_internal (cfun, stmt)
		  && (!stmt_can_throw_external (cfun, stmt)
		      || !ref_may_alias_global_p (ref, false)))))
	{
	  for (auto kill : summary->kills)
	    {
	      ao_ref dref;

	      /* We only can do useful compares if we know the access range
		 precisely.  */
	      if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
		continue;
	      if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
				     dref.size, dref.max_size, ref))
		{
		  /* For the store to be killed it needs to not be used
		     earlier.  */
		  if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
						  true)
		      || !dbg_cnt (ipa_mod_ref))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       "ipa-modref: call stmt ");
		      print_gimple_stmt (dump_file, stmt, 0);
		      fprintf (dump_file,
			       "ipa-modref: call to %s kills ",
			       node->dump_name ());
		      print_generic_expr (dump_file, ref->base);
		      fprintf (dump_file, "\n");
		    }
		  ++alias_stats.modref_kill_yes;
		  return true;
		}
	    }
	  ++alias_stats.modref_kill_no;
	}
      if (callee != NULL_TREE
	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		{
		  ++alias_stats.stmt_kills_ref_p_yes;
		  return true;
		}
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_CALLOC:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (!ref->max_size_known_p ())
		{
		  ++alias_stats.stmt_kills_ref_p_no;
		  return false;
		}
	      tree dest;
	      tree len;

	      /* In execution order a calloc call will never kill
		 anything.  However, DSE will (ab)use this interface
		 to ask if a calloc call writes the same memory locations
		 as a later assignment, memset, etc.  So handle calloc
		 in the expected way.  */
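	      /* For example (an illustrative sketch of the DSE query):

		   p = calloc (1, 16);
		   ...
		   memset (p, 0, 16);

		 here DSE asks whether the calloc covers all bytes the
		 memset writes; if so the memset is redundant.  */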
	      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree arg1 = gimple_call_arg (stmt, 1);
		  if (TREE_CODE (arg0) != INTEGER_CST
		      || TREE_CODE (arg1) != INTEGER_CST)
		    {
		      ++alias_stats.stmt_kills_ref_p_no;
		      return false;
		    }

		  dest = gimple_call_lhs (stmt);
		  if (!dest)
		    {
		      ++alias_stats.stmt_kills_ref_p_no;
		      return false;
		    }
		  len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
		}
	      else
		{
		  dest = gimple_call_arg (stmt, 0);
		  len = gimple_call_arg (stmt, 2);
		}
	      if (!poly_int_tree_p (len))
		return false;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
				     dref.size, dref.max_size, ref))
		{
		  ++alias_stats.stmt_kills_ref_p_yes;
		  return true;
		}
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    {
		      ++alias_stats.stmt_kills_ref_p_yes;
		      return true;
		    }
		}
	      break;
	    }

	  default:;
	  }
    }
  ++alias_stats.stmt_kills_ref_p_no;
  return false;
}

bool
stmt_kills_ref_p (gimple *stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}


/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF, in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
		  ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
		  bitmap *visited, bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, translate_flags *),
		  translate_flags disambiguate_only,
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    {
      *visited = BITMAP_ALLOC (NULL);
      bitmap_tree_view (*visited);
    }

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* If we are searching for the target VUSE by walking up to
	 TARGET_BB dominating the original PHI we are finished once
	 we reach a default def or a definition in a block dominating
	 that block.  Update TARGET and return.  */
      if (!target
	  && (gimple_nop_p (def_stmt)
	      || dominated_by_p (CDI_DOMINATORS,
				 target_bb, gimple_bb (def_stmt))))
	{
	  target = vuse;
	  return true;
	}

      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					   visited, abort_on_visited,
					   translate, data, disambiguate_only);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  if ((int)limit <= 0)
	    return false;
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      translate_flags tf = disambiguate_only;
	      if (translate
		  && (*translate) (ref, vuse, data, &tf) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}


/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows the walk to continue
   over statements dominating PHI, skipping only statements that cannot
   possibly clobber REF.  Decrements LIMIT for each alias disambiguation
   done and aborts the walk, returning NULL_TREE, if it reaches zero.
   Returns NULL_TREE if no suitable virtual operand can be found.  */
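
/* An illustrative CFG sketch (not from the sources): for a diamond
   merging in

     # .MEM_5 = PHI <.MEM_2(bb_then), .MEM_3(bb_else)>

   where neither argument's definition dominates the other, the walk
   tries to skip the non-clobbering statements on both incoming paths
   up to the immediate dominator of the PHI's block; if that succeeds,
   the virtual operand live there is returned as the continuation.  */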

tree
get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
			  unsigned int &limit, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *,
					     translate_flags *),
			  void *data,
			  translate_flags disambiguate_only)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* We can simply look through a single-argument PHI.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand whose definition
     dominates those of all others.  */
  /* First look if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, look if we can reach such candidate by walking defs
     until we hit the immediate dominator.  maybe_skip_until will
     do that for us.  */
  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);

  /* Then check against the (to be) found candidate.  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
				   limit, visited,
				   abort_on_visited,
				   translate,
				   /* Do not valueize when walking over
				      backedges.  */
				   dominated_by_p
				     (CDI_DOMINATORS,
				      gimple_bb (SSA_NAME_DEF_STMT (arg1)),
				      phi_bb)
				   ? TR_DISAMBIGUATE
				   : disambiguate_only, data))
	return NULL_TREE;
    }

  return arg0;
}

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use whose definition is a statement that may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and its return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   LIMIT specifies the number of alias queries we are allowed to do,
   the walk stops when it reaches zero and NULL is returned.  LIMIT
   is decremented by the number of alias queries (plus adjustments
   done by the callbacks) upon return.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
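
/* A minimal usage sketch (illustrative only; the callback and its stop
   condition are made up):

     static void *
     find_reaching_def (ao_ref *ref, tree vuse, void *data)
     {
       gimple *def = SSA_NAME_DEF_STMT (vuse);
       // Stop the walk when the definition is interesting to the caller.
       return gimple_assign_single_p (def) ? (void *) def : NULL;
     }

     unsigned limit = param_sccvn_max_alias_queries_per_access;
     void *res = walk_non_aliased_vuses (&ref, vuse, true,
					 find_reaching_def,
					 NULL, NULL, limit, NULL);

   This visits vuses whose defining statements do not clobber REF and
   stops at the first one the callback accepts.  */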

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
			void *(*walker)(ao_ref *, tree, void *),
			void *(*translate)(ao_ref *, tree, void *,
					   translate_flags *),
			tree (*valueize)(tree),
			unsigned &limit, void *data)
{
  bitmap visited = NULL;
  void *res;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	{
	  vuse = valueize (vuse);
	  if (!vuse)
	    {
	      res = NULL;
	      break;
	    }
	}
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					 &visited, translated, translate,
					 data);
      else
	{
	  if ((int)limit <= 0)
	    {
	      res = NULL;
	      break;
	    }
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      if (!translate)
		break;
	      translate_flags disambiguate_only = TR_TRANSLATE;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || disambiguate_only == TR_TRANSLATE;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}


/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues at merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked or -1 if
   LIMIT stmts were walked and the walk was aborted at this point.
   If LIMIT is zero the walk is not aborted.  */
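
/* A minimal usage sketch (illustrative; the callback is made up and
   REF is assumed to have been set up with ao_ref_init):

     static bool
     record_clobberer (ao_ref *ref, tree vdef, void *data)
     {
       // Record the clobbering statement and keep walking.
       ((auto_vec<gimple *> *) data)->safe_push (SSA_NAME_DEF_STMT (vdef));
       return false;
     }

     auto_vec<gimple *> clobbers;
     walk_aliased_vdefs (&ref, gimple_vuse (stmt), record_clobberer,
			 &clobbers, NULL, NULL, 100);

   collects the defining statements of all vdefs that may clobber REF,
   walking at most 100 statements.  */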

static int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached, unsigned limit)
{
  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    {
	      *visited = BITMAP_ALLOC (NULL);
	      bitmap_tree_view (*visited);
	    }
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    {
	      int res = walk_aliased_vdefs_1 (ref,
					      gimple_phi_arg_def (def_stmt, i),
					      walker, data, visited, cnt,
					      function_entry_reached, limit);
	      if (res == -1)
		return -1;
	      cnt = res;
	    }
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if (cnt == limit)
	return -1;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached, unsigned int limit)
{
  bitmap local_visited = NULL;
  int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached, limit);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}

/* Verify validity of the fnspec string.
   See attr-fnspec.h for details.  */
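
/* For instance (illustrative examples; attr-fnspec.h has the
   authoritative description): the two leading characters describe the
   return value, so ". R " denotes a function about whose return value
   nothing is known and that only reads the memory its first argument
   points to, while "1c" starts the fnspec of a function that returns
   its first argument and is const apart from the described side
   effects.  */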

void
attr_fnspec::verify ()
{
  bool err = false;
  if (!len)
    return;

  /* Check return value specifier.  */
  if (len < return_desc_size)
    err = true;
  else if ((len - return_desc_size) % arg_desc_size)
    err = true;
  else if ((str[0] < '1' || str[0] > '4')
	   && str[0] != '.' && str[0] != 'm')
    err = true;

  switch (str[1])
    {
    case ' ':
    case 'p':
    case 'P':
    case 'c':
    case 'C':
      break;
    default:
      err = true;
    }
  if (err)
    internal_error ("invalid fn spec attribute \"%s\"", str);

  /* Now check all parameters.  */
  for (unsigned int i = 0; arg_specified_p (i); i++)
    {
      unsigned int idx = arg_idx (i);
      switch (str[idx])
	{
	case 'x':
	case 'X':
	case 'r':
	case 'R':
	case 'o':
	case 'O':
	case 'w':
	case 'W':
	case '.':
	  if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
	      || str[idx + 1] == 't')
	    {
	      if (str[idx] != 'r' && str[idx] != 'R'
		  && str[idx] != 'w' && str[idx] != 'W'
		  && str[idx] != 'o' && str[idx] != 'O')
		err = true;
	      if (str[idx + 1] != 't'
		  /* The size specified is scalar, so it should be described
		     by ". " if specified at all.  */
		  && (arg_specified_p (str[idx + 1] - '1')
		      && str[arg_idx (str[idx + 1] - '1')] != '.'))
		err = true;
	    }
	  else if (str[idx + 1] != ' ')
	    err = true;
	  break;
	default:
	  if (str[idx] < '1' || str[idx] > '9')
	    err = true;
	}
      if (err)
	internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
    }
}

/* Return true if TYPE1 and TYPE2 will always give the same answer
   when compared with other types using same_type_for_tbaa_p.  */

static bool
types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
				      bool lto_streaming_safe)
{
  /* We use same_type_for_tbaa_p to match types in the access path.
     This check is overly conservative.  */
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      != TYPE_STRUCTURAL_EQUALITY_P (type2))
    return false;
  if (TYPE_STRUCTURAL_EQUALITY_P (type1))
    return true;

  if (lto_streaming_safe)
    return type1 == type2;
  else
    return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
}

/* Compare REF1 and REF2 and return flags specifying their differences.
   If LTO_STREAMING_SAFE is true do not use alias sets and canonical
   types that are going to be recomputed.
   If TBAA is true also compare TBAA metadata.  */
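
/* For example (illustrative): two accesses to the same location that
   differ only in the alias set of their outermost reference compare
   equal semantically but yield REF_ALIAS_SET, while a return value of
   0 means the references are interchangeable both semantically and
   with respect to TBAA.  */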

int
ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
			     bool lto_streaming_safe,
			     bool tbaa)
{
  if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
    return SEMANTICS;
  tree base1 = ao_ref_base (ref1);
  tree base2 = ao_ref_base (ref2);

  if (!known_eq (ref1->offset, ref2->offset)
      || !known_eq (ref1->size, ref2->size)
      || !known_eq (ref1->max_size, ref2->max_size))
    return SEMANTICS;

  /* For variable accesses we need to compare actual paths
     to check that both refs are accessing the same address with the same
     access size.  */
  if (!known_eq (ref1->size, ref1->max_size))
    {
      if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
			    TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
	return SEMANTICS;
      tree r1 = ref1->ref;
      tree r2 = ref2->ref;

      /* Handle toplevel COMPONENT_REFs of bitfields.
	 Those are special since they are not allowed in
	 ADDR_EXPR.  */
      if (TREE_CODE (r1) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
	{
	  if (TREE_CODE (r2) != COMPONENT_REF
	      || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
	    return SEMANTICS;
	  tree field1 = TREE_OPERAND (r1, 1);
	  tree field2 = TREE_OPERAND (r2, 1);
	  if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
				DECL_FIELD_OFFSET (field2), 0)
	      || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
				   DECL_FIELD_BIT_OFFSET (field2), 0)
	      || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
	      || !types_compatible_p (TREE_TYPE (r1),
				      TREE_TYPE (r2)))
	    return SEMANTICS;
	  r1 = TREE_OPERAND (r1, 0);
	  r2 = TREE_OPERAND (r2, 0);
	}
      else if (TREE_CODE (r2) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
	return SEMANTICS;

      /* Similarly for bit field refs.  */
      if (TREE_CODE (r1) == BIT_FIELD_REF)
	{
	  if (TREE_CODE (r2) != BIT_FIELD_REF
	      || !operand_equal_p (TREE_OPERAND (r1, 1),
				   TREE_OPERAND (r2, 1), 0)
	      || !operand_equal_p (TREE_OPERAND (r1, 2),
				   TREE_OPERAND (r2, 2), 0)
	      || !types_compatible_p (TREE_TYPE (r1),
				      TREE_TYPE (r2)))
	    return SEMANTICS;
	  r1 = TREE_OPERAND (r1, 0);
	  r2 = TREE_OPERAND (r2, 0);
	}
      else if (TREE_CODE (r2) == BIT_FIELD_REF)
	return SEMANTICS;

      /* Now we can compare the address of the actual memory access.  */
      if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
	return SEMANTICS;
    }
  /* For constant accesses we get more matches by comparing offset only.  */
  else if (!operand_equal_p (base1, base2,
			     OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
    return SEMANTICS;

  /* We can't simply use get_object_alignment_1 on the full
     reference as for accesses with variable indexes this reports
     too conservative alignment.  */
  unsigned int align1, align2;
  unsigned HOST_WIDE_INT bitpos1, bitpos2;
  bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
  bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
  /* ???  For MEM_REF get_object_alignment_1 determines the alignment from
     TYPE_ALIGN but still returns false.  This seems to contradict
     its description.  So compare even if alignment is unknown.  */
  if (known1 != known2
      || (bitpos1 != bitpos2 || align1 != align2))
    return SEMANTICS;

  /* Now we know that the accesses are semantically the same.  */
  int flags = 0;

  /* ao_ref_base strips inner MEM_REF [&decl], recover from that here.  */
  tree rbase1 = ref1->ref;
  if (rbase1)
    while (handled_component_p (rbase1))
      rbase1 = TREE_OPERAND (rbase1, 0);
  tree rbase2 = ref2->ref;
  while (handled_component_p (rbase2))
    rbase2 = TREE_OPERAND (rbase2, 0);

  /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
     implement restrict pointers.  MR_DEPENDENCE_CLIQUE 0 means no information.
     Otherwise we need to match bases and cliques.  */
  if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
	&& MR_DEPENDENCE_CLIQUE (rbase1))
       || ((TREE_CODE (rbase2) == MEM_REF
	    || TREE_CODE (rbase2) == TARGET_MEM_REF)
	   && MR_DEPENDENCE_CLIQUE (rbase2)))
      && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
	  || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
	  || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
    flags |= DEPENDENCE_CLIQUE;

  if (!tbaa)
    return flags;

  /* Alias sets are not stable across LTO streaming; be conservative here
     and compare types the alias sets are ultimately based on.  */
  if (lto_streaming_safe)
    {
      tree t1 = ao_ref_alias_ptr_type (ref1);
      tree t2 = ao_ref_alias_ptr_type (ref2);
      if (!alias_ptr_types_compatible_p (t1, t2))
	flags |= REF_ALIAS_SET;

      t1 = ao_ref_base_alias_ptr_type (ref1);
      t2 = ao_ref_base_alias_ptr_type (ref2);
      if (!alias_ptr_types_compatible_p (t1, t2))
	flags |= BASE_ALIAS_SET;
    }
  else
    {
      if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
	flags |= REF_ALIAS_SET;
      if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
	flags |= BASE_ALIAS_SET;
    }

  /* The access path is used only on non-view-converted references.  */
  bool view_converted = view_converted_memref_p (rbase1);
  if (view_converted_memref_p (rbase2) != view_converted)
    return flags | ACCESS_PATH;
  else if (view_converted)
    return flags;

  /* Find the start of the access paths and look for trailing arrays.  */
  tree c1 = ref1->ref, c2 = ref2->ref;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  int nskipped1 = 0, nskipped2 = 0;
  int i = 0;

  for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
    {
      if (component_ref_to_zero_sized_trailing_array_p (p1))
	end_struct_ref1 = p1;
      if (ends_tbaa_access_path_p (p1))
	c1 = p1, nskipped1 = i;
      i++;
    }
  i = 0;
  for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
    {
      if (component_ref_to_zero_sized_trailing_array_p (p2))
	end_struct_ref2 = p2;
      if (ends_tbaa_access_path_p (p2))
	c2 = p2, nskipped2 = i;
      i++;
    }

  /* For variable accesses we cannot rely on the offset match below.
     We know that the paths are structurally the same, so only check
     that the starts of the TBAA paths did not diverge.  */
  if (!known_eq (ref1->size, ref1->max_size)
      && nskipped1 != nskipped2)
    return flags | ACCESS_PATH;

  /* Information about trailing refs is used by
     aliasing_component_refs_p, which is applied only if the paths
     have handled components.  */
  if (!handled_component_p (c1) && !handled_component_p (c2))
    ;
  else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
    return flags | ACCESS_PATH;
  if (end_struct_ref1
      && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
	 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
    return flags | ACCESS_PATH;

  /* Now compare all handled components of the access path.
     We have three oracles that care about access paths:
     - aliasing_component_refs_p
     - nonoverlapping_refs_since_match_p
     - nonoverlapping_component_refs_p
     We need to match the things these oracles compare.

     It is only necessary to check types for compatibility
     and offsets.  The rest of what the oracles compare are actual
     addresses.  Those are already known to be the same:
     - for constant accesses we check offsets
     - for variable accesses we already matched
       the path lexically with operand_equal_p.  */
  while (true)
    {
      bool comp1 = handled_component_p (c1);
      bool comp2 = handled_component_p (c2);

      if (comp1 != comp2)
	return flags | ACCESS_PATH;
      if (!comp1)
	break;

      if (TREE_CODE (c1) != TREE_CODE (c2))
	return flags | ACCESS_PATH;

      /* aliasing_component_refs_p attempts to find a type match within
	 the paths.  For that reason both types need to be equal
	 with respect to same_type_for_tbaa_p.  */
      if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
						 TREE_TYPE (c2),
						 lto_streaming_safe))
	return flags | ACCESS_PATH;
      if (component_ref_to_zero_sized_trailing_array_p (c1)
	  != component_ref_to_zero_sized_trailing_array_p (c2))
	return flags | ACCESS_PATH;

      /* aliasing_matching_component_refs_p compares
	 offsets within the path.  Other properties are ignored.
	 Do not bother to verify offsets in variable accesses.  Here we
	 already compared them by operand_equal_p so they are
	 structurally the same.  */
      if (!known_eq (ref1->size, ref1->max_size))
	{
	  poly_int64 offadj1, sztmc1, msztmc1;
	  bool reverse1;
	  get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
	  poly_int64 offadj2, sztmc2, msztmc2;
	  bool reverse2;
	  get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
	  if (!known_eq (offadj1, offadj2))
	    return flags | ACCESS_PATH;
	}
      c1 = TREE_OPERAND (c1, 0);
      c2 = TREE_OPERAND (c2, 0);
    }
  /* Finally test the access type.  */
  if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
					     TREE_TYPE (c2),
					     lto_streaming_safe))
    return flags | ACCESS_PATH;
  return flags;
}

/* Hash REF to HSTATE.  If LTO_STREAMING_SAFE do not use alias sets
   and canonical types.  */
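
/* A sketch of the intended pairing with compare_ao_refs (illustrative,
   assuming REF1 and REF2 are initialized ao_refs):

     ao_compare c;
     inchash::hash h1, h2;
     c.hash_ao_ref (&ref1, lto_safe, tbaa, h1);
     c.hash_ao_ref (&ref2, lto_safe, tbaa, h2);

   if c.compare_ao_refs (&ref1, &ref2, lto_safe, tbaa) returns 0, the
   two hashes must agree, so the hash can be used to bucket references
   before doing the full comparison.  */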
void
ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
			 inchash::hash &hstate)
{
  tree base = ao_ref_base (ref);
  tree tbase = base;

  if (!known_eq (ref->size, ref->max_size))
    {
      tree r = ref->ref;
      if (TREE_CODE (r) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
	{
	  tree field = TREE_OPERAND (r, 1);
	  hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
	  hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
	  hash_operand (DECL_SIZE (field), hstate, 0);
	  r = TREE_OPERAND (r, 0);
	}
      if (TREE_CODE (r) == BIT_FIELD_REF)
	{
	  hash_operand (TREE_OPERAND (r, 1), hstate, 0);
	  hash_operand (TREE_OPERAND (r, 2), hstate, 0);
	  r = TREE_OPERAND (r, 0);
	}
      hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
      hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
    }
  else
    {
      hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
      hstate.add_poly_int (ref->offset);
      hstate.add_poly_int (ref->size);
      hstate.add_poly_int (ref->max_size);
    }
  if (!lto_streaming_safe && tbaa)
    {
      hstate.add_int (ao_ref_alias_set (ref));
      hstate.add_int (ao_ref_base_alias_set (ref));
    }
}
