1/* Alias analysis for trees.
2 Copyright (C) 2004-2025 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "backend.h"
25#include "target.h"
26#include "rtl.h"
27#include "tree.h"
28#include "gimple.h"
29#include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30#include "ssa.h"
31#include "cgraph.h"
32#include "tree-pretty-print.h"
33#include "alias.h"
34#include "fold-const.h"
35#include "langhooks.h"
36#include "dumpfile.h"
37#include "tree-eh.h"
38#include "tree-dfa.h"
39#include "ipa-reference.h"
40#include "varasm.h"
41#include "ipa-modref-tree.h"
42#include "ipa-modref.h"
43#include "attr-fnspec.h"
44#include "errors.h"
45#include "dbgcnt.h"
46#include "gimple-pretty-print.h"
47#include "print-tree.h"
48#include "tree-ssa-alias-compare.h"
49#include "builtins.h"
50#include "internal-fn.h"
51#include "ipa-utils.h"
52
53/* Broad overview of how alias analysis on gimple works:
54
55 Statements clobbering or using memory are linked through the
56 virtual operand factored use-def chain. The virtual operand
   is unique per function; its symbol is accessible via gimple_vop (cfun).
58 Virtual operands are used for efficiently walking memory statements
59 in the gimple IL and are useful for things like value-numbering as
60 a generation count for memory references.
61
62 SSA_NAME pointers may have associated points-to information
63 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
64 points-to information is (re-)computed by the TODO_rebuild_alias
65 pass manager todo. Points-to information is also used for more
66 precise tracking of call-clobbered and call-used variables and
67 related disambiguations.
68
69 This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.
71
72 The main alias-oracle entry-points are
73
74 bool stmt_may_clobber_ref_p (gimple *, tree)
75
76 This function queries if a statement may invalidate (parts of)
77 the memory designated by the reference tree argument.
78
79 bool ref_maybe_used_by_stmt_p (gimple *, tree)
80
81 This function queries if a statement may need (parts of) the
82 memory designated by the reference tree argument.
83
84 There are variants of these functions that only handle the call
85 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
86 Note that these do not disambiguate against a possible call lhs.
87
88 bool refs_may_alias_p (tree, tree)
89
90 This function tries to disambiguate two reference trees.
91
92 bool ptr_deref_may_alias_global_p (tree, bool)
93
94 This function queries if dereferencing a pointer variable may
     alias global memory.  If the bool argument is true, global memory
     is considered to also include function-local memory that has escaped.
97
98 More low-level disambiguators are available and documented in
99 this file. Low-level disambiguators dealing with points-to
100 information are in tree-ssa-structalias.cc. */
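
/* Illustrative sketch only (not itself part of the oracle): a typical
   client wraps the reference tree in an ao_ref and then uses the entry
   points documented above, for example

     ao_ref r;
     ao_ref_init (&r, ref);
     if (stmt_may_clobber_ref_p_1 (stmt, &r))
       ;  // STMT may write (parts of) the memory designated by REF
     if (ref_maybe_used_by_stmt_p (stmt, ref))
       ;  // STMT may read (parts of) the memory designated by REF

   Whether the tree or the ao_ref variants are used depends on the caller;
   this block is meant purely as orientation.  */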
101
102static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
103static bool nonoverlapping_component_refs_p (const_tree, const_tree);
104
105/* Query statistics for the different low-level disambiguators.
106 A high-level query may trigger multiple of them. */
107
108static struct {
109 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
110 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
111 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
112 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
113 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
114 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
115 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
116 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
117 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
118 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
119 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
120 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
121 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
122 unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
123 unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
124 unsigned HOST_WIDE_INT modref_use_may_alias;
125 unsigned HOST_WIDE_INT modref_use_no_alias;
126 unsigned HOST_WIDE_INT modref_clobber_may_alias;
127 unsigned HOST_WIDE_INT modref_clobber_no_alias;
128 unsigned HOST_WIDE_INT modref_kill_no;
129 unsigned HOST_WIDE_INT modref_kill_yes;
130 unsigned HOST_WIDE_INT modref_tests;
131 unsigned HOST_WIDE_INT modref_baseptr_tests;
132} alias_stats;
133
134void
135dump_alias_stats (FILE *s)
136{
137 fprintf (stream: s, format: "\nAlias oracle query stats:\n");
138 fprintf (stream: s, format: " refs_may_alias_p: "
139 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
140 HOST_WIDE_INT_PRINT_DEC" queries\n",
141 alias_stats.refs_may_alias_p_no_alias,
142 alias_stats.refs_may_alias_p_no_alias
143 + alias_stats.refs_may_alias_p_may_alias);
144 fprintf (stream: s, format: " ref_maybe_used_by_call_p: "
145 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
146 HOST_WIDE_INT_PRINT_DEC" queries\n",
147 alias_stats.ref_maybe_used_by_call_p_no_alias,
           alias_stats.ref_maybe_used_by_call_p_no_alias
149 + alias_stats.ref_maybe_used_by_call_p_may_alias);
150 fprintf (stream: s, format: " call_may_clobber_ref_p: "
151 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
152 HOST_WIDE_INT_PRINT_DEC" queries\n",
153 alias_stats.call_may_clobber_ref_p_no_alias,
154 alias_stats.call_may_clobber_ref_p_no_alias
155 + alias_stats.call_may_clobber_ref_p_may_alias);
156 fprintf (stream: s, format: " stmt_kills_ref_p: "
157 HOST_WIDE_INT_PRINT_DEC" kills, "
158 HOST_WIDE_INT_PRINT_DEC" queries\n",
159 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
160 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
161 + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
162 fprintf (stream: s, format: " nonoverlapping_component_refs_p: "
163 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
164 HOST_WIDE_INT_PRINT_DEC" queries\n",
165 alias_stats.nonoverlapping_component_refs_p_no_alias,
166 alias_stats.nonoverlapping_component_refs_p_no_alias
167 + alias_stats.nonoverlapping_component_refs_p_may_alias);
168 fprintf (stream: s, format: " nonoverlapping_refs_since_match_p: "
169 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
170 HOST_WIDE_INT_PRINT_DEC" must overlaps, "
171 HOST_WIDE_INT_PRINT_DEC" queries\n",
172 alias_stats.nonoverlapping_refs_since_match_p_no_alias,
173 alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
174 alias_stats.nonoverlapping_refs_since_match_p_no_alias
175 + alias_stats.nonoverlapping_refs_since_match_p_may_alias
176 + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
177 fprintf (stream: s, format: " aliasing_component_refs_p: "
178 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
179 HOST_WIDE_INT_PRINT_DEC" queries\n",
180 alias_stats.aliasing_component_refs_p_no_alias,
181 alias_stats.aliasing_component_refs_p_no_alias
182 + alias_stats.aliasing_component_refs_p_may_alias);
183 dump_alias_stats_in_alias_c (s);
184 fprintf (stream: s, format: "\nModref stats:\n");
185 fprintf (stream: s, format: " modref kill: "
186 HOST_WIDE_INT_PRINT_DEC" kills, "
187 HOST_WIDE_INT_PRINT_DEC" queries\n",
188 alias_stats.modref_kill_yes,
189 alias_stats.modref_kill_yes
190 + alias_stats.modref_kill_no);
191 fprintf (stream: s, format: " modref use: "
192 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
193 HOST_WIDE_INT_PRINT_DEC" queries\n",
194 alias_stats.modref_use_no_alias,
195 alias_stats.modref_use_no_alias
196 + alias_stats.modref_use_may_alias);
197 fprintf (stream: s, format: " modref clobber: "
198 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
199 HOST_WIDE_INT_PRINT_DEC" queries\n"
200 " " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
201 " " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
202 alias_stats.modref_clobber_no_alias,
203 alias_stats.modref_clobber_no_alias
204 + alias_stats.modref_clobber_may_alias,
205 alias_stats.modref_tests,
206 ((double)alias_stats.modref_tests)
207 / (alias_stats.modref_clobber_no_alias
208 + alias_stats.modref_clobber_may_alias),
209 alias_stats.modref_baseptr_tests,
210 ((double)alias_stats.modref_baseptr_tests)
211 / (alias_stats.modref_clobber_no_alias
212 + alias_stats.modref_clobber_may_alias));
213}
214
215
/* Return true if dereferencing PTR may alias a global variable.
   When ESCAPED_LOCAL_P is true, escaped local memory is also considered
   global.  */
219
220bool
221ptr_deref_may_alias_global_p (tree ptr, bool escaped_local_p)
222{
223 struct ptr_info_def *pi;
224
  /* If we end up with a pointer constant here, it may point
     to global memory.  */
227 if (TREE_CODE (ptr) != SSA_NAME)
228 return true;
229
230 pi = SSA_NAME_PTR_INFO (ptr);
231
232 /* If we do not have points-to information for this variable,
233 we have to punt. */
234 if (!pi)
235 return true;
236
237 /* ??? This does not use TBAA to prune globals ptr may not access. */
238 return pt_solution_includes_global (&pi->pt, escaped_local_p);
239}
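
/* As an illustration (not used by the code itself): for

     static int g;
     void f (int *p) { *p = 1; }

   the store through p has to be treated as possibly clobbering g unless
   the points-to solution of p excludes global memory, which is exactly
   the question the predicate above answers.  */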
240
241/* Return true if dereferencing PTR may alias DECL.
242 The caller is responsible for applying TBAA to see if PTR
243 may access DECL at all. */
244
245static bool
246ptr_deref_may_alias_decl_p (tree ptr, tree decl)
247{
248 struct ptr_info_def *pi;
249
250 /* Conversions are irrelevant for points-to information and
251 data-dependence analysis can feed us those. */
252 STRIP_NOPS (ptr);
253
  /* Anything we do not explicitly handle is assumed to alias.  */
255 if ((TREE_CODE (ptr) != SSA_NAME
256 && TREE_CODE (ptr) != ADDR_EXPR
257 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
258 || !POINTER_TYPE_P (TREE_TYPE (ptr))
259 || (!VAR_P (decl)
260 && TREE_CODE (decl) != PARM_DECL
261 && TREE_CODE (decl) != RESULT_DECL))
262 return true;
263
264 /* Disregard pointer offsetting. */
265 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
266 {
267 do
268 {
269 ptr = TREE_OPERAND (ptr, 0);
270 }
271 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
272 return ptr_deref_may_alias_decl_p (ptr, decl);
273 }
274
275 /* ADDR_EXPR pointers either just offset another pointer or directly
276 specify the pointed-to set. */
277 if (TREE_CODE (ptr) == ADDR_EXPR)
278 {
279 tree base = get_base_address (TREE_OPERAND (ptr, 0));
280 if (base
281 && (TREE_CODE (base) == MEM_REF
282 || TREE_CODE (base) == TARGET_MEM_REF))
283 ptr = TREE_OPERAND (base, 0);
284 else if (base
285 && DECL_P (base))
286 return compare_base_decls (base, decl) != 0;
287 else if (base
288 && CONSTANT_CLASS_P (base))
289 return false;
290 else
291 return true;
292 }
293
294 /* Non-aliased variables cannot be pointed to. */
295 if (!may_be_aliased (var: decl))
296 return false;
297
  /* From here on we require an SSA name pointer.  Anything else aliases.  */
299 if (TREE_CODE (ptr) != SSA_NAME
300 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
301 return true;
302
303 /* If we do not have useful points-to information for this pointer
304 we cannot disambiguate anything else. */
305 pi = SSA_NAME_PTR_INFO (ptr);
306 if (!pi)
307 return true;
308
309 return pt_solution_includes (&pi->pt, decl);
310}
311
312/* Return true if dereferenced PTR1 and PTR2 may alias.
313 The caller is responsible for applying TBAA to see if accesses
314 through PTR1 and PTR2 may conflict at all. */
315
316bool
317ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
318{
319 struct ptr_info_def *pi1, *pi2;
320
321 /* Conversions are irrelevant for points-to information and
322 data-dependence analysis can feed us those. */
323 STRIP_NOPS (ptr1);
324 STRIP_NOPS (ptr2);
325
326 /* Disregard pointer offsetting. */
327 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
328 {
329 do
330 {
331 ptr1 = TREE_OPERAND (ptr1, 0);
332 }
333 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
334 return ptr_derefs_may_alias_p (ptr1, ptr2);
335 }
336 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
337 {
338 do
339 {
340 ptr2 = TREE_OPERAND (ptr2, 0);
341 }
342 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
343 return ptr_derefs_may_alias_p (ptr1, ptr2);
344 }
345
346 /* ADDR_EXPR pointers either just offset another pointer or directly
347 specify the pointed-to set. */
348 if (TREE_CODE (ptr1) == ADDR_EXPR)
349 {
350 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
351 if (base
352 && (TREE_CODE (base) == MEM_REF
353 || TREE_CODE (base) == TARGET_MEM_REF))
354 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
355 else if (base
356 && DECL_P (base))
357 return ptr_deref_may_alias_decl_p (ptr: ptr2, decl: base);
358 /* Try ptr2 when ptr1 points to a constant. */
359 else if (base
360 && !CONSTANT_CLASS_P (base))
361 return true;
362 }
363 if (TREE_CODE (ptr2) == ADDR_EXPR)
364 {
365 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
366 if (base
367 && (TREE_CODE (base) == MEM_REF
368 || TREE_CODE (base) == TARGET_MEM_REF))
369 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
370 else if (base
371 && DECL_P (base))
372 return ptr_deref_may_alias_decl_p (ptr: ptr1, decl: base);
373 else
374 return true;
375 }
376
377 /* From here we require SSA name pointers. Anything else aliases. */
378 if (TREE_CODE (ptr1) != SSA_NAME
379 || TREE_CODE (ptr2) != SSA_NAME
380 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
381 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
382 return true;
383
  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias, so
     shortcut that here.  */
387 if (ptr1 == ptr2)
388 return true;
389
390 /* If we do not have useful points-to information for either pointer
391 we cannot disambiguate anything else. */
392 pi1 = SSA_NAME_PTR_INFO (ptr1);
393 pi2 = SSA_NAME_PTR_INFO (ptr2);
394 if (!pi1 || !pi2)
395 return true;
396
397 /* ??? This does not use TBAA to prune decls from the intersection
398 that not both pointers may access. */
399 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
400}
401
402/* Return true if dereferencing PTR may alias *REF.
403 The caller is responsible for applying TBAA to see if PTR
404 may access *REF at all. */
405
406static bool
407ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
408{
409 tree base = ao_ref_base (ref);
410
411 if (TREE_CODE (base) == MEM_REF
412 || TREE_CODE (base) == TARGET_MEM_REF)
413 return ptr_derefs_may_alias_p (ptr1: ptr, TREE_OPERAND (base, 0));
414 else if (DECL_P (base))
415 return ptr_deref_may_alias_decl_p (ptr, decl: base);
416
417 return true;
418}
419
420/* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
421
422bool
423ptrs_compare_unequal (tree ptr1, tree ptr2)
424{
425 /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs, which would need points-to adjustments to track
     them in the points-to sets.  */
430 tree obj1 = NULL_TREE;
431 tree obj2 = NULL_TREE;
432 if (TREE_CODE (ptr1) == ADDR_EXPR)
433 {
434 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
435 if (! tem)
436 return false;
437 if (VAR_P (tem)
438 || TREE_CODE (tem) == PARM_DECL
439 || TREE_CODE (tem) == RESULT_DECL)
440 obj1 = tem;
441 else if (TREE_CODE (tem) == MEM_REF)
442 ptr1 = TREE_OPERAND (tem, 0);
443 }
444 if (TREE_CODE (ptr2) == ADDR_EXPR)
445 {
446 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
447 if (! tem)
448 return false;
449 if (VAR_P (tem)
450 || TREE_CODE (tem) == PARM_DECL
451 || TREE_CODE (tem) == RESULT_DECL)
452 obj2 = tem;
453 else if (TREE_CODE (tem) == MEM_REF)
454 ptr2 = TREE_OPERAND (tem, 0);
455 }
456
457 /* Canonicalize ptr vs. object. */
458 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
459 {
460 std::swap (a&: ptr1, b&: ptr2);
461 std::swap (a&: obj1, b&: obj2);
462 }
463
464 if (obj1 && obj2)
465 /* Other code handles this correctly, no need to duplicate it here. */;
466 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
467 {
468 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that the restrict pointed-to
	 object may in fact be obj1.  */
472 if (!pi
473 || pi->pt.vars_contains_restrict
474 || pi->pt.vars_contains_interposable)
475 return false;
476 if (VAR_P (obj1)
477 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
478 {
479 varpool_node *node = varpool_node::get (decl: obj1);
480 /* If obj1 may bind to NULL give up (see below). */
481 if (! node
482 || ! node->nonzero_address ()
483 || ! decl_binds_to_current_def_p (obj1))
484 return false;
485 }
486 return !pt_solution_includes (&pi->pt, obj1);
487 }
488 else if (TREE_CODE (ptr1) == SSA_NAME)
489 {
490 struct ptr_info_def *pi1 = SSA_NAME_PTR_INFO (ptr1);
491 if (!pi1
492 || pi1->pt.vars_contains_restrict
493 || pi1->pt.vars_contains_interposable)
494 return false;
495 if (integer_zerop (ptr2) && !pi1->pt.null)
496 return true;
497 if (TREE_CODE (ptr2) == SSA_NAME)
498 {
499 struct ptr_info_def *pi2 = SSA_NAME_PTR_INFO (ptr2);
500 if (!pi2
501 || pi2->pt.vars_contains_restrict
502 || pi2->pt.vars_contains_interposable)
503 return false;
504 if ((!pi1->pt.null || !pi2->pt.null)
505 /* ??? We do not represent FUNCTION_DECL and LABEL_DECL
506 in pt.vars but only set pt.vars_contains_nonlocal. This
507 makes compares involving those and other nonlocals
508 imprecise. */
509 && (!pi1->pt.vars_contains_nonlocal
510 || !pi2->pt.vars_contains_nonlocal)
511 && (!pt_solution_includes_const_pool (&pi1->pt)
512 || !pt_solution_includes_const_pool (&pi2->pt)))
513 return !pt_solutions_intersect (&pi1->pt, &pi2->pt);
514 }
515 }
516
517 return false;
518}
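
/* Source-level illustration (assuming a local that does not escape):

     int x;
     int *q = ...;     some pointer that provably cannot point to x
     if (q != &x)      this test can then be folded to true

   ptrs_compare_unequal returns true here because the points-to solution
   of q does not include x; the restrict and interposable checks above
   guard the cases where that conclusion would not be safe.  */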
519
/* Returns whether a reference with base BASE may refer to global memory.
   When ESCAPED_LOCAL_P is true, escaped local memory is also considered
   global.  */
523
524static bool
525ref_may_alias_global_p_1 (tree base, bool escaped_local_p)
526{
527 if (DECL_P (base))
528 return (is_global_var (t: base)
529 || (escaped_local_p
530 && pt_solution_includes (&cfun->gimple_df->escaped_return,
531 base)));
532 else if (TREE_CODE (base) == MEM_REF
533 || TREE_CODE (base) == TARGET_MEM_REF)
534 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0),
535 escaped_local_p);
536 return true;
537}
538
539bool
540ref_may_alias_global_p (ao_ref *ref, bool escaped_local_p)
541{
542 tree base = ao_ref_base (ref);
543 return ref_may_alias_global_p_1 (base, escaped_local_p);
544}
545
546bool
547ref_may_alias_global_p (tree ref, bool escaped_local_p)
548{
549 tree base = get_base_address (t: ref);
550 return ref_may_alias_global_p_1 (base, escaped_local_p);
551}
552
/* Return true if STMT may clobber global memory.
   When ESCAPED_LOCAL_P is true, escaped local memory is also considered
   global.  */
556
557bool
558stmt_may_clobber_global_p (gimple *stmt, bool escaped_local_p)
559{
560 tree lhs;
561
562 if (!gimple_vdef (g: stmt))
563 return false;
564
  /* ??? We could ask the oracle whether an artificial pointer
     dereference with points-to information covering all global memory
     (what about non-address-taken memory?) may be clobbered by this
     call.  As there is at the moment no convenient way of doing that
     without generating garbage, do some manual checking instead.
571 ??? We could make a NULL ao_ref argument to the various
572 predicates special, meaning any global memory. */
573
574 switch (gimple_code (g: stmt))
575 {
576 case GIMPLE_ASSIGN:
577 lhs = gimple_assign_lhs (gs: stmt);
578 return (TREE_CODE (lhs) != SSA_NAME
579 && ref_may_alias_global_p (ref: lhs, escaped_local_p));
580 case GIMPLE_CALL:
581 return true;
582 default:
583 return true;
584 }
585}
586
587
588/* Dump alias information on FILE. */
589
590void
591dump_alias_info (FILE *file)
592{
593 unsigned i;
594 tree ptr;
595 const char *funcname
596 = lang_hooks.decl_printable_name (current_function_decl, 2);
597 tree var;
598
599 fprintf (stream: file, format: "\n\nAlias information for %s\n\n", funcname);
600
601 fprintf (stream: file, format: "Aliased symbols\n\n");
602
603 FOR_EACH_LOCAL_DECL (cfun, i, var)
604 {
605 if (may_be_aliased (var))
606 dump_variable (file, var);
607 }
608
609 fprintf (stream: file, format: "\nCall clobber information\n");
610
611 fprintf (stream: file, format: "\nESCAPED");
612 dump_points_to_solution (file, &cfun->gimple_df->escaped);
613
614 fprintf (stream: file, format: "\nESCAPED_RETURN");
615 dump_points_to_solution (file, &cfun->gimple_df->escaped_return);
616
617 fprintf (stream: file, format: "\n\nFlow-insensitive points-to information\n\n");
618
619 FOR_EACH_SSA_NAME (i, ptr, cfun)
620 {
621 struct ptr_info_def *pi;
622
623 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
624 || SSA_NAME_IN_FREE_LIST (ptr))
625 continue;
626
627 pi = SSA_NAME_PTR_INFO (ptr);
628 if (pi)
629 dump_points_to_info_for (file, ptr);
630 }
631
632 fprintf (stream: file, format: "\n");
633}
634
635
636/* Dump alias information on stderr. */
637
638DEBUG_FUNCTION void
639debug_alias_info (void)
640{
641 dump_alias_info (stderr);
642}
643
644
645/* Dump the points-to set *PT into FILE. */
646
647void
648dump_points_to_solution (FILE *file, struct pt_solution *pt)
649{
650 if (pt->anything)
651 fprintf (stream: file, format: ", points-to anything");
652
653 if (pt->nonlocal)
654 fprintf (stream: file, format: ", points-to non-local");
655
656 if (pt->escaped)
657 fprintf (stream: file, format: ", points-to escaped");
658
659 if (pt->ipa_escaped)
660 fprintf (stream: file, format: ", points-to unit escaped");
661
662 if (pt->null)
663 fprintf (stream: file, format: ", points-to NULL");
664
665 if (pt->const_pool)
666 fprintf (stream: file, format: ", points-to const-pool");
667
668 if (pt->vars)
669 {
670 fprintf (stream: file, format: ", points-to vars: ");
671 dump_decl_set (file, pt->vars);
672 if (pt->vars_contains_nonlocal
673 || pt->vars_contains_escaped
674 || pt->vars_contains_escaped_heap
675 || pt->vars_contains_restrict
676 || pt->vars_contains_interposable)
677 {
678 const char *comma = "";
679 fprintf (stream: file, format: " (");
680 if (pt->vars_contains_nonlocal)
681 {
682 fprintf (stream: file, format: "nonlocal");
683 comma = ", ";
684 }
685 if (pt->vars_contains_escaped)
686 {
687 fprintf (stream: file, format: "%sescaped", comma);
688 comma = ", ";
689 }
690 if (pt->vars_contains_escaped_heap)
691 {
692 fprintf (stream: file, format: "%sescaped heap", comma);
693 comma = ", ";
694 }
695 if (pt->vars_contains_restrict)
696 {
697 fprintf (stream: file, format: "%srestrict", comma);
698 comma = ", ";
699 }
700 if (pt->vars_contains_interposable)
701 fprintf (stream: file, format: "%sinterposable", comma);
702 fprintf (stream: file, format: ")");
703 }
704 }
705}
706
707
708/* Unified dump function for pt_solution. */
709
710DEBUG_FUNCTION void
711debug (pt_solution &ref)
712{
713 dump_points_to_solution (stderr, pt: &ref);
714}
715
716DEBUG_FUNCTION void
717debug (pt_solution *ptr)
718{
719 if (ptr)
720 debug (ref&: *ptr);
721 else
722 fprintf (stderr, format: "<nil>\n");
723}
724
725
726/* Dump points-to information for SSA_NAME PTR into FILE. */
727
728void
729dump_points_to_info_for (FILE *file, tree ptr)
730{
731 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
732
733 print_generic_expr (file, ptr, dump_flags);
734
735 if (pi)
736 dump_points_to_solution (file, pt: &pi->pt);
737 else
738 fprintf (stream: file, format: ", points-to anything");
739
740 fprintf (stream: file, format: "\n");
741}
742
743
744/* Dump points-to information for VAR into stderr. */
745
746DEBUG_FUNCTION void
747debug_points_to_info_for (tree var)
748{
749 dump_points_to_info_for (stderr, ptr: var);
750}
751
752
753/* Initializes the alias-oracle reference representation *R from REF. */
754
755void
756ao_ref_init (ao_ref *r, tree ref)
757{
758 r->ref = ref;
759 r->base = NULL_TREE;
760 r->offset = 0;
761 r->size = -1;
762 r->max_size = -1;
763 r->ref_alias_set = -1;
764 r->base_alias_set = -1;
765 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
766}
767
768/* Returns the base object of the memory reference *REF. */
769
770tree
771ao_ref_base (ao_ref *ref)
772{
773 bool reverse;
774
775 if (ref->base)
776 return ref->base;
777 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
778 &ref->max_size, &reverse);
779 return ref->base;
780}
781
782/* Returns the base object alias set of the memory reference *REF. */
783
784alias_set_type
785ao_ref_base_alias_set (ao_ref *ref)
786{
787 tree base_ref;
788 if (ref->base_alias_set != -1)
789 return ref->base_alias_set;
790 if (!ref->ref)
791 return 0;
792 base_ref = ref->ref;
793 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
794 base_ref = TREE_OPERAND (base_ref, 0);
795 while (handled_component_p (t: base_ref))
796 base_ref = TREE_OPERAND (base_ref, 0);
797 ref->base_alias_set = get_alias_set (base_ref);
798 return ref->base_alias_set;
799}
800
801/* Returns the reference alias set of the memory reference *REF. */
802
803alias_set_type
804ao_ref_alias_set (ao_ref *ref)
805{
806 if (ref->ref_alias_set != -1)
807 return ref->ref_alias_set;
808 if (!ref->ref)
809 return 0;
810 ref->ref_alias_set = get_alias_set (ref->ref);
811 return ref->ref_alias_set;
812}
813
814/* Returns a type satisfying
815 get_deref_alias_set (type) == ao_ref_base_alias_set (REF). */
816
817tree
818ao_ref_base_alias_ptr_type (ao_ref *ref)
819{
820 tree base_ref;
821
822 if (!ref->ref)
823 return NULL_TREE;
824 base_ref = ref->ref;
825 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
826 base_ref = TREE_OPERAND (base_ref, 0);
827 while (handled_component_p (t: base_ref))
828 base_ref = TREE_OPERAND (base_ref, 0);
829 tree ret = reference_alias_ptr_type (base_ref);
830 return ret;
831}
832
833/* Returns a type satisfying
834 get_deref_alias_set (type) == ao_ref_alias_set (REF). */
835
836tree
837ao_ref_alias_ptr_type (ao_ref *ref)
838{
839 if (!ref->ref)
840 return NULL_TREE;
841 tree ret = reference_alias_ptr_type (ref->ref);
842 return ret;
843}
844
845/* Return the alignment of the access *REF and store it in the *ALIGN
846 and *BITPOS pairs. Returns false if no alignment could be determined.
847 See get_object_alignment_2 for details. */
848
849bool
850ao_ref_alignment (ao_ref *ref, unsigned int *align,
851 unsigned HOST_WIDE_INT *bitpos)
852{
853 if (ref->ref)
854 return get_object_alignment_1 (ref->ref, align, bitpos);
855
  /* When we just have ref->base we cannot use get_object_alignment since
     that will eventually use the type of the apparent access, while for
     example ao_ref_init_from_ptr_and_range is not careful to adjust that.  */
859 *align = BITS_PER_UNIT;
860 HOST_WIDE_INT offset;
861 if (!ref->offset.is_constant (const_value: &offset)
862 || !get_object_alignment_2 (ref->base, align, bitpos, true))
863 return false;
864 *bitpos += (unsigned HOST_WIDE_INT)offset * BITS_PER_UNIT;
865 *bitpos = *bitpos & (*align - 1);
866 return true;
867}
868
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the
   assumption that RANGE_KNOWN is set.

   The access is assumed to be only at or after the pointer target adjusted
   by the offset, not before it (even in the case RANGE_KNOWN is false).  */
875
876void
877ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
878 bool range_known,
879 poly_int64 offset,
880 poly_int64 size,
881 poly_int64 max_size)
882{
883 poly_int64 t, extra_offset = 0;
884
885 ref->ref = NULL_TREE;
886 if (TREE_CODE (ptr) == SSA_NAME)
887 {
888 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
889 if (gimple_assign_single_p (gs: stmt)
890 && gimple_assign_rhs_code (gs: stmt) == ADDR_EXPR)
891 ptr = gimple_assign_rhs1 (gs: stmt);
892 else if (is_gimple_assign (gs: stmt)
893 && gimple_assign_rhs_code (gs: stmt) == POINTER_PLUS_EXPR
894 && ptrdiff_tree_p (gimple_assign_rhs2 (gs: stmt), &extra_offset))
895 {
896 ptr = gimple_assign_rhs1 (gs: stmt);
897 extra_offset *= BITS_PER_UNIT;
898 }
899 }
900
901 if (TREE_CODE (ptr) == ADDR_EXPR)
902 {
903 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
904 if (ref->base)
905 ref->offset = BITS_PER_UNIT * t;
906 else
907 {
908 range_known = false;
909 ref->offset = 0;
910 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
911 }
912 }
913 else
914 {
915 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
916 ref->base = build2 (MEM_REF, char_type_node,
917 ptr, null_pointer_node);
918 ref->offset = 0;
919 }
920 ref->offset += extra_offset + offset;
921 if (range_known)
922 {
923 ref->max_size = max_size;
924 ref->size = size;
925 }
926 else
927 ref->max_size = ref->size = -1;
928 ref->ref_alias_set = 0;
929 ref->base_alias_set = 0;
930 ref->volatile_p = false;
931}
932
933/* Init an alias-oracle reference representation from a gimple pointer
934 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   at or after the pointer target, not before it.  */
937
938void
939ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
940{
941 poly_int64 size_hwi;
942 if (size
943 && poly_int_tree_p (t: size, value: &size_hwi)
944 && coeffs_in_range_p (a: size_hwi, b: 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
945 {
946 size_hwi = size_hwi * BITS_PER_UNIT;
947 ao_ref_init_from_ptr_and_range (ref, ptr, range_known: true, offset: 0, size: size_hwi, max_size: size_hwi);
948 }
949 else
950 ao_ref_init_from_ptr_and_range (ref, ptr, range_known: false, offset: 0, size: -1, max_size: -1);
951}
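
/* Minimal usage sketch (illustrative; DEST_ARG and LEN_ARG stand for the
   arguments of some call being analyzed): the memory written by a call
   like memset (dest, c, n) can be described as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, dest_arg, len_arg);

   where a LEN_ARG that is not a known integer simply results in an
   unknown size/max_size of -1 via the fallback above.  */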
952
953/* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
954 Return -1 if S1 < S2
955 Return 1 if S1 > S2
956 Return 0 if equal or incomparable. */
957
958static int
959compare_sizes (tree s1, tree s2)
960{
961 if (!s1 || !s2)
962 return 0;
963
964 poly_uint64 size1;
965 poly_uint64 size2;
966
967 if (!poly_int_tree_p (t: s1, value: &size1) || !poly_int_tree_p (t: s2, value: &size2))
968 return 0;
969 if (known_lt (size1, size2))
970 return -1;
971 if (known_lt (size2, size1))
972 return 1;
973 return 0;
974}
975
/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if the types are of equal size or we cannot compare them.  */
980
981static int
982compare_type_sizes (tree type1, tree type2)
983{
984 /* Be conservative for arrays and vectors. We want to support partial
985 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
986 while (TREE_CODE (type1) == ARRAY_TYPE
987 || VECTOR_TYPE_P (type1))
988 type1 = TREE_TYPE (type1);
989 while (TREE_CODE (type2) == ARRAY_TYPE
990 || VECTOR_TYPE_P (type2))
991 type2 = TREE_TYPE (type2);
992 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
993}
994
995/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
996 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
997 decide. */
998
999static inline int
1000same_type_for_tbaa (tree type1, tree type2)
1001{
1002 type1 = TYPE_MAIN_VARIANT (type1);
1003 type2 = TYPE_MAIN_VARIANT (type2);
1004
1005 /* Handle the most common case first. */
1006 if (type1 == type2)
1007 return 1;
1008
1009 /* If we would have to do structural comparison bail out. */
1010 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
1011 || TYPE_STRUCTURAL_EQUALITY_P (type2))
1012 return -1;
1013
1014 /* Compare the canonical types. */
1015 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
1016 return 1;
1017
1018 /* ??? Array types are not properly unified in all cases as we have
1019 spurious changes in the index types for example. Removing this
1020 causes all sorts of problems with the Fortran frontend. */
1021 if (TREE_CODE (type1) == ARRAY_TYPE
1022 && TREE_CODE (type2) == ARRAY_TYPE)
1023 return -1;
1024
1025 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
1026 object of one of its constrained subtypes, e.g. when a function with an
1027 unconstrained parameter passed by reference is called on an object and
1028 inlined. But, even in the case of a fixed size, type and subtypes are
1029 not equivalent enough as to share the same TYPE_CANONICAL, since this
1030 would mean that conversions between them are useless, whereas they are
1031 not (e.g. type and subtypes can have different modes). So, in the end,
1032 they are only guaranteed to have the same alias set. */
1033 alias_set_type set1 = get_alias_set (type1);
1034 alias_set_type set2 = get_alias_set (type2);
1035 if (set1 == set2)
1036 return -1;
1037
1038 /* Pointers to void are considered compatible with all other pointers,
1039 so for two pointers see what the alias set resolution thinks. */
1040 if (POINTER_TYPE_P (type1)
1041 && POINTER_TYPE_P (type2)
1042 && alias_sets_conflict_p (set1, set2))
1043 return -1;
1044
1045 /* The types are known to be not equal. */
1046 return 0;
1047}
1048
1049/* Return true if TYPE is a composite type (i.e. we may apply one of handled
1050 components on it). */
1051
1052static bool
1053type_has_components_p (tree type)
1054{
1055 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
1056 || TREE_CODE (type) == COMPLEX_TYPE;
1057}
1058
/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that the outermost arrays
   may just partly overlap.
1063
1064 Try to disambiguate using the access path starting from the match
1065 and return false if there is no conflict.
1066
1067 Helper for aliasing_component_refs_p. */
1068
1069static bool
1070aliasing_matching_component_refs_p (tree match1, tree ref1,
1071 poly_int64 offset1, poly_int64 max_size1,
1072 tree match2, tree ref2,
1073 poly_int64 offset2, poly_int64 max_size2,
1074 bool partial_overlap)
1075{
1076 poly_int64 offadj, sztmp, msztmp;
1077 bool reverse;
1078
1079 if (!partial_overlap)
1080 {
1081 get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
1082 offset2 -= offadj;
1083 get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
1084 offset1 -= offadj;
1085 if (!ranges_maybe_overlap_p (pos1: offset1, size1: max_size1, pos2: offset2, size2: max_size2))
1086 {
1087 ++alias_stats.aliasing_component_refs_p_no_alias;
1088 return false;
1089 }
1090 }
1091
1092 int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
1093 partial_overlap);
1094 if (cmp == 1
1095 || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
1096 {
1097 ++alias_stats.aliasing_component_refs_p_no_alias;
1098 return false;
1099 }
1100 ++alias_stats.aliasing_component_refs_p_may_alias;
1101 return true;
1102}
1103
/* Return true if REF is a reference to a zero-sized trailing array, i.e.
1105 struct foo {int bar; int array[0];} *fooptr;
1106 fooptr->array. */
1107
1108static bool
1109component_ref_to_zero_sized_trailing_array_p (tree ref)
1110{
1111 return (TREE_CODE (ref) == COMPONENT_REF
1112 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
1113 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
1114 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
1115 && array_ref_flexible_size_p (ref));
1116}
1117
1118/* Worker for aliasing_component_refs_p. Most parameters match parameters of
1119 aliasing_component_refs_p.
1120
   Walk access path REF2 and try to find a type matching TYPE1
   (which is the start of a possibly aliasing access path REF1).
   If a match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if a match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
   in access path REF2 and -1 if we are not sure.  */
1130
1131static int
1132aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
1133 poly_int64 offset1, poly_int64 max_size1,
1134 tree end_struct_ref1,
1135 tree ref2, tree base2,
1136 poly_int64 offset2, poly_int64 max_size2,
1137 bool *maybe_match)
1138{
1139 tree ref = ref2;
1140 int same_p = 0;
1141
1142 while (true)
1143 {
1144 /* We walk from inner type to the outer types. If type we see is
1145 already too large to be part of type1, terminate the search. */
1146 int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
1147
1148 if (cmp < 0
1149 && (!end_struct_ref1
1150 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
1151 TREE_TYPE (ref)) < 0))
1152 break;
1153 /* If types may be of same size, see if we can decide about their
1154 equality. */
1155 if (cmp == 0)
1156 {
1157 same_p = same_type_for_tbaa (TREE_TYPE (ref), type2: type1);
1158 if (same_p == 1)
1159 break;
	  /* In case we can't decide whether the types are the same, try to
	     continue looking for an exact match.
	     Remember however that we possibly saw a match,
	     to bypass the access path continuation tests we do later.  */
1164 if (same_p == -1)
1165 *maybe_match = true;
1166 }
1167 if (!handled_component_p (t: ref))
1168 break;
1169 ref = TREE_OPERAND (ref, 0);
1170 }
1171 if (same_p == 1)
1172 {
1173 bool partial_overlap = false;
1174
      /* We assume that arrays can overlap by a multiple of their element
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap can happen only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
1180 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
1181 && (!TYPE_SIZE (TREE_TYPE (base1))
1182 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
1183 || ref == base2))
1184 {
1185 /* Setting maybe_match to true triggers
1186 nonoverlapping_component_refs_p test later that still may do
1187 useful disambiguation. */
1188 *maybe_match = true;
1189 partial_overlap = true;
1190 }
1191 return aliasing_matching_component_refs_p (match1: base1, ref1,
1192 offset1, max_size1,
1193 match2: ref, ref2,
1194 offset2, max_size2,
1195 partial_overlap);
1196 }
1197 return -1;
1198}
1199
/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed into a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.
1207
1208 BASE_TYPE2 is type of base2. END_STRUCT_REF2 is non-NULL if there is
1209 a trailing array access in the TBAA part of access path2.
1210 BASE2_ALIAS_SET is the alias set of base2. */
1211
1212bool
1213access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1214 alias_set_type ref1_alias_set,
1215 tree base_type2, tree end_struct_ref2,
1216 alias_set_type base2_alias_set)
1217{
  /* An access path cannot continue past types with no components.  */
1219 if (!type_has_components_p (type: ref_type1))
1220 return false;
1221
  /* If the first access path ends in a type too small to hold the base of
     the second access path, the paths typically cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to a union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
1229 if (!end_struct_past_end1)
1230 {
1231 if (compare_type_sizes (type1: ref_type1, type2: base_type2) < 0)
1232 return false;
      /* If path2 contains a trailing array access we can strengthen the
	 check to verify that the size of the element of the trailing array
	 fits as well.  In fact we could check for offset + type_size, but we
	 do not track offsets and this is quite a corner case.  */
1237 if (end_struct_ref2
1238 && compare_type_sizes (type1: ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1239 return false;
1240 }
1241 return (base2_alias_set == ref1_alias_set
1242 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1243}
1244
1245/* Determine if the two component references REF1 and REF2 which are
1246 based on access types TYPE1 and TYPE2 and of which at least one is based
1247 on an indirect reference may alias.
1248 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1249 are the respective alias sets. */
1250
1251static bool
1252aliasing_component_refs_p (tree ref1,
1253 alias_set_type ref1_alias_set,
1254 alias_set_type base1_alias_set,
1255 poly_int64 offset1, poly_int64 max_size1,
1256 tree ref2,
1257 alias_set_type ref2_alias_set,
1258 alias_set_type base2_alias_set,
1259 poly_int64 offset2, poly_int64 max_size2)
1260{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset-based disambiguation.  This handles
     for example
1264 struct A { int i; int j; } *q;
1265 struct B { struct A a; int k; } *p;
1266 disambiguating q->i and p->a.j. */
1267 tree base1, base2;
1268 tree type1, type2;
1269 bool maybe_match = false;
1270 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1271 bool end_struct_past_end1 = false;
1272 bool end_struct_past_end2 = false;
1273
1274 /* Choose bases and base types to search for.
1275 The access path is as follows:
1276 base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a zero-sized
     or trailing array.

     We generally discard the segment after end_of_tbaa_ref, however
     we need to be careful in case it contains a zero-sized or trailing array.
     These may appear after a reference to a union and in this case we must
     not disambiguate type punning scenarios.
1284
1285 We set:
1286 base1 to point to base
1287
1288 ref1 to point to end_of_tbaa_ref
1289
     end_struct_ref1 to point to the trailing reference (if it exists
     in the range base....end_of_tbaa_ref)
1292
1293 end_struct_past_end1 is true if this trailing reference occurs in
1294 end_of_tbaa_ref...actual_ref. */
1295 base1 = ref1;
1296 while (handled_component_p (t: base1))
1297 {
      /* Generally access paths are monotone in the size of the accessed
	 object.  The exception are trailing arrays of structures.  I.e.,
1300 struct a {int array[0];};
1301 or
1302 struct a {int array1[0]; int array[];};
1303 Such struct has size 0 but accesses to a.array may have non-zero size.
1304 In this case the size of TREE_TYPE (base1) is smaller than
1305 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1306
1307 Because we compare sizes of arrays just by sizes of their elements,
1308 we only need to care about zero sized array fields here. */
1309 if (component_ref_to_zero_sized_trailing_array_p (ref: base1))
1310 {
1311 gcc_checking_assert (!end_struct_ref1);
1312 end_struct_ref1 = base1;
1313 }
1314 if (ends_tbaa_access_path_p (base1))
1315 {
1316 ref1 = TREE_OPERAND (base1, 0);
1317 if (end_struct_ref1)
1318 {
1319 end_struct_past_end1 = true;
1320 end_struct_ref1 = NULL;
1321 }
1322 }
1323 base1 = TREE_OPERAND (base1, 0);
1324 }
1325 type1 = TREE_TYPE (base1);
1326 base2 = ref2;
1327 while (handled_component_p (t: base2))
1328 {
1329 if (component_ref_to_zero_sized_trailing_array_p (ref: base2))
1330 {
1331 gcc_checking_assert (!end_struct_ref2);
1332 end_struct_ref2 = base2;
1333 }
1334 if (ends_tbaa_access_path_p (base2))
1335 {
1336 ref2 = TREE_OPERAND (base2, 0);
1337 if (end_struct_ref2)
1338 {
1339 end_struct_past_end2 = true;
1340 end_struct_ref2 = NULL;
1341 }
1342 }
1343 base2 = TREE_OPERAND (base2, 0);
1344 }
1345 type2 = TREE_TYPE (base2);
1346
1347 /* Now search for the type1 in the access path of ref2. This
1348 would be a common base for doing offset based disambiguation on.
1349 This however only makes sense if type2 is big enough to hold type1. */
1350 int cmp_outer = compare_type_sizes (type1: type2, type2: type1);
1351
1352 /* If type2 is big enough to contain type1 walk its access path.
1353 We also need to care of arrays at the end of structs that may extend
1354 beyond the end of structure. If this occurs in the TBAA part of the
1355 access path, we need to consider the increased type as well. */
1356 if (cmp_outer >= 0
1357 || (end_struct_ref2
1358 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type2: type1) >= 0))
1359 {
1360 int res = aliasing_component_refs_walk (ref1, type1, base1,
1361 offset1, max_size1,
1362 end_struct_ref1,
1363 ref2, base2, offset2, max_size2,
1364 maybe_match: &maybe_match);
1365 if (res != -1)
1366 return res;
1367 }
1368
1369 /* If we didn't find a common base, try the other way around. */
1370 if (cmp_outer <= 0
1371 || (end_struct_ref1
1372 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type2) <= 0))
1373 {
1374 int res = aliasing_component_refs_walk (ref1: ref2, type1: type2, base1: base2,
1375 offset1: offset2, max_size1: max_size2,
1376 end_struct_ref1: end_struct_ref2,
1377 ref2: ref1, base2: base1, offset2: offset1, max_size2: max_size1,
1378 maybe_match: &maybe_match);
1379 if (res != -1)
1380 return res;
1381 }
1382
  /* In the following code we make the assumption that the types in the
     access paths do not overlap and thus accesses alias only if one path
     can be a continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
1387 if (maybe_match)
1388 {
1389 if (!nonoverlapping_component_refs_p (ref1, ref2))
1390 {
1391 ++alias_stats.aliasing_component_refs_p_may_alias;
1392 return true;
1393 }
1394 ++alias_stats.aliasing_component_refs_p_no_alias;
1395 return false;
1396 }
1397
1398 if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1399 ref1_alias_set,
1400 base_type2: type2, end_struct_ref2,
1401 base2_alias_set)
1402 || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end1: end_struct_past_end2,
1403 ref1_alias_set: ref2_alias_set,
1404 base_type2: type1, end_struct_ref2: end_struct_ref1,
1405 base2_alias_set: base1_alias_set))
1406 {
1407 ++alias_stats.aliasing_component_refs_p_may_alias;
1408 return true;
1409 }
1410 ++alias_stats.aliasing_component_refs_p_no_alias;
1411 return false;
1412}
1413
1414/* FIELD1 and FIELD2 are two fields of component refs. We assume
1415 that bases of both component refs are either equivalent or nonoverlapping.
1416 We do not assume that the containers of FIELD1 and FIELD2 are of the
1417 same type or size.
1418
   Return 0 if, assuming the base addresses of the component refs are the
   same, FIELD1 and FIELD2 also have the same address.  Note that FIELD1
   and FIELD2 need not be of the same type or size.
1422
1423 Return 1 if FIELD1 and FIELD2 are non-overlapping.
1424
1425 Return -1 otherwise.
1426
   The main difference between 0 and -1 is to let
   nonoverlapping_refs_since_match_p discover the semantically
   equivalent part of the access path.
1430
1431 Note that this function is used even with -fno-strict-aliasing
1432 and makes use of no TBAA assumptions. */
1433
1434static int
1435nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1436{
1437 /* If both fields are of the same type, we could save hard work of
1438 comparing offsets. */
1439 tree type1 = DECL_CONTEXT (field1);
1440 tree type2 = DECL_CONTEXT (field2);
1441
1442 if (TREE_CODE (type1) == RECORD_TYPE
1443 && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1444 field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1445 if (TREE_CODE (type2) == RECORD_TYPE
1446 && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1447 field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1448
1449 /* ??? Bitfields can overlap at RTL level so punt on them.
1450 FIXME: RTL expansion should be fixed by adjusting the access path
1451 when producing MEM_ATTRs for MEMs which are wider than
1452 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1453 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1454 return -1;
1455
1456 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1457 if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1458 return field1 != field2;
1459
  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
1465 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1466 DECL_FIELD_OFFSET (field2))
1467 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1468 DECL_FIELD_BIT_OFFSET (field2)))
1469 return 0;
1470
1471 /* Note that it may be possible to use component_ref_field_offset
1472 which would provide offsets as trees. However constructing and folding
1473 trees is expensive and does not seem to be worth the compile time
1474 cost. */
1475
1476 poly_uint64 offset1, offset2;
1477 poly_uint64 bit_offset1, bit_offset2;
1478
1479 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), value: &offset1)
1480 && poly_int_tree_p (DECL_FIELD_OFFSET (field2), value: &offset2)
1481 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), value: &bit_offset1)
1482 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), value: &bit_offset2))
1483 {
1484 offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1485 offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1486
1487 if (known_eq (offset1, offset2))
1488 return 0;
1489
1490 poly_uint64 size1, size2;
1491
1492 if (poly_int_tree_p (DECL_SIZE (field1), value: &size1)
1493 && poly_int_tree_p (DECL_SIZE (field2), value: &size2)
1494 && !ranges_maybe_overlap_p (pos1: offset1, size1, pos2: offset2, size2))
1495 return 1;
1496 }
1497 /* Resort to slower overlap checking by looking for matching types in
1498 the middle of access path. */
1499 return -1;
1500}
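
/* For illustration: given

     struct S { int a; int b; } *p, *q;

   the FIELD_DECLs of p->a and q->b yield 1 (the accesses cannot overlap
   once their bases are known to be equal or disjoint), p->a and q->a
   yield 0 (same offset from the base), and -1 is returned when the
   fields cannot be cheaply compared (e.g. certain bit-fields).  */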
1501
/* Return the low bound of an array ref.  Do not produce new trees
   and thus do not care about the particular type of integer constants
   and placeholder exprs.  */
1505
1506static tree
1507cheap_array_ref_low_bound (tree ref)
1508{
1509 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1510
1511 /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE
     of the domain type, or it is zero.  */
1514 if (TREE_OPERAND (ref, 2))
1515 return TREE_OPERAND (ref, 2);
1516 else if (domain_type && TYPE_MIN_VALUE (domain_type))
1517 return TYPE_MIN_VALUE (domain_type);
1518 else
1519 return integer_zero_node;
1520}
1521
1522/* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1523 completely disjoint.
1524
1525 Return 1 if the refs are non-overlapping.
1526 Return 0 if they are possibly overlapping but if so the overlap again
1527 starts on the same address.
1528 Return -1 otherwise. */
1529
1530int
1531nonoverlapping_array_refs_p (tree ref1, tree ref2)
1532{
1533 tree index1 = TREE_OPERAND (ref1, 1);
1534 tree index2 = TREE_OPERAND (ref2, 1);
1535 tree low_bound1 = cheap_array_ref_low_bound (ref: ref1);
1536 tree low_bound2 = cheap_array_ref_low_bound (ref: ref2);
1537
1538 /* Handle zero offsets first: we do not need to match type size in this
1539 case. */
1540 if (operand_equal_p (index1, low_bound1, flags: 0)
1541 && operand_equal_p (index2, low_bound2, flags: 0))
1542 return 0;
1543
1544 /* If type sizes are different, give up.
1545
1546 Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes the size in alignment units.
     Otherwise the size is TYPE_SIZE of the element type.
1549 Handle only common cases where types are of the same "kind". */
1550 if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1551 return -1;
1552
1553 tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1554 tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1555
1556 if (TREE_OPERAND (ref1, 3))
1557 {
1558 if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1559 || !operand_equal_p (TREE_OPERAND (ref1, 3),
1560 TREE_OPERAND (ref2, 3), flags: 0))
1561 return -1;
1562 }
1563 else
1564 {
1565 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1566 TYPE_SIZE_UNIT (elmt_type2), flags: 0))
1567 return -1;
1568 }
1569
1570 /* Since we know that type sizes are the same, there is no need to return
1571 -1 after this point. Partial overlap can not be introduced. */
1572
1573 /* We may need to fold trees in this case.
1574 TODO: Handle integer constant case at least. */
1575 if (!operand_equal_p (low_bound1, low_bound2, flags: 0))
1576 return 0;
1577
1578 if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1579 {
1580 if (tree_int_cst_equal (index1, index2))
1581 return 0;
1582 return 1;
1583 }
1584 /* TODO: We can use VRP to further disambiguate here. */
1585 return 0;
1586}
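
/* Illustrative behavior for int a[10]: a[1] vs a[2] gives 1 (disjoint),
   a[3] vs a[3] gives 0 (same address), and a[i] vs a[0] also gives 0
   because with matching element sizes any overlap must again start at
   the same address; mismatched element sizes give -1.  */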
1587
1588/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1589 MATCH2 either point to the same address or are disjoint.
1590 MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1591 respectively or NULL in the case we established equivalence of bases.
   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
   overlap by an exact multiple of their element size.
1594
1595 This test works by matching the initial segment of the access path
1596 and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1597 match was determined without use of TBAA oracle.
1598
1599 Return 1 if we can determine that component references REF1 and REF2,
1600 that are within a common DECL, cannot overlap.
1601
   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. there is must-alias assuming there is must-alias
   between MATCH1 and MATCH2).

   Return -1 if we cannot determine 0 or 1; this happens when non-matching
   types were met in the path.
   In this case it may make sense to continue with other disambiguation
   oracles.  */
1610
1611static int
1612nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1613 tree match2, tree ref2,
1614 bool partial_overlap)
1615{
1616 int ntbaa1 = 0, ntbaa2 = 0;
  /* Early return if there are no references to match; we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information on how many disambiguations happened provided that
     the query was meaningful.  */
1623
1624 if (match1 == ref1 || !handled_component_p (t: ref1)
1625 || match2 == ref2 || !handled_component_p (t: ref2))
1626 return -1;
1627
1628 auto_vec<tree, 16> component_refs1;
1629 auto_vec<tree, 16> component_refs2;
1630
1631 /* Create the stack of handled components for REF1. */
1632 while (handled_component_p (t: ref1) && ref1 != match1)
1633 {
1634 /* We use TBAA only to re-synchronize after mismatched refs. So we
1635 do not need to truncate access path after TBAA part ends. */
1636 if (ends_tbaa_access_path_p (ref1))
1637 ntbaa1 = 0;
1638 else
1639 ntbaa1++;
1640 component_refs1.safe_push (obj: ref1);
1641 ref1 = TREE_OPERAND (ref1, 0);
1642 }
1643
1644 /* Create the stack of handled components for REF2. */
1645 while (handled_component_p (t: ref2) && ref2 != match2)
1646 {
1647 if (ends_tbaa_access_path_p (ref2))
1648 ntbaa2 = 0;
1649 else
1650 ntbaa2++;
1651 component_refs2.safe_push (obj: ref2);
1652 ref2 = TREE_OPERAND (ref2, 0);
1653 }
1654
1655 if (!flag_strict_aliasing)
1656 {
1657 ntbaa1 = 0;
1658 ntbaa2 = 0;
1659 }
1660
1661 bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1662 bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1663
  /* If only one of the access paths starts with a MEM_REF, check that the
     offset is 0 so the address stays the same after stripping it.
     TODO: In this case we may walk the other access path until we get the
     same offset.

     If both start with a MEM_REF, the offsets have to be the same.  */
1670 if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1671 || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1672 || (mem_ref1 && mem_ref2
1673 && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1674 TREE_OPERAND (ref2, 1))))
1675 {
1676 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1677 return -1;
1678 }
1679
1680 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1681 to handle them here at all. */
1682 gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1683 && TREE_CODE (ref2) != TARGET_MEM_REF);
1684
1685 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1686 rank. This is sufficient because we start from the same DECL and you
1687 cannot reference several fields at a time with COMPONENT_REFs (unlike
1688 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1689 of them to access a sub-component, unless you're in a union, in which
1690 case the return value will precisely be false. */
1691 while (true)
1692 {
1693 /* Track if we have seen an unmatched ref with a non-zero offset. In
1694 this case we must look for partial overlaps. */
1695 bool seen_unmatched_ref_p = false;
1696
1697 /* First match ARRAY_REFs and try to disambiguate. */
1698 if (!component_refs1.is_empty ()
1699 && !component_refs2.is_empty ())
1700 {
1701 unsigned int narray_refs1=0, narray_refs2=0;
1702
1703 /* We generally assume that both access paths start with the same
1704 sequence of refs. However if the number of array refs is not in sync,
1705 try to recover and pop elts until the numbers match. This helps the
1706 case where one access path starts with an array and the other with an element. */
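	 /* For example (illustrative only): with int a[2][8], one reference
	    may be a[0][i] while the other is the whole row a[j]; the extra
	    innermost [0] on the first path is popped (its index must equal
	    the low bound) so the remaining ARRAY_REFs can be compared
	    pairwise.  */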
1707 for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1708 narray_refs1++)
1709 if (TREE_CODE (component_refs1 [component_refs1.length()
1710 - 1 - narray_refs1]) != ARRAY_REF)
1711 break;
1712
1713 for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1714 narray_refs2++)
1715 if (TREE_CODE (component_refs2 [component_refs2.length()
1716 - 1 - narray_refs2]) != ARRAY_REF)
1717 break;
1718 for (; narray_refs1 > narray_refs2; narray_refs1--)
1719 {
1720 ref1 = component_refs1.pop ();
1721 ntbaa1--;
1722
1723 /* If the index is non-zero we need to check whether the reference
1724 does not break the main invariant that bases are either
1725 disjoint or equal. Consider the example:
1726
1727 unsigned char out[][1];
1728 out[1]="a";
1729 out[i][0];
1730
1731 Here the bases (both out) are the same, but after removing the
1732 [i] index, this invariant no longer holds, because
1733 out[i] points to the middle of array out.
1734
1735 TODO: If the size of the type of the skipped reference is an integer
1736 multiple of the size of the type of the other reference this
1737 invariant can be verified, but even then it is not completely
1738 safe with !flag_strict_aliasing if the other reference contains
1739 unbounded array accesses.
1740 See */
1741
1742 if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1743 cheap_array_ref_low_bound (ref: ref1), flags: 0))
1744 return 0;
1745 }
1746 for (; narray_refs2 > narray_refs1; narray_refs2--)
1747 {
1748 ref2 = component_refs2.pop ();
1749 ntbaa2--;
1750 if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1751 cheap_array_ref_low_bound (ref: ref2), flags: 0))
1752 return 0;
1753 }
1754 /* Try to disambiguate matched arrays. */
1755 for (unsigned int i = 0; i < narray_refs1; i++)
1756 {
1757 int cmp = nonoverlapping_array_refs_p (ref1: component_refs1.pop (),
1758 ref2: component_refs2.pop ());
1759 ntbaa1--;
1760 ntbaa2--;
1761 if (cmp == 1 && !partial_overlap)
1762 {
1763 ++alias_stats
1764 .nonoverlapping_refs_since_match_p_no_alias;
1765 return 1;
1766 }
1767 if (cmp == -1)
1768 {
1769 seen_unmatched_ref_p = true;
1770 /* We cannot maintain the invariant that bases are either
1771 the same or completely disjoint. However we can still recover
1772 using type based alias analysis if we reach references of
1773 the same sizes. We do not attempt to match array sizes, so
1774 just finish array walking and look for component refs. */
1775 if (ntbaa1 < 0 || ntbaa2 < 0)
1776 {
1777 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1778 return -1;
1779 }
1780 for (i++; i < narray_refs1; i++)
1781 {
1782 component_refs1.pop ();
1783 component_refs2.pop ();
1784 ntbaa1--;
1785 ntbaa2--;
1786 }
1787 break;
1788 }
1789 partial_overlap = false;
1790 }
1791 }
1792
1793 /* Next look for component_refs. */
1794 do
1795 {
1796 if (component_refs1.is_empty ())
1797 {
1798 ++alias_stats
1799 .nonoverlapping_refs_since_match_p_must_overlap;
1800 return 0;
1801 }
1802 ref1 = component_refs1.pop ();
1803 ntbaa1--;
1804 if (TREE_CODE (ref1) != COMPONENT_REF)
1805 {
1806 seen_unmatched_ref_p = true;
1807 if (ntbaa1 < 0 || ntbaa2 < 0)
1808 {
1809 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1810 return -1;
1811 }
1812 }
1813 }
1814 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1815
1816 do
1817 {
1818 if (component_refs2.is_empty ())
1819 {
1820 ++alias_stats
1821 .nonoverlapping_refs_since_match_p_must_overlap;
1822 return 0;
1823 }
1824 ref2 = component_refs2.pop ();
1825 ntbaa2--;
1826 if (TREE_CODE (ref2) != COMPONENT_REF)
1827 {
1828 if (ntbaa1 < 0 || ntbaa2 < 0)
1829 {
1830 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1831 return -1;
1832 }
1833 seen_unmatched_ref_p = true;
1834 }
1835 }
1836 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1837
1838 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1839 earlier. */
1840 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1841 && TREE_CODE (ref2) == COMPONENT_REF);
1842
1843 tree field1 = TREE_OPERAND (ref1, 1);
1844 tree field2 = TREE_OPERAND (ref2, 1);
1845
1846 /* ??? We cannot simply use the type of operand #0 of the refs here
1847 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1848 for common blocks instead of using unions like everyone else. */
1849 tree type1 = DECL_CONTEXT (field1);
1850 tree type2 = DECL_CONTEXT (field2);
1851
1852 partial_overlap = false;
1853
1854 /* If we skipped array refs on types of different sizes, we can
1855 no longer be sure that there are no partial overlaps. */
1856 if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1857 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), flags: 0))
1858 {
1859 ++alias_stats
1860 .nonoverlapping_refs_since_match_p_may_alias;
1861 return -1;
1862 }
1863
1864 int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1865 if (cmp == -1)
1866 {
1867 ++alias_stats
1868 .nonoverlapping_refs_since_match_p_may_alias;
1869 return -1;
1870 }
1871 else if (cmp == 1)
1872 {
1873 ++alias_stats
1874 .nonoverlapping_refs_since_match_p_no_alias;
1875 return 1;
1876 }
1877 }
1878}
1879
1880/* Return a TYPE_UID which can be used to match record types we consider
1881 the same for TBAA purposes. */
1882
1883static inline int
1884ncr_type_uid (const_tree field)
1885{
1886 /* ??? We cannot simply use the type of operand #0 of the refs here
1887 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1888 for common blocks instead of using unions like everyone else. */
1889 tree type = DECL_FIELD_CONTEXT (field);
1890 /* With LTO, types from different translation units that are
1891 considered same_type_for_tbaa_p may not have the same
1892 main variant. They do, however, have the same TYPE_CANONICAL. */
1893 if (TYPE_CANONICAL (type))
1894 return TYPE_UID (TYPE_CANONICAL (type));
1895 return TYPE_UID (type);
1896}
1897
1898/* qsort compare function to sort FIELD_DECLs by the TYPE_UID of
1899 their DECL_FIELD_CONTEXT. */
1900
1901static inline int
1902ncr_compar (const void *field1_, const void *field2_)
1903{
1904 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1905 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1906 unsigned int uid1 = ncr_type_uid (field: field1);
1907 unsigned int uid2 = ncr_type_uid (field: field2);
1908
1909 if (uid1 < uid2)
1910 return -1;
1911 else if (uid1 > uid2)
1912 return 1;
1913 return 0;
1914}
1915
1916/* Return true if we can determine that the fields referenced cannot
1917 overlap for any pair of objects. This relies on TBAA. */
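/* For instance (purely illustrative): with

     struct S { int f; int g; } *p, *q;

   the accesses p->f and q->g each contain a COMPONENT_REF whose
   DECL_FIELD_CONTEXT is struct S, and f and g cannot overlap within any
   object of type S, so the accesses cannot overlap no matter what p and q
   point to.  */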
1918
1919static bool
1920nonoverlapping_component_refs_p (const_tree x, const_tree y)
1921{
1922 /* Early return if we have nothing to do.
1923
1924 Do not consider this as may-alias for stats - it is more useful
1925 to have information about how many disambiguations happened provided
1926 that the query was meaningful. */
1927 if (!flag_strict_aliasing
1928 || !x || !y
1929 || !handled_component_p (t: x)
1930 || !handled_component_p (t: y))
1931 return false;
1932
1933 auto_vec<const_tree, 16> fieldsx;
1934 while (handled_component_p (t: x))
1935 {
1936 if (TREE_CODE (x) == COMPONENT_REF)
1937 {
1938 tree field = TREE_OPERAND (x, 1);
1939 tree type = DECL_FIELD_CONTEXT (field);
1940 if (TREE_CODE (type) == RECORD_TYPE)
1941 fieldsx.safe_push (obj: field);
1942 }
1943 else if (ends_tbaa_access_path_p (x))
1944 fieldsx.truncate (size: 0);
1945 x = TREE_OPERAND (x, 0);
1946 }
1947 if (fieldsx.length () == 0)
1948 return false;
1949 auto_vec<const_tree, 16> fieldsy;
1950 while (handled_component_p (t: y))
1951 {
1952 if (TREE_CODE (y) == COMPONENT_REF)
1953 {
1954 tree field = TREE_OPERAND (y, 1);
1955 tree type = DECL_FIELD_CONTEXT (field);
1956 if (TREE_CODE (type) == RECORD_TYPE)
1957 fieldsy.safe_push (TREE_OPERAND (y, 1));
1958 }
1959 else if (ends_tbaa_access_path_p (y))
1960 fieldsy.truncate (size: 0);
1961 y = TREE_OPERAND (y, 0);
1962 }
1963 if (fieldsy.length () == 0)
1964 {
1965 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1966 return false;
1967 }
1968
1969 /* Most common case first. */
1970 if (fieldsx.length () == 1
1971 && fieldsy.length () == 1)
1972 {
1973 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1974 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1975 && nonoverlapping_component_refs_p_1 (field1: fieldsx[0], field2: fieldsy[0]) == 1)
1976 {
1977 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1978 return true;
1979 }
1980 else
1981 {
1982 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1983 return false;
1984 }
1985 }
1986
1987 if (fieldsx.length () == 2)
1988 {
1989 if (ncr_compar (field1_: &fieldsx[0], field2_: &fieldsx[1]) == 1)
1990 std::swap (a&: fieldsx[0], b&: fieldsx[1]);
1991 }
1992 else
1993 fieldsx.qsort (ncr_compar);
1994
1995 if (fieldsy.length () == 2)
1996 {
1997 if (ncr_compar (field1_: &fieldsy[0], field2_: &fieldsy[1]) == 1)
1998 std::swap (a&: fieldsy[0], b&: fieldsy[1]);
1999 }
2000 else
2001 fieldsy.qsort (ncr_compar);
2002
2003 unsigned i = 0, j = 0;
2004 do
2005 {
2006 const_tree fieldx = fieldsx[i];
2007 const_tree fieldy = fieldsy[j];
2008
2009 /* We're left with accessing different fields of a structure,
2010 no possible overlap. */
2011 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
2012 DECL_FIELD_CONTEXT (fieldy)) == 1
2013 && nonoverlapping_component_refs_p_1 (field1: fieldx, field2: fieldy) == 1)
2014 {
2015 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
2016 return true;
2017 }
2018
2019 if (ncr_type_uid (field: fieldx) < ncr_type_uid (field: fieldy))
2020 {
2021 i++;
2022 if (i == fieldsx.length ())
2023 break;
2024 }
2025 else
2026 {
2027 j++;
2028 if (j == fieldsy.length ())
2029 break;
2030 }
2031 }
2032 while (1);
2033
2034 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
2035 return false;
2036}
2037
2038
2039/* Return true if two memory references based on the variables BASE1
2040 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2041 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
2042 if non-NULL are the complete memory reference trees. */
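/* For example (illustrative only): accesses a[1] and a[3] of the same array
   decl have constant, non-overlapping offset ranges and therefore do not
   alias; accesses with variable component positions fall back to the
   access-path walk of nonoverlapping_refs_since_match_p below.  */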
2043
2044static bool
2045decl_refs_may_alias_p (tree ref1, tree base1,
2046 poly_int64 offset1, poly_int64 max_size1,
2047 poly_int64 size1,
2048 tree ref2, tree base2,
2049 poly_int64 offset2, poly_int64 max_size2,
2050 poly_int64 size2)
2051{
2052 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
2053
2054 /* If both references are based on different variables, they cannot alias. */
2055 if (compare_base_decls (base1, base2) == 0)
2056 return false;
2057
2058 /* If both references are based on the same variable, they cannot alias if
2059 the accesses do not overlap. */
2060 if (!ranges_maybe_overlap_p (pos1: offset1, size1: max_size1, pos2: offset2, size2: max_size2))
2061 return false;
2062
2063 /* If there is must alias, there is no use disambiguating further. */
2064 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2065 return true;
2066
2067 /* For components with variable position, the above test isn't sufficient,
2068 so we disambiguate component references manually. */
2069 if (ref1 && ref2
2070 && handled_component_p (t: ref1) && handled_component_p (t: ref2)
2071 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, partial_overlap: false) == 1)
2072 return false;
2073
2074 return true;
2075}
2076
2077/* Return true if an access with BASE is view converted.
2078 BASE must not have the inner MEM_REF (&decl) stripped from it,
2079 as ao_ref_base does, and thus one extra walk
2080 of handled components is needed. */
2081
2082bool
2083view_converted_memref_p (tree base)
2084{
2085 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2086 return false;
2087 return (same_type_for_tbaa (TREE_TYPE (base),
2088 TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1))))
2089 != 1);
2090}
2091
2092/* Return true if an indirect reference based on *PTR1 constrained
2093 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2094 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2095 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2096 in which case they are computed on-demand. REF1 and REF2
2097 if non-NULL are the complete memory reference trees. */
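/* For example (illustrative only): an 8-byte access *(double *)p cannot
   alias a 4-byte decl "float f;" because the access would not fit into the
   decl, and if points-to analysis shows that p never points to f the
   references are disambiguated as well.  */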
2098
2099static bool
2100indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2101 poly_int64 offset1, poly_int64 max_size1,
2102 poly_int64 size1,
2103 alias_set_type ref1_alias_set,
2104 alias_set_type base1_alias_set,
2105 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2106 poly_int64 offset2, poly_int64 max_size2,
2107 poly_int64 size2,
2108 alias_set_type ref2_alias_set,
2109 alias_set_type base2_alias_set, bool tbaa_p)
2110{
2111 tree ptr1;
2112 tree ptrtype1, dbase2;
2113
2114 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2115 || TREE_CODE (base1) == TARGET_MEM_REF)
2116 && DECL_P (base2));
2117
2118 ptr1 = TREE_OPERAND (base1, 0);
2119 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2120
2121 /* If only one reference is based on a variable, they cannot alias if
2122 the pointer access is beyond the extent of the variable access.
2123 (the pointer base cannot validly point to an offset less than zero
2124 of the variable).
2125 ??? IVOPTs creates bases that do not honor this restriction,
2126 so do not apply this optimization for TARGET_MEM_REFs. */
2127 if (TREE_CODE (base1) != TARGET_MEM_REF
2128 && !ranges_maybe_overlap_p (pos1: offset1 + moff, size1: -1, pos2: offset2, size2: max_size2))
2129 return false;
2130
2131 /* If the pointer based access is bigger than the variable they cannot
2132 alias. This is similar to the check below where we use TBAA to
2133 increase the size of the pointer based access based on the dynamic
2134 type of a containing object we can infer from it. */
2135 poly_int64 dsize2;
2136 if (known_size_p (a: size1)
2137 && poly_int_tree_p (DECL_SIZE (base2), value: &dsize2)
2138 && known_lt (dsize2, size1))
2139 return false;
2140
2141 /* They also cannot alias if the pointer may not point to the decl. */
2142 if (!ptr_deref_may_alias_decl_p (ptr: ptr1, decl: base2))
2143 return false;
2144
2145 /* Disambiguations that rely on strict aliasing rules follow. */
2146 if (!flag_strict_aliasing || !tbaa_p)
2147 return true;
2148
2149 /* If the alias set for a pointer access is zero all bets are off. */
2150 if (base1_alias_set == 0 || base2_alias_set == 0)
2151 return true;
2152
2153 /* When we are trying to disambiguate an access with a pointer dereference
2154 as base versus one with a decl as base we can use both the size
2155 of the decl and its dynamic type for extra disambiguation.
2156 ??? We do not know anything about the dynamic type of the decl
2157 other than that its alias-set contains base2_alias_set as a subset
2158 which does not help us here. */
2159 /* As we know nothing useful about the dynamic type of the decl just
2160 use the usual conflict check rather than a subset test.
2161 ??? We could introduce -fvery-strict-aliasing when the language
2162 does not allow decls to have a dynamic type that differs from their
2163 static type. Then we can check
2164 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2165 if (base1_alias_set != base2_alias_set
2166 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2167 return false;
2168
2169 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2170
2171 /* If the size of the access relevant for TBAA through the pointer
2172 is bigger than the size of the decl we can't possibly access the
2173 decl via that pointer. */
2174 if (/* ??? This in turn may run afoul when a decl of type T which is
2175 a member of union type U is accessed through a pointer to
2176 type U and sizeof T is smaller than sizeof U. */
2177 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2178 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2179 && compare_sizes (DECL_SIZE (base2),
2180 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2181 return false;
2182
2183 if (!ref2)
2184 return true;
2185
2186 /* If the decl is accessed via a MEM_REF, reconstruct the base
2187 we can use for TBAA and an appropriately adjusted offset. */
2188 dbase2 = ref2;
2189 while (handled_component_p (t: dbase2))
2190 dbase2 = TREE_OPERAND (dbase2, 0);
2191 poly_int64 doffset1 = offset1;
2192 poly_offset_int doffset2 = offset2;
2193 if (TREE_CODE (dbase2) == MEM_REF
2194 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2195 {
2196 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2197 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2198 /* If second reference is view-converted, give up now. */
2199 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2200 return true;
2201 }
2202
2203 /* If first reference is view-converted, give up now. */
2204 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2205 return true;
2206
2207 /* If both references are through the same type, they do not alias
2208 if the accesses do not overlap. This does extra disambiguation
2209 for mixed/pointer accesses but requires strict aliasing.
2210 For MEM_REFs we require that the component-ref offset we computed
2211 is relative to the start of the type which we ensure by
2212 comparing rvalue and access type and disregarding the constant
2213 pointer offset.
2214
2215 But avoid treating variable length arrays as "objects", instead assume they
2216 can overlap by an exact multiple of their element size.
2217 See gcc.dg/torture/alias-2.c. */
2218 if (((TREE_CODE (base1) != TARGET_MEM_REF
2219 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2220 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2221 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2222 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2223 {
2224 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2225 && (TYPE_SIZE (TREE_TYPE (base1))
2226 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2227 != INTEGER_CST));
2228 if (!partial_overlap
2229 && !ranges_maybe_overlap_p (pos1: doffset1, size1: max_size1, pos2: doffset2, size2: max_size2))
2230 return false;
2231 if (!ref1 || !ref2
2232 /* If there is must alias, there is no use disambiguating further. */
2233 || (!partial_overlap
2234 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2235 return true;
2236 int res = nonoverlapping_refs_since_match_p (match1: base1, ref1, match2: base2, ref2,
2237 partial_overlap);
2238 if (res == -1)
2239 return !nonoverlapping_component_refs_p (x: ref1, y: ref2);
2240 return !res;
2241 }
2242
2243 /* Do access-path based disambiguation. */
2244 if (ref1 && ref2
2245 && (handled_component_p (t: ref1) || handled_component_p (t: ref2)))
2246 return aliasing_component_refs_p (ref1,
2247 ref1_alias_set, base1_alias_set,
2248 offset1, max_size1,
2249 ref2,
2250 ref2_alias_set, base2_alias_set,
2251 offset2, max_size2);
2252
2253 return true;
2254}
2255
2256/* Return true if two indirect references based on *PTR1
2257 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2258 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2259 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2260 in which case they are computed on-demand. REF1 and REF2
2261 if non-NULL are the complete memory reference trees. */
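/* For example (illustrative only): when both references are through the
   very same SSA pointer p, a 4-byte access at MEM[p] and a 4-byte access at
   MEM[p + 4] cover disjoint offset ranges and therefore cannot alias,
   regardless of what p points to.  */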
2262
2263static bool
2264indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2265 poly_int64 offset1, poly_int64 max_size1,
2266 poly_int64 size1,
2267 alias_set_type ref1_alias_set,
2268 alias_set_type base1_alias_set,
2269 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2270 poly_int64 offset2, poly_int64 max_size2,
2271 poly_int64 size2,
2272 alias_set_type ref2_alias_set,
2273 alias_set_type base2_alias_set, bool tbaa_p)
2274{
2275 tree ptr1;
2276 tree ptr2;
2277 tree ptrtype1, ptrtype2;
2278
2279 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2280 || TREE_CODE (base1) == TARGET_MEM_REF)
2281 && (TREE_CODE (base2) == MEM_REF
2282 || TREE_CODE (base2) == TARGET_MEM_REF));
2283
2284 ptr1 = TREE_OPERAND (base1, 0);
2285 ptr2 = TREE_OPERAND (base2, 0);
2286
2287 /* If both bases are based on pointers they cannot alias if they may not
2288 point to the same memory object or if they point to the same object
2289 and the accesses do not overlap. */
2290 if ((!cfun || gimple_in_ssa_p (cfun))
2291 && operand_equal_p (ptr1, ptr2, flags: 0)
2292 && (((TREE_CODE (base1) != TARGET_MEM_REF
2293 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2294 && (TREE_CODE (base2) != TARGET_MEM_REF
2295 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2296 || (TREE_CODE (base1) == TARGET_MEM_REF
2297 && TREE_CODE (base2) == TARGET_MEM_REF
2298 && (TMR_STEP (base1) == TMR_STEP (base2)
2299 || (TMR_STEP (base1) && TMR_STEP (base2)
2300 && operand_equal_p (TMR_STEP (base1),
2301 TMR_STEP (base2), flags: 0)))
2302 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2303 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2304 && operand_equal_p (TMR_INDEX (base1),
2305 TMR_INDEX (base2), flags: 0)))
2306 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2307 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2308 && operand_equal_p (TMR_INDEX2 (base1),
2309 TMR_INDEX2 (base2), flags: 0))))))
2310 {
2311 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2312 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2313 if (!ranges_maybe_overlap_p (pos1: offset1 + moff1, size1: max_size1,
2314 pos2: offset2 + moff2, size2: max_size2))
2315 return false;
2316 /* If there is must alias, there is no use disambiguating further. */
2317 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2318 return true;
2319 if (ref1 && ref2)
2320 {
2321 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2322 partial_overlap: false);
2323 if (res != -1)
2324 return !res;
2325 }
2326 }
2327 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2328 return false;
2329
2330 /* Disambiguations that rely on strict aliasing rules follow. */
2331 if (!flag_strict_aliasing || !tbaa_p)
2332 return true;
2333
2334 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2335 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2336
2337 /* If the alias set for a pointer access is zero all bets are off. */
2338 if (base1_alias_set == 0
2339 || base2_alias_set == 0)
2340 return true;
2341
2342 /* Do type-based disambiguation. */
2343 if (base1_alias_set != base2_alias_set
2344 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2345 return false;
2346
2347 /* If either reference is view-converted, give up now. */
2348 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2349 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2350 return true;
2351
2352 /* If both references are through the same type, they do not alias
2353 if the accesses do not overlap. This does extra disambiguation
2354 for mixed/pointer accesses but requires strict aliasing. */
2355 if ((TREE_CODE (base1) != TARGET_MEM_REF
2356 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2357 && (TREE_CODE (base2) != TARGET_MEM_REF
2358 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2359 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2360 TREE_TYPE (ptrtype2)) == 1)
2361 {
2362 /* But avoid treating arrays as "objects", instead assume they
2363 can overlap by an exact multiple of their element size.
2364 See gcc.dg/torture/alias-2.c. */
2365 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2366
2367 if (!partial_overlap
2368 && !ranges_maybe_overlap_p (pos1: offset1, size1: max_size1, pos2: offset2, size2: max_size2))
2369 return false;
2370 if (!ref1 || !ref2
2371 || (!partial_overlap
2372 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2373 return true;
2374 int res = nonoverlapping_refs_since_match_p (match1: base1, ref1, match2: base2, ref2,
2375 partial_overlap);
2376 if (res == -1)
2377 return !nonoverlapping_component_refs_p (x: ref1, y: ref2);
2378 return !res;
2379 }
2380
2381 /* Do access-path based disambiguation. */
2382 if (ref1 && ref2
2383 && (handled_component_p (t: ref1) || handled_component_p (t: ref2)))
2384 return aliasing_component_refs_p (ref1,
2385 ref1_alias_set, base1_alias_set,
2386 offset1, max_size1,
2387 ref2,
2388 ref2_alias_set, base2_alias_set,
2389 offset2, max_size2);
2390
2391 return true;
2392}
2393
2394/* Return true, if the two memory references REF1 and REF2 may alias. */
2395
2396static bool
2397refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2398{
2399 tree base1, base2;
2400 poly_int64 offset1 = 0, offset2 = 0;
2401 poly_int64 max_size1 = -1, max_size2 = -1;
2402 bool var1_p, var2_p, ind1_p, ind2_p;
2403
2404 gcc_checking_assert ((!ref1->ref
2405 || TREE_CODE (ref1->ref) == SSA_NAME
2406 || DECL_P (ref1->ref)
2407 || TREE_CODE (ref1->ref) == STRING_CST
2408 || handled_component_p (ref1->ref)
2409 || TREE_CODE (ref1->ref) == MEM_REF
2410 || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2411 || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2412 && (!ref2->ref
2413 || TREE_CODE (ref2->ref) == SSA_NAME
2414 || DECL_P (ref2->ref)
2415 || TREE_CODE (ref2->ref) == STRING_CST
2416 || handled_component_p (ref2->ref)
2417 || TREE_CODE (ref2->ref) == MEM_REF
2418 || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2419 || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2420
2421 /* Decompose the references into their base objects and the access. */
2422 base1 = ao_ref_base (ref: ref1);
2423 offset1 = ref1->offset;
2424 max_size1 = ref1->max_size;
2425 base2 = ao_ref_base (ref: ref2);
2426 offset2 = ref2->offset;
2427 max_size2 = ref2->max_size;
2428
2429 /* We can end up with registers or constants as bases for example from
2430 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2431 which is seen as a struct copy. */
2432 if (TREE_CODE (base1) == SSA_NAME
2433 || TREE_CODE (base1) == CONST_DECL
2434 || TREE_CODE (base1) == CONSTRUCTOR
2435 || TREE_CODE (base1) == ADDR_EXPR
2436 || CONSTANT_CLASS_P (base1)
2437 || TREE_CODE (base2) == SSA_NAME
2438 || TREE_CODE (base2) == CONST_DECL
2439 || TREE_CODE (base2) == CONSTRUCTOR
2440 || TREE_CODE (base2) == ADDR_EXPR
2441 || CONSTANT_CLASS_P (base2))
2442 return false;
2443
2444 /* Two volatile accesses always conflict. */
2445 if (ref1->volatile_p
2446 && ref2->volatile_p)
2447 return true;
2448
2449 /* refN->ref may convey size information, do not confuse our workers
2450 with that but strip it - ao_ref_base took it into account already. */
2451 tree ref1ref = ref1->ref;
2452 if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2453 ref1ref = TREE_OPERAND (ref1ref, 0);
2454 tree ref2ref = ref2->ref;
2455 if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2456 ref2ref = TREE_OPERAND (ref2ref, 0);
2457
2458 /* Defer to simple offset based disambiguation if we have
2459 references based on two decls. Do this before deferring to
2460 TBAA to handle must-alias cases in conformance with the
2461 GCC extension of allowing type-punning through unions. */
2462 var1_p = DECL_P (base1);
2463 var2_p = DECL_P (base2);
2464 if (var1_p && var2_p)
2465 return decl_refs_may_alias_p (ref1: ref1ref, base1, offset1, max_size1,
2466 size1: ref1->size,
2467 ref2: ref2ref, base2, offset2, max_size2,
2468 size2: ref2->size);
2469
2470 /* We can end up referring to code via function and label decls.
2471 As we likely do not properly track code aliases, conservatively
2472 bail out. */
2473 if (TREE_CODE (base1) == FUNCTION_DECL
2474 || TREE_CODE (base1) == LABEL_DECL
2475 || TREE_CODE (base2) == FUNCTION_DECL
2476 || TREE_CODE (base2) == LABEL_DECL)
2477 return true;
2478
2479 /* Handle restrict based accesses.
2480 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2481 here. */
2482 tree rbase1 = base1;
2483 tree rbase2 = base2;
2484 if (var1_p)
2485 {
2486 rbase1 = ref1ref;
2487 if (rbase1)
2488 while (handled_component_p (t: rbase1))
2489 rbase1 = TREE_OPERAND (rbase1, 0);
2490 }
2491 if (var2_p)
2492 {
2493 rbase2 = ref2ref;
2494 if (rbase2)
2495 while (handled_component_p (t: rbase2))
2496 rbase2 = TREE_OPERAND (rbase2, 0);
2497 }
2498 if (rbase1 && rbase2
2499 && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
2500 && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
2501 /* If the accesses are in the same restrict clique... */
2502 && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
2503 /* But based on different pointers they do not alias. */
2504 && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
2505 return false;
2506
2507 ind1_p = (TREE_CODE (base1) == MEM_REF
2508 || TREE_CODE (base1) == TARGET_MEM_REF);
2509 ind2_p = (TREE_CODE (base2) == MEM_REF
2510 || TREE_CODE (base2) == TARGET_MEM_REF);
2511
2512 /* Canonicalize the pointer-vs-decl case. */
2513 if (ind1_p && var2_p)
2514 {
2515 std::swap (a&: offset1, b&: offset2);
2516 std::swap (a&: max_size1, b&: max_size2);
2517 std::swap (a&: base1, b&: base2);
2518 std::swap (a&: ref1, b&: ref2);
2519 std::swap (a&: ref1ref, b&: ref2ref);
2520 var1_p = true;
2521 ind1_p = false;
2522 var2_p = false;
2523 ind2_p = true;
2524 }
2525
2526 /* First defer to TBAA if possible. */
2527 if (tbaa_p
2528 && flag_strict_aliasing
2529 && !alias_sets_conflict_p (ao_ref_alias_set (ref: ref1),
2530 ao_ref_alias_set (ref: ref2)))
2531 return false;
2532
2533 /* If the reference is based on a pointer that points to memory
2534 that may not be written to then the other reference cannot possibly
2535 clobber it. */
2536 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2537 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2538 || (ind1_p
2539 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2540 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2541 return false;
2542
2543 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2544 if (var1_p && ind2_p)
2545 return indirect_ref_may_alias_decl_p (ref1: ref2ref, base1: base2,
2546 offset1: offset2, max_size1: max_size2, size1: ref2->size,
2547 ref1_alias_set: ao_ref_alias_set (ref: ref2),
2548 base1_alias_set: ao_ref_base_alias_set (ref: ref2),
2549 ref2: ref1ref, base2: base1,
2550 offset2: offset1, max_size2: max_size1, size2: ref1->size,
2551 ref2_alias_set: ao_ref_alias_set (ref: ref1),
2552 base2_alias_set: ao_ref_base_alias_set (ref: ref1),
2553 tbaa_p);
2554 else if (ind1_p && ind2_p)
2555 return indirect_refs_may_alias_p (ref1: ref1ref, base1,
2556 offset1, max_size1, size1: ref1->size,
2557 ref1_alias_set: ao_ref_alias_set (ref: ref1),
2558 base1_alias_set: ao_ref_base_alias_set (ref: ref1),
2559 ref2: ref2ref, base2,
2560 offset2, max_size2, size2: ref2->size,
2561 ref2_alias_set: ao_ref_alias_set (ref: ref2),
2562 base2_alias_set: ao_ref_base_alias_set (ref: ref2),
2563 tbaa_p);
2564
2565 gcc_unreachable ();
2566}
2567
2568/* Return true, if the two memory references REF1 and REF2 may alias
2569 and update statistics. */
2570
2571bool
2572refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2573{
2574 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2575 if (res)
2576 ++alias_stats.refs_may_alias_p_may_alias;
2577 else
2578 ++alias_stats.refs_may_alias_p_no_alias;
2579 return res;
2580}
2581
2582static bool
2583refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2584{
2585 ao_ref r1;
2586 ao_ref_init (r: &r1, ref: ref1);
2587 return refs_may_alias_p_1 (ref1: &r1, ref2, tbaa_p);
2588}
2589
2590bool
2591refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2592{
2593 ao_ref r1, r2;
2594 ao_ref_init (r: &r1, ref: ref1);
2595 ao_ref_init (r: &r2, ref: ref2);
2596 return refs_may_alias_p_1 (ref1: &r1, ref2: &r2, tbaa_p);
2597}
2598
2599/* Returns true if there is an anti-dependence for the STORE that
2600 executes after the LOAD. */
2601
2602bool
2603refs_anti_dependent_p (tree load, tree store)
2604{
2605 ao_ref r1, r2;
2606 ao_ref_init (r: &r1, ref: load);
2607 ao_ref_init (r: &r2, ref: store);
2608 return refs_may_alias_p_1 (ref1: &r1, ref2: &r2, tbaa_p: false);
2609}
2610
2611/* Returns true if there is an output dependence for the stores
2612 STORE1 and STORE2. */
2613
2614bool
2615refs_output_dependent_p (tree store1, tree store2)
2616{
2617 ao_ref r1, r2;
2618 ao_ref_init (r: &r1, ref: store1);
2619 ao_ref_init (r: &r2, ref: store2);
2620 return refs_may_alias_p_1 (ref1: &r1, ref2: &r2, tbaa_p: false);
2621}
2622
2623/* Returns true if and only if REF may alias any access stored in TT.
2624 If TBAA_P is true, use the TBAA oracle. */
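/* For instance (purely illustrative): if the summary of the callee records
   only accesses through its first parameter, the walk below builds an
   ao_ref from the pointer actually passed in that position and REF can
   conflict only if that ao_ref and REF may alias; TBAA is used first to
   prune whole base/ref alias-set subtrees.  */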
2625
2626static bool
2627modref_may_conflict (const gcall *stmt,
2628 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2629{
2630 alias_set_type base_set, ref_set;
2631 bool global_memory_ok = false;
2632
2633 if (tt->every_base)
2634 return true;
2635
2636 if (!dbg_cnt (index: ipa_mod_ref))
2637 return true;
2638
2639 base_set = ao_ref_base_alias_set (ref);
2640
2641 ref_set = ao_ref_alias_set (ref);
2642
2643 int num_tests = 0, max_tests = param_modref_max_tests;
2644 for (auto base_node : tt->bases)
2645 {
2646 if (tbaa_p && flag_strict_aliasing)
2647 {
2648 if (num_tests >= max_tests)
2649 return true;
2650 alias_stats.modref_tests++;
2651 if (!alias_sets_conflict_p (base_set, base_node->base))
2652 continue;
2653 num_tests++;
2654 }
2655
2656 if (base_node->every_ref)
2657 return true;
2658
2659 for (auto ref_node : base_node->refs)
2660 {
2661 /* Do not repeat the same test as before. */
2662 if ((ref_set != base_set || base_node->base != ref_node->ref)
2663 && tbaa_p && flag_strict_aliasing)
2664 {
2665 if (num_tests >= max_tests)
2666 return true;
2667 alias_stats.modref_tests++;
2668 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2669 continue;
2670 num_tests++;
2671 }
2672
2673 if (ref_node->every_access)
2674 return true;
2675
2676 /* TBAA checks did not disambiguate, try individual accesses. */
2677 for (auto access_node : ref_node->accesses)
2678 {
2679 if (num_tests >= max_tests)
2680 return true;
2681
2682 if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
2683 {
2684 if (global_memory_ok)
2685 continue;
2686 if (ref_may_alias_global_p (ref, escaped_local_p: true))
2687 return true;
2688 global_memory_ok = true;
2689 num_tests++;
2690 continue;
2691 }
2692
2693 tree arg = access_node.get_call_arg (stmt);
2694 if (!arg)
2695 return true;
2696
2697 alias_stats.modref_baseptr_tests++;
2698
2699 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2700 continue;
2701
2702 /* The PTA oracle will be unhappy if arg is not a pointer. */
2703 if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2704 return true;
2705
2706 /* If we don't have a base pointer, give up. */
2707 if (!ref->ref && !ref->base)
2708 continue;
2709
2710 ao_ref ref2;
2711 if (access_node.get_ao_ref (stmt, ref: &ref2))
2712 {
2713 ref2.ref_alias_set = ref_node->ref;
2714 ref2.base_alias_set = base_node->base;
2715 if (refs_may_alias_p_1 (ref1: &ref2, ref2: ref, tbaa_p))
2716 return true;
2717 }
2718 else if (ptr_deref_may_alias_ref_p_1 (ptr: arg, ref))
2719 return true;
2720
2721 num_tests++;
2722 }
2723 }
2724 }
2725 return false;
2726}
2727
2728/* Check if REF conflicts with call using "fn spec" attribute.
2729 If CLOBBER is true we are checking for writes, otherwise check loads.
2730
2731 Return 0 if there are no conflicts (except for possible function call
2732 argument reads), 1 if there are conflicts and -1 if we cannot decide by
2733 the fn spec. */
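/* For example (illustrative only): for a call whose fnspec says it does not
   touch global memory and may only write through its first argument, we
   build an ao_ref from that argument (bounded by the size argument when the
   fnspec provides one) and compare it against REF; errno is considered
   separately for clobbers when -fmath-errno is in effect.  */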
2734
2735static int
2736check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2737{
2738 attr_fnspec fnspec = gimple_call_fnspec (stmt: call);
2739 if (fnspec.known_p ())
2740 {
2741 if (clobber
2742 ? !fnspec.global_memory_written_p ()
2743 : !fnspec.global_memory_read_p ())
2744 {
2745 for (unsigned int i = 0; i < gimple_call_num_args (gs: call); i++)
2746 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2747 && (!fnspec.arg_specified_p (i)
2748 || (clobber ? fnspec.arg_maybe_written_p (i)
2749 : fnspec.arg_maybe_read_p (i))))
2750 {
2751 ao_ref dref;
2752 tree size = NULL_TREE;
2753 unsigned int size_arg;
2754
2755 if (!fnspec.arg_specified_p (i))
2756 ;
2757 else if (fnspec.arg_max_access_size_given_by_arg_p
2758 (i, arg: &size_arg))
2759 size = gimple_call_arg (gs: call, index: size_arg);
2760 else if (fnspec.arg_access_size_given_by_type_p (i))
2761 {
2762 tree callee = gimple_call_fndecl (gs: call);
2763 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2764
2765 for (unsigned int p = 0; p < i; p++)
2766 t = TREE_CHAIN (t);
2767 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2768 }
2769 poly_int64 size_hwi;
2770 if (size
2771 && poly_int_tree_p (t: size, value: &size_hwi)
2772 && coeffs_in_range_p (a: size_hwi, b: 0,
2773 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
2774 {
2775 size_hwi = size_hwi * BITS_PER_UNIT;
2776 ao_ref_init_from_ptr_and_range (ref: &dref,
2777 ptr: gimple_call_arg (gs: call, index: i),
2778 range_known: true, offset: 0, size: -1, max_size: size_hwi);
2779 }
2780 else
2781 ao_ref_init_from_ptr_and_range (ref: &dref,
2782 ptr: gimple_call_arg (gs: call, index: i),
2783 range_known: false, offset: 0, size: -1, max_size: -1);
2784 if (refs_may_alias_p_1 (ref1: &dref, ref2: ref, tbaa_p: false))
2785 return 1;
2786 }
2787 if (clobber
2788 && fnspec.errno_maybe_written_p ()
2789 && flag_errno_math
2790 && targetm.ref_may_alias_errno (ref))
2791 return 1;
2792 return 0;
2793 }
2794 }
2795
2796 /* FIXME: we should handle barriers more consistently, but for now leave the
2797 check here. */
2798 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2799 switch (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: call)))
2800 {
2801 /* __sync_* builtins and some OpenMP builtins act as threading
2802 barriers. */
2803#undef DEF_SYNC_BUILTIN
2804#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2805#include "sync-builtins.def"
2806#undef DEF_SYNC_BUILTIN
2807 case BUILT_IN_GOMP_ATOMIC_START:
2808 case BUILT_IN_GOMP_ATOMIC_END:
2809 case BUILT_IN_GOMP_BARRIER:
2810 case BUILT_IN_GOMP_BARRIER_CANCEL:
2811 case BUILT_IN_GOMP_TASKWAIT:
2812 case BUILT_IN_GOMP_TASKGROUP_END:
2813 case BUILT_IN_GOMP_CRITICAL_START:
2814 case BUILT_IN_GOMP_CRITICAL_END:
2815 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2816 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2817 case BUILT_IN_GOMP_LOOP_END:
2818 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2819 case BUILT_IN_GOMP_ORDERED_START:
2820 case BUILT_IN_GOMP_ORDERED_END:
2821 case BUILT_IN_GOMP_SECTIONS_END:
2822 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2823 case BUILT_IN_GOMP_SINGLE_COPY_START:
2824 case BUILT_IN_GOMP_SINGLE_COPY_END:
2825 return 1;
2826
2827 default:
2828 return -1;
2829 }
2830 return -1;
2831}
2832
2833/* If the call CALL may use the memory reference REF return true,
2834 otherwise return false. */
2835
2836static bool
2837ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2838{
2839 tree base, callee;
2840 unsigned i;
2841 int flags = gimple_call_flags (call);
2842
2843 if (flags & (ECF_CONST|ECF_NOVOPS))
2844 goto process_args;
2845
2846 /* A call that is not without side-effects might involve volatile
2847 accesses and thus conflicts with all other volatile accesses. */
2848 if (ref->volatile_p)
2849 return true;
2850
2851 if (gimple_call_internal_p (gs: call))
2852 switch (gimple_call_internal_fn (gs: call))
2853 {
2854 case IFN_MASK_STORE:
2855 case IFN_SCATTER_STORE:
2856 case IFN_MASK_SCATTER_STORE:
2857 case IFN_LEN_STORE:
2858 case IFN_MASK_LEN_STORE:
2859 return false;
2860 case IFN_MASK_STORE_LANES:
2861 case IFN_MASK_LEN_STORE_LANES:
2862 goto process_args;
2863 case IFN_MASK_LOAD:
2864 case IFN_LEN_LOAD:
2865 case IFN_MASK_LEN_LOAD:
2866 case IFN_MASK_LOAD_LANES:
2867 case IFN_MASK_LEN_LOAD_LANES:
2868 {
2869 ao_ref rhs_ref;
2870 tree lhs = gimple_call_lhs (gs: call);
2871 if (lhs)
2872 {
2873 ao_ref_init_from_ptr_and_size (ref: &rhs_ref,
2874 ptr: gimple_call_arg (gs: call, index: 0),
2875 TYPE_SIZE_UNIT (TREE_TYPE (lhs)));
2876 /* We cannot make this a known-size access since otherwise
2877 we disambiguate against refs to decls that are smaller. */
2878 rhs_ref.size = -1;
2879 rhs_ref.ref_alias_set = rhs_ref.base_alias_set
2880 = tbaa_p ? get_deref_alias_set (TREE_TYPE
2881 (gimple_call_arg (call, 1))) : 0;
2882 return refs_may_alias_p_1 (ref1: ref, ref2: &rhs_ref, tbaa_p);
2883 }
2884 break;
2885 }
2886 default:;
2887 }
2888
2889 callee = gimple_call_fndecl (gs: call);
2890 if (callee != NULL_TREE)
2891 {
2892 struct cgraph_node *node = cgraph_node::get (decl: callee);
2893 /* We cannot safely optimize based on the summary of the callee if it
2894 does not always bind to the current def: it is possible that a memory
2895 load was optimized out earlier and the interposed variant may not be
2896 optimized this way. */
2897 if (node && node->binds_to_current_def_p ())
2898 {
2899 modref_summary *summary = get_modref_function_summary (func: node);
2900 if (summary && !summary->calls_interposable)
2901 {
2902 if (!modref_may_conflict (stmt: call, tt: summary->loads, ref, tbaa_p))
2903 {
2904 alias_stats.modref_use_no_alias++;
2905 if (dump_file && (dump_flags & TDF_DETAILS))
2906 {
2907 fprintf (stream: dump_file,
2908 format: "ipa-modref: call stmt ");
2909 print_gimple_stmt (dump_file, call, 0);
2910 fprintf (stream: dump_file,
2911 format: "ipa-modref: call to %s does not use ",
2912 node->dump_name ());
2913 if (!ref->ref && ref->base)
2914 {
2915 fprintf (stream: dump_file, format: "base: ");
2916 print_generic_expr (dump_file, ref->base);
2917 }
2918 else if (ref->ref)
2919 {
2920 fprintf (stream: dump_file, format: "ref: ");
2921 print_generic_expr (dump_file, ref->ref);
2922 }
2923 fprintf (stream: dump_file, format: " alias sets: %i->%i\n",
2924 ao_ref_base_alias_set (ref),
2925 ao_ref_alias_set (ref));
2926 }
2927 goto process_args;
2928 }
2929 alias_stats.modref_use_may_alias++;
2930 }
2931 }
2932 }
2933
2934 base = ao_ref_base (ref);
2935 if (!base)
2936 return true;
2937
2938 /* If the reference is based on a decl that is not aliased the call
2939 cannot possibly use it. */
2940 if (DECL_P (base)
2941 && !may_be_aliased (var: base)
2942 /* But local statics can be used through recursion. */
2943 && !is_global_var (t: base))
2944 goto process_args;
2945
2946 if (int res = check_fnspec (call, ref, clobber: false))
2947 {
2948 if (res == 1)
2949 return true;
2950 }
2951 else
2952 goto process_args;
2953
2954 /* Check if base is a global static variable that is not read
2955 by the function. */
2956 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2957 {
2958 struct cgraph_node *node = cgraph_node::get (decl: callee);
2959 bitmap read;
2960 int id;
2961
2962 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2963 node yet. We should enforce that there are nodes for all decls in the
2964 IL and remove this check instead. */
2965 if (node
2966 && (id = ipa_reference_var_uid (t: base)) != -1
2967 && (read = ipa_reference_get_read_global (fn: node))
2968 && !bitmap_bit_p (read, id))
2969 goto process_args;
2970 }
2971
2972 /* Check if the base variable is call-used. */
2973 if (DECL_P (base))
2974 {
2975 if (pt_solution_includes (gimple_call_use_set (call_stmt: call), base))
2976 return true;
2977 }
2978 else if ((TREE_CODE (base) == MEM_REF
2979 || TREE_CODE (base) == TARGET_MEM_REF)
2980 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2981 {
2982 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2983 if (!pi)
2984 return true;
2985
2986 if (pt_solutions_intersect (gimple_call_use_set (call_stmt: call), &pi->pt))
2987 return true;
2988 }
2989 else
2990 return true;
2991
2992 /* Inspect call arguments for passed-by-value aliases. */
2993process_args:
2994 for (i = 0; i < gimple_call_num_args (gs: call); ++i)
2995 {
2996 tree op = gimple_call_arg (gs: call, index: i);
2997 int flags = gimple_call_arg_flags (call, i);
2998
2999 if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
3000 continue;
3001
3002 if (TREE_CODE (op) == WITH_SIZE_EXPR)
3003 op = TREE_OPERAND (op, 0);
3004
3005 if (TREE_CODE (op) != SSA_NAME
3006 && !is_gimple_min_invariant (op))
3007 {
3008 ao_ref r;
3009 ao_ref_init (r: &r, ref: op);
3010 if (refs_may_alias_p_1 (ref1: &r, ref2: ref, tbaa_p))
3011 return true;
3012 }
3013 }
3014
3015 return false;
3016}
3017
3018static bool
3019ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
3020{
3021 bool res;
3022 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
3023 if (res)
3024 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
3025 else
3026 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
3027 return res;
3028}
3029
3030
3031/* If the statement STMT may use the memory reference REF return
3032 true, otherwise return false. */
3033
3034bool
3035ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
3036{
3037 if (is_gimple_assign (gs: stmt))
3038 {
3039 tree rhs;
3040
3041 /* All memory assign statements are single. */
3042 if (!gimple_assign_single_p (gs: stmt))
3043 return false;
3044
3045 rhs = gimple_assign_rhs1 (gs: stmt);
3046 if (is_gimple_reg (rhs)
3047 || is_gimple_min_invariant (rhs)
3048 || gimple_assign_rhs_code (gs: stmt) == CONSTRUCTOR)
3049 return false;
3050
3051 return refs_may_alias_p (ref1: rhs, ref2: ref, tbaa_p);
3052 }
3053 else if (is_gimple_call (gs: stmt))
3054 return ref_maybe_used_by_call_p (call: as_a <gcall *> (p: stmt), ref, tbaa_p);
3055 else if (greturn *return_stmt = dyn_cast <greturn *> (p: stmt))
3056 {
3057 tree retval = gimple_return_retval (gs: return_stmt);
3058 if (retval
3059 && TREE_CODE (retval) != SSA_NAME
3060 && !is_gimple_min_invariant (retval)
3061 && refs_may_alias_p (ref1: retval, ref2: ref, tbaa_p))
3062 return true;
3063 /* If ref escapes the function then the return acts as a use. */
3064 tree base = ao_ref_base (ref);
3065 if (!base)
3066 ;
3067 else if (DECL_P (base))
3068 return is_global_var (t: base);
3069 else if (TREE_CODE (base) == MEM_REF
3070 || TREE_CODE (base) == TARGET_MEM_REF)
3071 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0), escaped_local_p: false);
3072 return false;
3073 }
3074
3075 return true;
3076}
3077
3078bool
3079ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
3080{
3081 ao_ref r;
3082 ao_ref_init (r: &r, ref);
3083 return ref_maybe_used_by_stmt_p (stmt, ref: &r, tbaa_p);
3084}
3085
3086/* If the call in statement CALL may clobber the memory reference REF
3087 return true, otherwise return false. */
3088
3089bool
3090call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
3091{
3092 tree base;
3093 tree callee;
3094
3095 /* If the call is pure or const it cannot clobber anything. */
3096 if (gimple_call_flags (call)
3097 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
3098 return false;
3099 if (gimple_call_internal_p (gs: call))
3100 switch (auto fn = gimple_call_internal_fn (gs: call))
3101 {
3102 /* Treat these internal calls like ECF_PURE for aliasing;
3103 they don't write to any memory the program should care about.
3104 They have other important side-effects and read memory,
3105 so they can't be ECF_NOVOPS. */
3106 case IFN_UBSAN_NULL:
3107 case IFN_UBSAN_BOUNDS:
3108 case IFN_UBSAN_VPTR:
3109 case IFN_UBSAN_OBJECT_SIZE:
3110 case IFN_UBSAN_PTR:
3111 case IFN_ASAN_CHECK:
3112 return false;
3113 case IFN_MASK_STORE:
3114 case IFN_LEN_STORE:
3115 case IFN_MASK_LEN_STORE:
3116 case IFN_MASK_STORE_LANES:
3117 case IFN_MASK_LEN_STORE_LANES:
3118 {
3119 tree rhs = gimple_call_arg (gs: call,
3120 index: internal_fn_stored_value_index (fn));
3121 ao_ref lhs_ref;
3122 ao_ref_init_from_ptr_and_size (ref: &lhs_ref, ptr: gimple_call_arg (gs: call, index: 0),
3123 TYPE_SIZE_UNIT (TREE_TYPE (rhs)));
3124 /* We cannot make this a known-size access since otherwise
3125 we disambiguate against refs to decls that are smaller. */
3126 lhs_ref.size = -1;
3127 lhs_ref.ref_alias_set = lhs_ref.base_alias_set
3128 = tbaa_p ? get_deref_alias_set
3129 (TREE_TYPE (gimple_call_arg (call, 1))) : 0;
3130 return refs_may_alias_p_1 (ref1: ref, ref2: &lhs_ref, tbaa_p);
3131 }
3132 default:
3133 break;
3134 }
3135
3136 callee = gimple_call_fndecl (gs: call);
3137
3138 if (callee != NULL_TREE && !ref->volatile_p)
3139 {
3140 struct cgraph_node *node = cgraph_node::get (decl: callee);
3141 if (node)
3142 {
3143 modref_summary *summary = get_modref_function_summary (func: node);
3144 if (summary)
3145 {
3146 if (!modref_may_conflict (stmt: call, tt: summary->stores, ref, tbaa_p)
3147 && (!summary->writes_errno
3148 || !targetm.ref_may_alias_errno (ref)))
3149 {
3150 alias_stats.modref_clobber_no_alias++;
3151 if (dump_file && (dump_flags & TDF_DETAILS))
3152 {
3153 fprintf (stream: dump_file,
3154 format: "ipa-modref: call stmt ");
3155 print_gimple_stmt (dump_file, call, 0);
3156 fprintf (stream: dump_file,
3157 format: "ipa-modref: call to %s does not clobber ",
3158 node->dump_name ());
3159 if (!ref->ref && ref->base)
3160 {
3161 fprintf (stream: dump_file, format: "base: ");
3162 print_generic_expr (dump_file, ref->base);
3163 }
3164 else if (ref->ref)
3165 {
3166 fprintf (stream: dump_file, format: "ref: ");
3167 print_generic_expr (dump_file, ref->ref);
3168 }
3169 fprintf (stream: dump_file, format: " alias sets: %i->%i\n",
3170 ao_ref_base_alias_set (ref),
3171 ao_ref_alias_set (ref));
3172 }
3173 return false;
3174 }
3175 alias_stats.modref_clobber_may_alias++;
3176 }
3177 }
3178 }
3179
3180 base = ao_ref_base (ref);
3181 if (!base)
3182 return true;
3183
3184 if (TREE_CODE (base) == SSA_NAME
3185 || CONSTANT_CLASS_P (base))
3186 return false;
3187
3188 /* A call that is not without side-effects might involve volatile
3189 accesses and thus conflicts with all other volatile accesses. */
3190 if (ref->volatile_p)
3191 return true;
3192
3193 /* If the reference is based on a decl that is not aliased the call
3194 cannot possibly clobber it. */
3195 if (DECL_P (base)
3196 && !may_be_aliased (var: base)
3197 /* But local non-readonly statics can be modified through recursion
3198 or the call may implement a threading barrier which we must
3199 treat as may-def. */
3200 && (TREE_READONLY (base)
3201 || !is_global_var (t: base)))
3202 return false;
3203
3204 /* If the reference is based on a pointer that points to memory
3205 that may not be written to then the call cannot possibly clobber it. */
3206 if ((TREE_CODE (base) == MEM_REF
3207 || TREE_CODE (base) == TARGET_MEM_REF)
3208 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3209 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3210 return false;
3211
3212 if (int res = check_fnspec (call, ref, clobber: true))
3213 {
3214 if (res == 1)
3215 return true;
3216 }
3217 else
3218 return false;
3219
3220 /* Check if base is a global static variable that is not written
3221 by the function. */
3222 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3223 {
3224 struct cgraph_node *node = cgraph_node::get (decl: callee);
3225 bitmap written;
3226 int id;
3227
3228 if (node
3229 && (id = ipa_reference_var_uid (t: base)) != -1
3230 && (written = ipa_reference_get_written_global (fn: node))
3231 && !bitmap_bit_p (written, id))
3232 return false;
3233 }
3234
3235 /* Check if the base variable is call-clobbered. */
3236 if (DECL_P (base))
3237 return pt_solution_includes (gimple_call_clobber_set (call_stmt: call), base);
3238 else if ((TREE_CODE (base) == MEM_REF
3239 || TREE_CODE (base) == TARGET_MEM_REF)
3240 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3241 {
3242 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3243 if (!pi)
3244 return true;
3245
3246 return pt_solutions_intersect (gimple_call_clobber_set (call_stmt: call), &pi->pt);
3247 }
3248
3249 return true;
3250}
3251
3252/* If the call in statement CALL may clobber the memory reference REF
3253 return true, otherwise return false. */
3254
3255bool
3256call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3257{
3258 bool res;
3259 ao_ref r;
3260 ao_ref_init (r: &r, ref);
3261 res = call_may_clobber_ref_p_1 (call, ref: &r, tbaa_p);
3262 if (res)
3263 ++alias_stats.call_may_clobber_ref_p_may_alias;
3264 else
3265 ++alias_stats.call_may_clobber_ref_p_no_alias;
3266 return res;
3267}
3268
3269
3270/* If the statement STMT may clobber the memory reference REF return true,
3271 otherwise return false. */
3272
3273bool
3274stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3275{
3276 if (is_gimple_call (gs: stmt))
3277 {
3278 tree lhs = gimple_call_lhs (gs: stmt);
3279 if (lhs
3280 && TREE_CODE (lhs) != SSA_NAME)
3281 {
3282 ao_ref r;
3283 ao_ref_init (r: &r, ref: lhs);
3284 if (refs_may_alias_p_1 (ref1: ref, ref2: &r, tbaa_p))
3285 return true;
3286 }
3287
3288 return call_may_clobber_ref_p_1 (call: as_a <gcall *> (p: stmt), ref, tbaa_p);
3289 }
3290 else if (gimple_assign_single_p (gs: stmt))
3291 {
3292 tree lhs = gimple_assign_lhs (gs: stmt);
3293 if (TREE_CODE (lhs) != SSA_NAME)
3294 {
3295 ao_ref r;
3296 ao_ref_init (r: &r, ref: lhs);
3297 return refs_may_alias_p_1 (ref1: ref, ref2: &r, tbaa_p);
3298 }
3299 }
3300 else if (gimple_code (g: stmt) == GIMPLE_ASM)
3301 return true;
3302
3303 return false;
3304}
3305
3306bool
3307stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3308{
3309 ao_ref r;
3310 ao_ref_init (r: &r, ref);
3311 return stmt_may_clobber_ref_p_1 (stmt, ref: &r, tbaa_p);
3312}
3313
3314/* Return true if store1 and store2 described by corresponding tuples
3315 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3316 address. */
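/* For example (illustrative only): given

     int x;
     int *p = &x;    (points-to of p is the singleton { x })
     *p = 0;   vs.   x = 1;

   both stores are 4 bytes at offset 0 and the store through p must start at
   the beginning of x, so the two stores write exactly the same bytes.  */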
3317
3318static bool
3319same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3320 poly_int64 max_size1,
3321 tree base2, poly_int64 offset2, poly_int64 size2,
3322 poly_int64 max_size2)
3323{
3324 /* Offsets need to be 0. */
3325 if (maybe_ne (a: offset1, b: 0)
3326 || maybe_ne (a: offset2, b: 0))
3327 return false;
3328
3329 bool base1_obj_p = SSA_VAR_P (base1);
3330 bool base2_obj_p = SSA_VAR_P (base2);
3331
3332 /* We need one object. */
3333 if (base1_obj_p == base2_obj_p)
3334 return false;
3335 tree obj = base1_obj_p ? base1 : base2;
3336
3337 /* And we need one MEM_REF. */
3338 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3339 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3340 if (base1_memref_p == base2_memref_p)
3341 return false;
3342 tree memref = base1_memref_p ? base1 : base2;
3343
3344 /* Sizes need to be valid. */
3345 if (!known_size_p (a: max_size1)
3346 || !known_size_p (a: max_size2)
3347 || !known_size_p (a: size1)
3348 || !known_size_p (a: size2))
3349 return false;
3350
3351 /* Max_size needs to match size. */
3352 if (maybe_ne (a: max_size1, b: size1)
3353 || maybe_ne (a: max_size2, b: size2))
3354 return false;
3355
3356 /* Sizes need to match. */
3357 if (maybe_ne (a: size1, b: size2))
3358 return false;
3359
3360
3361 /* Check that memref is a store through a pointer with singleton points-to info. */
3362 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3363 return false;
3364 tree ptr = TREE_OPERAND (memref, 0);
3365 if (TREE_CODE (ptr) != SSA_NAME)
3366 return false;
3367 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3368 unsigned int pt_uid;
3369 if (pi == NULL
3370 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3371 return false;
3372
3373 /* Be conservative with non-call exceptions when the address might
3374 be NULL. */
3375 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3376 return false;
3377
3378 /* Check that ptr points relative to obj. */
3379 unsigned int obj_uid = DECL_PT_UID (obj);
3380 if (obj_uid != pt_uid)
3381 return false;
3382
3383 /* Check that the object size is the same as the store size. That ensures
3384 that ptr points to the start of obj. */
3385 return (DECL_SIZE (obj)
3386 && poly_int_tree_p (DECL_SIZE (obj))
3387 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3388}
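
/* As an illustration (hypothetical GIMPLE, made-up names), the helper above
   is meant to equate store destinations such as

     struct S a;	// base1: the variable itself
     ...
     a = s_3;		// store with base 'a', offset 0
     *p_2 = s_3;	// store with base MEM_REF[p_2], offset 0

   when the points-to set of p_2 is exactly { a } and DECL_SIZE (a) equals
   the store size, so both stores cover the same bytes even though their
   bases differ syntactically.  */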
3389
3390/* Return true if REF is killed by a store described by
3391   BASE, OFFSET, SIZE and MAX_SIZE.  */
3392
3393static bool
3394store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3395 poly_int64 max_size, ao_ref *ref)
3396{
3397 poly_int64 ref_offset = ref->offset;
3398 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3399 so base == ref->base does not always hold. */
3400 if (base != ref->base)
3401 {
3402 /* Try using points-to info. */
3403 if (same_addr_size_stores_p (base1: base, offset1: offset, size1: size, max_size1: max_size, base2: ref->base,
3404 offset2: ref->offset, size2: ref->size, max_size2: ref->max_size))
3405 return true;
3406
3407 /* If both base and ref->base are MEM_REFs, only compare the
3408 first operand, and if the second operand isn't equal constant,
3409 try to add the offsets into offset and ref_offset. */
3410 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3411 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3412 {
3413 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3414 TREE_OPERAND (ref->base, 1)))
3415 {
3416 poly_offset_int off1 = mem_ref_offset (base);
3417 off1 <<= LOG2_BITS_PER_UNIT;
3418 off1 += offset;
3419 poly_offset_int off2 = mem_ref_offset (ref->base);
3420 off2 <<= LOG2_BITS_PER_UNIT;
3421 off2 += ref_offset;
3422 if (!off1.to_shwi (r: &offset) || !off2.to_shwi (r: &ref_offset))
3423 size = -1;
3424 }
3425 }
3426 else
3427 size = -1;
3428 }
3429 /* For a must-alias check we need to be able to constrain
3430 the access properly. */
3431 return (known_eq (size, max_size)
3432 && known_subrange_p (pos1: ref_offset, size1: ref->max_size, pos2: offset, size2: size));
3433}
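
/* A concrete example with made-up numbers: given a common base, a store
   with size == max_size == 128 bits at offset 0 kills a reference with
   max_size 32 at offset 64, because [64, 96) is a known subrange of
   [0, 128).  If the store's max_size were unknown or larger than its size,
   the must-alias property could not be established and false is returned.  */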
3434
3435/* If STMT kills the memory reference REF return true, otherwise
3436 return false. */
3437
3438bool
3439stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3440{
3441 if (!ao_ref_base (ref))
3442 return false;
3443
3444 if (gimple_has_lhs (stmt)
3445 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3446 /* The assignment is not necessarily carried out if it can throw
3447 and we can catch it in the current function where we could inspect
3448 the previous value. Similarly if the function can throw externally
3449 and the ref does not die on the function return.
3450 ??? We only need to care about the RHS throwing. For aggregate
3451 assignments or similar calls and non-call exceptions the LHS
3452 might throw as well.
3453 ??? We also should care about possible longjmp, but since we
3454 do not understand that longjmp is not using global memory we will
3455 not consider a kill here since the function call will be considered
3456 as possibly using REF. */
3457 && !stmt_can_throw_internal (cfun, stmt)
3458 && (!stmt_can_throw_external (cfun, stmt)
3459 || !ref_may_alias_global_p (ref, escaped_local_p: false)))
3460 {
3461 tree lhs = gimple_get_lhs (stmt);
3462 /* If LHS is literally a base of the access we are done. */
3463 if (ref->ref)
3464 {
3465 tree base = ref->ref;
3466 tree innermost_dropped_array_ref = NULL_TREE;
3467 if (handled_component_p (t: base))
3468 {
3469 tree saved_lhs0 = NULL_TREE;
3470 if (handled_component_p (t: lhs))
3471 {
3472 saved_lhs0 = TREE_OPERAND (lhs, 0);
3473 TREE_OPERAND (lhs, 0) = integer_zero_node;
3474 }
3475 do
3476 {
3477 /* Just compare the outermost handled component, if
3478 they are equal we have found a possible common
3479 base. */
3480 tree saved_base0 = TREE_OPERAND (base, 0);
3481 TREE_OPERAND (base, 0) = integer_zero_node;
3482 bool res = operand_equal_p (lhs, base, flags: 0);
3483 TREE_OPERAND (base, 0) = saved_base0;
3484 if (res)
3485 break;
3486 /* Remember if we drop an array-ref that we need to
3487 double-check not being at struct end. */
3488 if (TREE_CODE (base) == ARRAY_REF
3489 || TREE_CODE (base) == ARRAY_RANGE_REF)
3490 innermost_dropped_array_ref = base;
3491 /* Otherwise drop handled components of the access. */
3492 base = saved_base0;
3493 }
3494 while (handled_component_p (t: base));
3495 if (saved_lhs0)
3496 TREE_OPERAND (lhs, 0) = saved_lhs0;
3497 }
3498 /* Finally check if the lhs has the same address and size as the
3499 base candidate of the access. Watch out if we have dropped
3500	     an array-ref that might have flexible size; in that case ref->ref
3501	     may lie outside the TYPE_SIZE of its base.  */
3502 if ((! innermost_dropped_array_ref
3503 || ! array_ref_flexible_size_p (innermost_dropped_array_ref))
3504 && (lhs == base
3505 || (((TYPE_SIZE (TREE_TYPE (lhs))
3506 == TYPE_SIZE (TREE_TYPE (base)))
3507 || (TYPE_SIZE (TREE_TYPE (lhs))
3508 && TYPE_SIZE (TREE_TYPE (base))
3509 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3510 TYPE_SIZE (TREE_TYPE (base)),
3511 flags: 0)))
3512 && operand_equal_p (lhs, base,
3513 flags: OEP_ADDRESS_OF
3514 | OEP_MATCH_SIDE_EFFECTS))))
3515 {
3516 ++alias_stats.stmt_kills_ref_p_yes;
3517 return true;
3518 }
3519 }
3520
3521 /* Now look for non-literal equal bases with the restriction of
3522 handling constant offset and size. */
3523 /* For a must-alias check we need to be able to constrain
3524 the access properly. */
3525 if (!ref->max_size_known_p ())
3526 {
3527 ++alias_stats.stmt_kills_ref_p_no;
3528 return false;
3529 }
3530 poly_int64 size, offset, max_size;
3531 bool reverse;
3532 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3533 &reverse);
3534 if (store_kills_ref_p (base, offset, size, max_size, ref))
3535 {
3536 ++alias_stats.stmt_kills_ref_p_yes;
3537 return true;
3538 }
3539 }
3540
3541 if (is_gimple_call (gs: stmt))
3542 {
3543 tree callee = gimple_call_fndecl (gs: stmt);
3544 struct cgraph_node *node;
3545 modref_summary *summary;
3546
3547 /* Try to disambiguate using modref summary. Modref records a vector
3548 of stores with known offsets relative to function parameters that must
3549	 happen on every execution of the function.  Find if we have a matching
3550	 store and verify that the function cannot use the value.  */
3551 if (callee != NULL_TREE
3552 && (node = cgraph_node::get (decl: callee)) != NULL
3553 && node->binds_to_current_def_p ()
3554 && (summary = get_modref_function_summary (func: node)) != NULL
3555 && summary->kills.length ()
3556	     /* Check that we cannot trap while evaluating function
3557		parameters.  This check is overly conservative.  */
3558 && (!cfun->can_throw_non_call_exceptions
3559 || (!stmt_can_throw_internal (cfun, stmt)
3560 && (!stmt_can_throw_external (cfun, stmt)
3561 || !ref_may_alias_global_p (ref, escaped_local_p: false)))))
3562 {
3563 for (auto kill : summary->kills)
3564 {
3565 ao_ref dref;
3566
3567	      /* We can only do useful compares if we know the access range
3568 precisely. */
3569 if (!kill.get_ao_ref (stmt: as_a <gcall *> (p: stmt), ref: &dref))
3570 continue;
3571 if (store_kills_ref_p (base: ao_ref_base (ref: &dref), offset: dref.offset,
3572 size: dref.size, max_size: dref.max_size, ref))
3573 {
3574		  /* For the store to be killed it must not be used
3575		     earlier.  */
3576 if (ref_maybe_used_by_call_p_1 (call: as_a <gcall *> (p: stmt), ref,
3577 tbaa_p: true)
3578 || !dbg_cnt (index: ipa_mod_ref))
3579 break;
3580 if (dump_file && (dump_flags & TDF_DETAILS))
3581 {
3582 fprintf (stream: dump_file,
3583 format: "ipa-modref: call stmt ");
3584 print_gimple_stmt (dump_file, stmt, 0);
3585 fprintf (stream: dump_file,
3586 format: "ipa-modref: call to %s kills ",
3587 node->dump_name ());
3588 print_generic_expr (dump_file, ref->base);
3589 fprintf (stream: dump_file, format: "\n");
3590 }
3591 ++alias_stats.modref_kill_yes;
3592 return true;
3593 }
3594 }
3595 ++alias_stats.modref_kill_no;
3596 }
3597 if (callee != NULL_TREE
3598 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3599 switch (DECL_FUNCTION_CODE (decl: callee))
3600 {
3601 case BUILT_IN_FREE:
3602 {
3603 tree ptr = gimple_call_arg (gs: stmt, index: 0);
3604 tree base = ao_ref_base (ref);
3605 if (base && TREE_CODE (base) == MEM_REF
3606 && TREE_OPERAND (base, 0) == ptr)
3607 {
3608 ++alias_stats.stmt_kills_ref_p_yes;
3609 return true;
3610 }
3611 break;
3612 }
3613
3614 case BUILT_IN_MEMCPY:
3615 case BUILT_IN_MEMPCPY:
3616 case BUILT_IN_MEMMOVE:
3617 case BUILT_IN_MEMSET:
3618 case BUILT_IN_MEMCPY_CHK:
3619 case BUILT_IN_MEMPCPY_CHK:
3620 case BUILT_IN_MEMMOVE_CHK:
3621 case BUILT_IN_MEMSET_CHK:
3622 case BUILT_IN_STRNCPY:
3623 case BUILT_IN_STPNCPY:
3624 case BUILT_IN_CALLOC:
3625 {
3626 /* For a must-alias check we need to be able to constrain
3627 the access properly. */
3628 if (!ref->max_size_known_p ())
3629 {
3630 ++alias_stats.stmt_kills_ref_p_no;
3631 return false;
3632 }
3633 tree dest;
3634 tree len;
3635
3636 /* In execution order a calloc call will never kill
3637 anything. However, DSE will (ab)use this interface
3638 to ask if a calloc call writes the same memory locations
3639 as a later assignment, memset, etc. So handle calloc
3640 in the expected way. */
3641 if (DECL_FUNCTION_CODE (decl: callee) == BUILT_IN_CALLOC)
3642 {
3643 tree arg0 = gimple_call_arg (gs: stmt, index: 0);
3644 tree arg1 = gimple_call_arg (gs: stmt, index: 1);
3645 if (TREE_CODE (arg0) != INTEGER_CST
3646 || TREE_CODE (arg1) != INTEGER_CST)
3647 {
3648 ++alias_stats.stmt_kills_ref_p_no;
3649 return false;
3650 }
3651
3652 dest = gimple_call_lhs (gs: stmt);
3653 if (!dest)
3654 {
3655 ++alias_stats.stmt_kills_ref_p_no;
3656 return false;
3657 }
3658 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3659 }
3660 else
3661 {
3662 dest = gimple_call_arg (gs: stmt, index: 0);
3663 len = gimple_call_arg (gs: stmt, index: 2);
3664 }
3665 if (!poly_int_tree_p (t: len))
3666 return false;
3667 ao_ref dref;
3668 ao_ref_init_from_ptr_and_size (ref: &dref, ptr: dest, size: len);
3669 if (store_kills_ref_p (base: ao_ref_base (ref: &dref), offset: dref.offset,
3670 size: dref.size, max_size: dref.max_size, ref))
3671 {
3672 ++alias_stats.stmt_kills_ref_p_yes;
3673 return true;
3674 }
3675 break;
3676 }
3677
3678 case BUILT_IN_VA_END:
3679 {
3680 tree ptr = gimple_call_arg (gs: stmt, index: 0);
3681 if (TREE_CODE (ptr) == ADDR_EXPR)
3682 {
3683 tree base = ao_ref_base (ref);
3684 if (TREE_OPERAND (ptr, 0) == base)
3685 {
3686 ++alias_stats.stmt_kills_ref_p_yes;
3687 return true;
3688 }
3689 }
3690 break;
3691 }
3692
3693 default:;
3694 }
3695 }
3696 ++alias_stats.stmt_kills_ref_p_no;
3697 return false;
3698}
3699
3700bool
3701stmt_kills_ref_p (gimple *stmt, tree ref)
3702{
3703 ao_ref r;
3704 ao_ref_init (r: &r, ref);
3705 return stmt_kills_ref_p (stmt, ref: &r);
3706}
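
/* Usage sketch (illustrative only; EARLIER_STORE and LATER_STMT are
   hypothetical).  A DSE-style client asking whether a later statement
   overwrites everything an earlier store wrote could do:

     ao_ref r;
     ao_ref_init (&r, gimple_assign_lhs (earlier_store));
     if (stmt_kills_ref_p (later_stmt, &r))
       the earlier store is dead if nothing reads R in between;

   Note the asymmetry with stmt_may_clobber_ref_p: a kill requires that the
   whole of REF is overwritten, not merely that some part of it may be.  */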
3707
3708/* Return whether REF can be subject to store data races. */
3709
3710bool
3711ref_can_have_store_data_races (tree ref)
3712{
3713  /* With -fallow-store-data-races we do not care about them.  */
3714 if (flag_store_data_races)
3715 return false;
3716
3717 tree base = get_base_address (t: ref);
3718 if (auto_var_p (base)
3719 && ! may_be_aliased (var: base))
3720 /* Automatic variables not aliased are not subject to
3721 data races. */
3722 return false;
3723
3724 return true;
3725}
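
/* For example, a store to a non-address-taken automatic variable such as a
   local loop counter can never be subject to a store data race, whereas a
   store through a pointer or to a global has to be treated as potentially
   racy unless -fallow-store-data-races is in effect.  */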
3726
3727
3728/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3729   TARGET, or a statement clobbering the memory reference REF, in which
3730   case false is returned.  The walk starts with VUSE, one argument of PHI.  */
3731
3732static bool
3733maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3734 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3735 bitmap *visited, bool abort_on_visited,
3736 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3737 translate_flags disambiguate_only,
3738 void *data)
3739{
3740 basic_block bb = gimple_bb (g: phi);
3741
3742 if (!*visited)
3743 {
3744 *visited = BITMAP_ALLOC (NULL);
3745 bitmap_tree_view (*visited);
3746 }
3747
3748 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3749
3750 /* Walk until we hit the target. */
3751 while (vuse != target)
3752 {
3753 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3754 /* If we are searching for the target VUSE by walking up to
3755 TARGET_BB dominating the original PHI we are finished once
3756 we reach a default def or a definition in a block dominating
3757 that block. Update TARGET and return. */
3758 if (!target
3759 && (gimple_nop_p (g: def_stmt)
3760 || dominated_by_p (CDI_DOMINATORS,
3761 target_bb, gimple_bb (g: def_stmt))))
3762 {
3763 target = vuse;
3764 return true;
3765 }
3766
3767 /* Recurse for PHI nodes. */
3768 if (gimple_code (g: def_stmt) == GIMPLE_PHI)
3769 {
3770 /* An already visited PHI node ends the walk successfully. */
3771 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3772 return !abort_on_visited;
3773 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3774 visited, abort_on_visited,
3775 translate, data, disambiguate_only);
3776 if (!vuse)
3777 return false;
3778 continue;
3779 }
3780 else if (gimple_nop_p (g: def_stmt))
3781 return false;
3782 else
3783 {
3784	  /* A clobbering statement or the end of the IL ends the walk unsuccessfully.  */
3785 if ((int)limit <= 0)
3786 return false;
3787 --limit;
3788 if (stmt_may_clobber_ref_p_1 (stmt: def_stmt, ref, tbaa_p))
3789 {
3790 translate_flags tf = disambiguate_only;
3791 if (translate
3792 && (*translate) (ref, vuse, data, &tf) == NULL)
3793 ;
3794 else
3795 return false;
3796 }
3797 }
3798 /* If we reach a new basic-block see if we already skipped it
3799 in a previous walk that ended successfully. */
3800 if (gimple_bb (g: def_stmt) != bb)
3801 {
3802 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3803 return !abort_on_visited;
3804 bb = gimple_bb (g: def_stmt);
3805 }
3806 vuse = gimple_vuse (g: def_stmt);
3807 }
3808 return true;
3809}
3810
3811
3812/* Starting from a PHI node for the virtual operand of the memory reference
3813   REF, find a continuation virtual operand that allows us to continue walking
3814   statements dominating PHI, skipping only statements that cannot possibly
3815 clobber REF. Decrements LIMIT for each alias disambiguation done
3816 and aborts the walk, returning NULL_TREE if it reaches zero.
3817 Returns NULL_TREE if no suitable virtual operand can be found. */
3818
3819tree
3820get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3821 unsigned int &limit, bitmap *visited,
3822 bool abort_on_visited,
3823 void *(*translate)(ao_ref *, tree, void *,
3824 translate_flags *),
3825 void *data,
3826 translate_flags disambiguate_only)
3827{
3828 unsigned nargs = gimple_phi_num_args (gs: phi);
3829
3830  /* We can simply look through a single-argument PHI.  */
3831 if (nargs == 1)
3832 return PHI_ARG_DEF (phi, 0);
3833
3834 /* For two or more arguments try to pairwise skip non-aliasing code
3835     until we hit the phi argument definition that dominates all the others.  */
3836 basic_block phi_bb = gimple_bb (g: phi);
3837 tree arg0, arg1;
3838 unsigned i;
3839
3840  /* Find a candidate for the virtual operand whose definition
3841 dominates those of all others. */
3842 /* First look if any of the args themselves satisfy this. */
3843 for (i = 0; i < nargs; ++i)
3844 {
3845 arg0 = PHI_ARG_DEF (phi, i);
3846 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3847 break;
3848 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3849 if (def_bb != phi_bb
3850 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3851 break;
3852 arg0 = NULL_TREE;
3853 }
3854  /* If not, see if we can reach such a candidate by walking defs
3855 until we hit the immediate dominator. maybe_skip_until will
3856 do that for us. */
3857 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3858
3859 /* Then check against the (to be) found candidate. */
3860 for (i = 0; i < nargs; ++i)
3861 {
3862 arg1 = PHI_ARG_DEF (phi, i);
3863 if (arg1 == arg0)
3864 ;
3865 else if (! maybe_skip_until (phi, target&: arg0, target_bb: dom, ref, vuse: arg1, tbaa_p,
3866 limit, visited,
3867 abort_on_visited,
3868 translate,
3869 /* Do not valueize when walking over
3870 backedges. */
3871 disambiguate_only: dominated_by_p
3872 (CDI_DOMINATORS,
3873 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3874 phi_bb)
3875 ? TR_DISAMBIGUATE
3876 : disambiguate_only, data))
3877 return NULL_TREE;
3878 }
3879
3880 return arg0;
3881}
3882
3883/* Based on the memory reference REF and its virtual use VUSE call
3884 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3885   itself.  That is, for each virtual use whose defining statement
3886 does not clobber REF.
3887
3888 WALKER is called with REF, the current virtual use and DATA. If
3889 WALKER returns non-NULL the walk stops and its result is returned.
3890 At the end of a non-successful walk NULL is returned.
3891
3892 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3893   use whose definition is a statement that may clobber REF, and DATA.
3894 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3895 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3896 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3897 to adjust REF and *DATA to make that valid.
3898
3899 VALUEIZE if non-NULL is called with the next VUSE that is considered
3900   and its return value is substituted for it.  This can be used to
3901 implement optimistic value-numbering for example. Note that the
3902 VUSE argument is assumed to be valueized already.
3903
3904   LIMIT specifies the number of alias queries we are allowed to do;
3905 the walk stops when it reaches zero and NULL is returned. LIMIT
3906 is decremented by the number of alias queries (plus adjustments
3907 done by the callbacks) upon return.
3908
3909 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3910
3911void *
3912walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3913 void *(*walker)(ao_ref *, tree, void *),
3914 void *(*translate)(ao_ref *, tree, void *,
3915 translate_flags *),
3916 tree (*valueize)(tree),
3917 unsigned &limit, void *data)
3918{
3919 bitmap visited = NULL;
3920 void *res;
3921 bool translated = false;
3922
3923 timevar_push (tv: TV_ALIAS_STMT_WALK);
3924
3925 do
3926 {
3927 gimple *def_stmt;
3928
3929 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3930 res = (*walker) (ref, vuse, data);
3931 /* Abort walk. */
3932 if (res == (void *)-1)
3933 {
3934 res = NULL;
3935 break;
3936 }
3937 /* Lookup succeeded. */
3938 else if (res != NULL)
3939 break;
3940
3941 if (valueize)
3942 {
3943 vuse = valueize (vuse);
3944 if (!vuse)
3945 {
3946 res = NULL;
3947 break;
3948 }
3949 }
3950 def_stmt = SSA_NAME_DEF_STMT (vuse);
3951 if (gimple_nop_p (g: def_stmt))
3952 break;
3953 else if (gimple_code (g: def_stmt) == GIMPLE_PHI)
3954 vuse = get_continuation_for_phi (phi: def_stmt, ref, tbaa_p, limit,
3955 visited: &visited, abort_on_visited: translated, translate, data);
3956 else
3957 {
3958 if ((int)limit <= 0)
3959 {
3960 res = NULL;
3961 break;
3962 }
3963 --limit;
3964 if (stmt_may_clobber_ref_p_1 (stmt: def_stmt, ref, tbaa_p))
3965 {
3966 if (!translate)
3967 break;
3968 translate_flags disambiguate_only = TR_TRANSLATE;
3969 res = (*translate) (ref, vuse, data, &disambiguate_only);
3970 /* Failed lookup and translation. */
3971 if (res == (void *)-1)
3972 {
3973 res = NULL;
3974 break;
3975 }
3976 /* Lookup succeeded. */
3977 else if (res != NULL)
3978 break;
3979 /* Translation succeeded, continue walking. */
3980 translated = translated || disambiguate_only == TR_TRANSLATE;
3981 }
3982 vuse = gimple_vuse (g: def_stmt);
3983 }
3984 }
3985 while (vuse);
3986
3987 if (visited)
3988 BITMAP_FREE (visited);
3989
3990 timevar_pop (tv: TV_ALIAS_STMT_WALK);
3991
3992 return res;
3993}
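
/* Usage sketch (illustrative only; the walker callback, its DATA payload
   and the limit value are hypothetical):

     static void *
     find_def_walker (ao_ref *ref, tree vuse, void *data)
     {
       gimple *def = SSA_NAME_DEF_STMT (vuse);
       if (provides_value_of_ref_p (def, ref, data))   // hypothetical test
	 return def;	// stop the walk, lookup succeeded
       return NULL;	// keep walking older VUSEs
     }

     unsigned limit = 1000;
     void *res = walk_non_aliased_vuses (&r, vuse, true, find_def_walker,
					 NULL, NULL, limit, data);

   Returning (void *)-1 from the walker aborts the walk with a NULL result.
   Value-numbering is a typical client of this interface and additionally
   supplies a TRANSLATE callback to look through partially clobbering
   stores.  */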
3994
3995
3996/* Based on the memory reference REF call WALKER for each vdef whose
3997 defining statement may clobber REF, starting with VDEF. If REF
3998 is NULL_TREE, each defining statement is visited.
3999
4000 WALKER is called with REF, the current vdef and DATA. If WALKER
4001 returns true the walk is stopped, otherwise it continues.
4002
4003 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
4004   The pointer may be NULL, in which case this information is not tracked.
4005
4006 At PHI nodes walk_aliased_vdefs forks into one walk for each
4007 PHI argument (but only one walk continues at merge points), the
4008 return value is true if any of the walks was successful.
4009
4010 The function returns the number of statements walked or -1 if
4011 LIMIT stmts were walked and the walk was aborted at this point.
4012 If LIMIT is zero the walk is not aborted. */
4013
4014static int
4015walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
4016 bool (*walker)(ao_ref *, tree, void *), void *data,
4017 bitmap *visited, unsigned int cnt,
4018 bool *function_entry_reached, unsigned limit)
4019{
4020 do
4021 {
4022 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
4023
4024 if (*visited
4025 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
4026 return cnt;
4027
4028 if (gimple_nop_p (g: def_stmt))
4029 {
4030 if (function_entry_reached)
4031 *function_entry_reached = true;
4032 return cnt;
4033 }
4034 else if (gimple_code (g: def_stmt) == GIMPLE_PHI)
4035 {
4036 unsigned i;
4037 if (!*visited)
4038 {
4039 *visited = BITMAP_ALLOC (NULL);
4040 bitmap_tree_view (*visited);
4041 }
4042 for (i = 0; i < gimple_phi_num_args (gs: def_stmt); ++i)
4043 {
4044 int res = walk_aliased_vdefs_1 (ref,
4045 vdef: gimple_phi_arg_def (gs: def_stmt, index: i),
4046 walker, data, visited, cnt,
4047 function_entry_reached, limit);
4048 if (res == -1)
4049 return -1;
4050 cnt = res;
4051 }
4052 return cnt;
4053 }
4054
4055 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
4056 cnt++;
4057 if (cnt == limit)
4058 return -1;
4059 if ((!ref
4060 || stmt_may_clobber_ref_p_1 (stmt: def_stmt, ref))
4061 && (*walker) (ref, vdef, data))
4062 return cnt;
4063
4064 vdef = gimple_vuse (g: def_stmt);
4065 }
4066 while (1);
4067}
4068
4069int
4070walk_aliased_vdefs (ao_ref *ref, tree vdef,
4071 bool (*walker)(ao_ref *, tree, void *), void *data,
4072 bitmap *visited,
4073 bool *function_entry_reached, unsigned int limit)
4074{
4075 bitmap local_visited = NULL;
4076 int ret;
4077
4078 timevar_push (tv: TV_ALIAS_STMT_WALK);
4079
4080 if (function_entry_reached)
4081 *function_entry_reached = false;
4082
4083 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
4084 visited: visited ? visited : &local_visited, cnt: 0,
4085 function_entry_reached, limit);
4086 if (local_visited)
4087 BITMAP_FREE (local_visited);
4088
4089 timevar_pop (tv: TV_ALIAS_STMT_WALK);
4090
4091 return ret;
4092}
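
/* Usage sketch (illustrative only; the callback and its DATA are
   hypothetical):

     static bool
     collect_clobberer (ao_ref *ref, tree vdef, void *data)
     {
       ((vec<gimple *> *) data)->safe_push (SSA_NAME_DEF_STMT (vdef));
       return false;	// false: keep walking further definitions
     }

     auto_vec<gimple *> clobberers;
     walk_aliased_vdefs (&r, gimple_vuse (use_stmt), collect_clobberer,
			 &clobberers, NULL, NULL, 100);

   Unlike walk_non_aliased_vuses, this walk follows all PHI arguments, so it
   can visit every may-clobbering definition that reaches the use.  */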
4093
4094/* Verify validity of the fnspec string.
4095 See attr-fnspec.h for details. */
4096
4097void
4098attr_fnspec::verify ()
4099{
4100 bool err = false;
4101 if (!len)
4102 return;
4103
4104 /* Check return value specifier. */
4105 if (len < return_desc_size)
4106 err = true;
4107 else if ((len - return_desc_size) % arg_desc_size)
4108 err = true;
4109 else if ((str[0] < '1' || str[0] > '4')
4110 && str[0] != '.' && str[0] != 'm')
4111 err = true;
4112
4113 switch (str[1])
4114 {
4115 case ' ':
4116 case 'p':
4117 case 'P':
4118 case 'c':
4119 case 'C':
4120 break;
4121 default:
4122 err = true;
4123 }
4124 if (err)
4125 internal_error ("invalid fn spec attribute \"%s\"", str);
4126
4127 /* Now check all parameters. */
4128 for (unsigned int i = 0; arg_specified_p (i); i++)
4129 {
4130 unsigned int idx = arg_idx (i);
4131 switch (str[idx])
4132 {
4133 case 'x':
4134 case 'X':
4135 case 'r':
4136 case 'R':
4137 case 'o':
4138 case 'O':
4139 case 'w':
4140 case 'W':
4141 case '.':
4142 if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
4143 || str[idx + 1] == 't')
4144 {
4145 if (str[idx] != 'r' && str[idx] != 'R'
4146 && str[idx] != 'w' && str[idx] != 'W'
4147 && str[idx] != 'o' && str[idx] != 'O')
4148 err = true;
4149 if (str[idx + 1] != 't'
4150 /* Size specified is scalar, so it should be described
4151 by ". " if specified at all. */
4152 && (arg_specified_p (i: str[idx + 1] - '1')
4153 && str[arg_idx (i: str[idx + 1] - '1')] != '.'))
4154 err = true;
4155 }
4156 else if (str[idx + 1] != ' ')
4157 err = true;
4158 break;
4159 default:
4160 if (str[idx] < '1' || str[idx] > '9')
4161 err = true;
4162 }
4163 if (err)
4164 internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
4165 }
4166}
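
/* As an example of the format checked above (see attr-fnspec.h for the
   authoritative description): the string ". R " passes verification, where
   the first two characters say nothing is known about the return value and
   side effects, and the argument descriptor "R " marks the first argument
   as a read-only pointer with no size information.  A descriptor letter
   outside the accepted set, e.g. ". q ", triggers the internal_error
   above.  */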
4167
4168/* Return true if TYPE1 and TYPE2 will always give the same answer
4169 when compared with other types using same_type_for_tbaa. */
4170
4171static bool
4172types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
4173 bool lto_streaming_safe)
4174{
4175 /* We use same_type_for_tbaa_p to match types in the access path.
4176 This check is overly conservative. */
4177 type1 = TYPE_MAIN_VARIANT (type1);
4178 type2 = TYPE_MAIN_VARIANT (type2);
4179
4180 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
4181 != TYPE_STRUCTURAL_EQUALITY_P (type2))
4182 return false;
4183 if (TYPE_STRUCTURAL_EQUALITY_P (type1))
4184 return true;
4185
4186 if (lto_streaming_safe)
4187 return type1 == type2;
4188 else
4189 return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
4190}
4191
4192/* Return true if TYPE1 and TYPE2 will always give the same answer
4193 when compared with other types using same_type_for_tbaa. */
4194
4195bool
4196types_equal_for_same_type_for_tbaa_p (tree type1, tree type2)
4197{
4198 return types_equal_for_same_type_for_tbaa_p (type1, type2,
4199 lto_streaming_safe: lto_streaming_expected_p ());
4200}
4201
4202/* Compare REF1 and REF2 and return flags specifying their differences.
4203 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4204 types that are going to be recomputed.
4205 If TBAA is true also compare TBAA metadata. */
4206
4207int
4208ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
4209 bool lto_streaming_safe,
4210 bool tbaa)
4211{
4212 if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
4213 return SEMANTICS;
4214 tree base1 = ao_ref_base (ref: ref1);
4215 tree base2 = ao_ref_base (ref: ref2);
4216
4217 if (!known_eq (ref1->offset, ref2->offset)
4218 || !known_eq (ref1->size, ref2->size)
4219 || !known_eq (ref1->max_size, ref2->max_size))
4220 return SEMANTICS;
4221
4222  /* For variable accesses we need to compare the actual access paths
4223     to check that both refs access the same address and have the same size.  */
4224 if (!known_eq (ref1->size, ref1->max_size))
4225 {
4226 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
4227 TYPE_SIZE (TREE_TYPE (ref2->ref)), flags: 0))
4228 return SEMANTICS;
4229 tree r1 = ref1->ref;
4230 tree r2 = ref2->ref;
4231
4232 /* Handle toplevel COMPONENT_REFs of bitfields.
4233 Those are special since they are not allowed in
4234 ADDR_EXPR. */
4235 if (TREE_CODE (r1) == COMPONENT_REF
4236 && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
4237 {
4238 if (TREE_CODE (r2) != COMPONENT_REF
4239 || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4240 return SEMANTICS;
4241 tree field1 = TREE_OPERAND (r1, 1);
4242 tree field2 = TREE_OPERAND (r2, 1);
4243 if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
4244 DECL_FIELD_OFFSET (field2), flags: 0)
4245 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
4246 DECL_FIELD_BIT_OFFSET (field2), flags: 0)
4247 || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), flags: 0)
4248 || !types_compatible_p (TREE_TYPE (r1),
4249 TREE_TYPE (r2)))
4250 return SEMANTICS;
4251 r1 = TREE_OPERAND (r1, 0);
4252 r2 = TREE_OPERAND (r2, 0);
4253 }
4254 else if (TREE_CODE (r2) == COMPONENT_REF
4255 && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4256 return SEMANTICS;
4257
4258 /* Similarly for bit field refs. */
4259 if (TREE_CODE (r1) == BIT_FIELD_REF)
4260 {
4261 if (TREE_CODE (r2) != BIT_FIELD_REF
4262 || !operand_equal_p (TREE_OPERAND (r1, 1),
4263 TREE_OPERAND (r2, 1), flags: 0)
4264 || !operand_equal_p (TREE_OPERAND (r1, 2),
4265 TREE_OPERAND (r2, 2), flags: 0)
4266 || !types_compatible_p (TREE_TYPE (r1),
4267 TREE_TYPE (r2)))
4268 return SEMANTICS;
4269 r1 = TREE_OPERAND (r1, 0);
4270 r2 = TREE_OPERAND (r2, 0);
4271 }
4272 else if (TREE_CODE (r2) == BIT_FIELD_REF)
4273 return SEMANTICS;
4274
4275 /* Now we can compare the address of actual memory access. */
4276 if (!operand_equal_p (r1, r2, flags: OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4277 return SEMANTICS;
4278 }
4279 /* For constant accesses we get more matches by comparing offset only. */
4280 else if (!operand_equal_p (base1, base2,
4281 flags: OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4282 return SEMANTICS;
4283
4284 /* We can't simply use get_object_alignment_1 on the full
4285     reference, since for accesses with variable indexes it reports
4286     an overly conservative alignment.  */
4287 unsigned int align1, align2;
4288 unsigned HOST_WIDE_INT bitpos1, bitpos2;
4289 bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4290 bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4291  /* ??? For MEM_REFs get_object_alignment_1 determines alignment from
4292     TYPE_ALIGN but still returns false.  This seems to contradict
4293     its description.  So compare even if the alignment is unknown.  */
4294 if (known1 != known2
4295 || (bitpos1 != bitpos2 || align1 != align2))
4296 return SEMANTICS;
4297
4298 /* Now we know that accesses are semantically same. */
4299 int flags = 0;
4300
4301 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4302 tree rbase1 = ref1->ref;
4303 if (rbase1)
4304 while (handled_component_p (t: rbase1))
4305 rbase1 = TREE_OPERAND (rbase1, 0);
4306 tree rbase2 = ref2->ref;
4307 while (handled_component_p (t: rbase2))
4308 rbase2 = TREE_OPERAND (rbase2, 0);
4309
4310 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4311 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4312 Otherwise we need to match bases and cliques. */
4313 if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4314 && MR_DEPENDENCE_CLIQUE (rbase1))
4315 || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4316 && MR_DEPENDENCE_CLIQUE (rbase2)))
4317 && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4318 || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4319 || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4320 flags |= DEPENDENCE_CLIQUE;
4321
4322 if (!tbaa)
4323 return flags;
4324
4325  /* Alias sets are not stable across LTO streaming; be conservative here
4326 and compare types the alias sets are ultimately based on. */
4327 if (lto_streaming_safe)
4328 {
4329 tree t1 = ao_ref_alias_ptr_type (ref: ref1);
4330 tree t2 = ao_ref_alias_ptr_type (ref: ref2);
4331 if (!alias_ptr_types_compatible_p (t1, t2))
4332 flags |= REF_ALIAS_SET;
4333
4334 t1 = ao_ref_base_alias_ptr_type (ref: ref1);
4335 t2 = ao_ref_base_alias_ptr_type (ref: ref2);
4336 if (!alias_ptr_types_compatible_p (t1, t2))
4337 flags |= BASE_ALIAS_SET;
4338 }
4339 else
4340 {
4341 if (ao_ref_alias_set (ref: ref1) != ao_ref_alias_set (ref: ref2))
4342 flags |= REF_ALIAS_SET;
4343 if (ao_ref_base_alias_set (ref: ref1) != ao_ref_base_alias_set (ref: ref2))
4344 flags |= BASE_ALIAS_SET;
4345 }
4346
4347 /* Access path is used only on non-view-converted references. */
4348 bool view_converted = view_converted_memref_p (base: rbase1);
4349 if (view_converted_memref_p (base: rbase2) != view_converted)
4350 return flags | ACCESS_PATH;
4351 else if (view_converted)
4352 return flags;
4353
4354
4355 /* Find start of access paths and look for trailing arrays. */
4356 tree c1 = ref1->ref, c2 = ref2->ref;
4357 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4358 int nskipped1 = 0, nskipped2 = 0;
4359 int i = 0;
4360
4361 for (tree p1 = ref1->ref; handled_component_p (t: p1); p1 = TREE_OPERAND (p1, 0))
4362 {
4363 if (component_ref_to_zero_sized_trailing_array_p (ref: p1))
4364 end_struct_ref1 = p1;
4365 if (ends_tbaa_access_path_p (p1))
4366 c1 = p1, nskipped1 = i;
4367 i++;
4368 }
4369 i = 0;
4370 for (tree p2 = ref2->ref; handled_component_p (t: p2); p2 = TREE_OPERAND (p2, 0))
4371 {
4372 if (component_ref_to_zero_sized_trailing_array_p (ref: p2))
4373 end_struct_ref2 = p2;
4374 if (ends_tbaa_access_path_p (p2))
4375 c2 = p2, nskipped2 = i;
4376 i++;
4377 }
4378
4379  /* For variable accesses we cannot rely on the offset match below.
4380     We know that the paths are structurally the same, so only check that
4381     the starts of the TBAA paths did not diverge.  */
4382 if (!known_eq (ref1->size, ref1->max_size)
4383 && nskipped1 != nskipped2)
4384 return flags | ACCESS_PATH;
4385
4386  /* Information about trailing refs is used by
4387     aliasing_component_refs_p, which is applied only if the paths
4388     have handled components.  */
4389 if (!handled_component_p (t: c1) && !handled_component_p (t: c2))
4390 ;
4391 else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4392 return flags | ACCESS_PATH;
4393 if (end_struct_ref1
4394 && same_type_for_tbaa (TREE_TYPE (end_struct_ref1),
4395 TREE_TYPE (end_struct_ref2)) != 1)
4396 return flags | ACCESS_PATH;
4397
4398 /* Now compare all handled components of the access path.
4399     We have three oracles that care about access paths:
4400 - aliasing_component_refs_p
4401 - nonoverlapping_refs_since_match_p
4402 - nonoverlapping_component_refs_p
4403 We need to match things these oracles compare.
4404
4405 It is only necessary to check types for compatibility
4406     and offsets.  The rest of what the oracles compare are actual
4407     addresses.  Those are already known to be the same:
4408 - for constant accesses we check offsets
4409 - for variable accesses we already matched
4410 the path lexically with operand_equal_p. */
4411 while (true)
4412 {
4413 bool comp1 = handled_component_p (t: c1);
4414 bool comp2 = handled_component_p (t: c2);
4415
4416 if (comp1 != comp2)
4417 return flags | ACCESS_PATH;
4418 if (!comp1)
4419 break;
4420
4421 if (TREE_CODE (c1) != TREE_CODE (c2))
4422 return flags | ACCESS_PATH;
4423
4424	  /* aliasing_component_refs_p attempts to find a type match within
4425	     the paths.  For that reason both types need to be equal
4426	     with respect to same_type_for_tbaa_p.  */
4427 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4428 TREE_TYPE (c2),
4429 lto_streaming_safe))
4430 return flags | ACCESS_PATH;
4431 if (component_ref_to_zero_sized_trailing_array_p (ref: c1)
4432 != component_ref_to_zero_sized_trailing_array_p (ref: c2))
4433 return flags | ACCESS_PATH;
4434
4435 /* aliasing_matching_component_refs_p compares
4436 offsets within the path. Other properties are ignored.
4437 Do not bother to verify offsets in variable accesses. Here we
4438	     already compared them with operand_equal_p, so they are
4439	     structurally the same.  */
4440 if (!known_eq (ref1->size, ref1->max_size))
4441 {
4442 poly_int64 offadj1, sztmc1, msztmc1;
4443 bool reverse1;
4444 get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4445 poly_int64 offadj2, sztmc2, msztmc2;
4446 bool reverse2;
4447 get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4448 if (!known_eq (offadj1, offadj2))
4449 return flags | ACCESS_PATH;
4450 }
4451 c1 = TREE_OPERAND (c1, 0);
4452 c2 = TREE_OPERAND (c2, 0);
4453 }
4454 /* Finally test the access type. */
4455 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4456 TREE_TYPE (c2),
4457 lto_streaming_safe))
4458 return flags | ACCESS_PATH;
4459 return flags;
4460}
4461
4462/* Hash REF into HSTATE.  If LTO_STREAMING_SAFE is true, do not use alias sets
4463   and canonical types.  */
4464void
4465ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4466 inchash::hash &hstate)
4467{
4468 tree base = ao_ref_base (ref);
4469 tree tbase = base;
4470
4471 if (!known_eq (ref->size, ref->max_size))
4472 {
4473 tree r = ref->ref;
4474 if (TREE_CODE (r) == COMPONENT_REF
4475 && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4476 {
4477 tree field = TREE_OPERAND (r, 1);
4478 hash_operand (DECL_FIELD_OFFSET (field), hstate, flags: 0);
4479 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, flags: 0);
4480 hash_operand (DECL_SIZE (field), hstate, flags: 0);
4481 r = TREE_OPERAND (r, 0);
4482 }
4483 if (TREE_CODE (r) == BIT_FIELD_REF)
4484 {
4485 hash_operand (TREE_OPERAND (r, 1), hstate, flags: 0);
4486 hash_operand (TREE_OPERAND (r, 2), hstate, flags: 0);
4487 r = TREE_OPERAND (r, 0);
4488 }
4489 hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, flags: 0);
4490 hash_operand (r, hstate, flags: OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4491 }
4492 else
4493 {
4494 hash_operand (tbase, hstate, flags: OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4495 hstate.add_poly_int (v: ref->offset);
4496 hstate.add_poly_int (v: ref->size);
4497 hstate.add_poly_int (v: ref->max_size);
4498 }
4499 if (!lto_streaming_safe && tbaa)
4500 {
4501 hstate.add_int (v: ao_ref_alias_set (ref));
4502 hstate.add_int (v: ao_ref_base_alias_set (ref));
4503 }
4504}
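
/* hash_ao_ref is meant to be consistent with compare_ao_refs: two
   references that compare equal (flags of 0) under the same
   LTO_STREAMING_SAFE and TBAA settings should hash identically, so a client
   can bucket references by hash and confirm matches with the comparator.
   A minimal sketch (hypothetical refs R1 and R2):

     inchash::hash h1, h2;
     ao_compare c;
     c.hash_ao_ref (&r1, lto_safe, true, h1);
     c.hash_ao_ref (&r2, lto_safe, true, h2);
     if (h1.end () == h2.end ()
	 && c.compare_ao_refs (&r1, &r2, lto_safe, true) == 0)
       treat the two accesses as interchangeable;  */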
4505
