/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2023 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.cc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
#include "internal-fn.h"
#include "gimple-range.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
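
/* Illustrative example (not from the original source): given

     const char a[8] = "ab";

   SRK_STRLEN computes the exact length 2, SRK_STRLENMAX the maximum
   constant length, and SRK_LENRANGE a range of lengths also bounded
   by the size of the enclosing object, recording 8 as the size of the
   largest array the string may refer to.  SRK_INT_VALUE instead treats
   the argument as an integer and computes its value.  */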

static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.  We can get declarations that are not possible
   to reference for various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in the units where they are used, and when the other unit was compiled
     with LTO it is possible that the vtable was kept public while the
     function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
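
/* Illustrative sketch of case 1 above (hypothetical C++ sources, not
   part of this file):

     // unit1.C -- the vtable for S is keyed to this unit.
     struct S { virtual void f (); };
     void S::f () {}

     // unit2.C -- folding a load from S's vtable here yields &S::f,
     // which this unit may reference only if the checks above pass.
     struct S { virtual void f (); };
     void g (S *p) { p->f (); }  */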

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

/* If SYM is a constant variable with a known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val
	      && is_gimple_min_invariant (val)
	      && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
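
/* For example (illustrative), given a TU containing

     static const int answer = 42;

   get_symbol_constant_value on ANSWER's VAR_DECL returns the
   INTEGER_CST 42; for a 'const' register-type variable without an
   initializer that cannot be overridden at link or run time it
   returns a zero constant, and otherwise NULL_TREE.  */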


/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
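
/* For example (illustrative), maybe_fold_reference folds
   REALPART_EXPR <__complex__ (1.0, 2.0)> to 1.0, folds BIT_FIELD_REF
   and VIEW_CONVERT_EXPR of constants via the fold-const routines, and
   folds loads from constant aggregates, such as an element of a
   constant array initializer, via fold_const_aggregate_ref.  */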

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
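
/* For instance (illustrative), 'a_1 + b_2' with SSA name operands is
   a valid GIMPLE RHS, while 'a_1 + b_2 * c_3' is not: the embedded
   multiplication is not a gimple value, so a caller would have to
   gimplify it into a separate statement first.  */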


/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
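
/* Illustrative sketch of the virtual operand threading above:
   replacing a statement with VUSE .MEM_3 and VDEF .MEM_5 by a
   sequence of two stores yields

     # .MEM_7 = VDEF <.MEM_3>
     *p_1 = ...;
     # .MEM_5 = VDEF <.MEM_7>
     *q_2 = ...;

   i.e. the first VUSE and the last VDEF match the replaced statement,
   with a fresh name (.MEM_7 here, hypothetical) linking the stores.  */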

/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to a call to FN
   with NARGS arguments, where the arguments in GIMPLE form
   follow the NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  wide_int zero = wi::zero (TYPE_PRECISION (type));
  value_range valid_range (type, zero, ssize_max);
  value_range vr;
  get_range_query (cfun)->range_of_expr (vr, size);

  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (valid_range);
  return vr.zero_p ();
}
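
/* Worked example (illustrative): for a size_t SIZE with computed
   range [0, 3], intersecting with the valid range [0, SSIZE_MAX]
   leaves [0, 3], so size_must_be_zero_p returns false.  If SIZE is
   instead known to be either zero or greater than SSIZE_MAX, only
   [0, 0] survives the intersection and the function returns true.  */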

/* Fold a function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Calls to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies, but to avoid false
		 positives for unreachable code defer the warning until
		 after DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      if (int_mode_for_size (ilen * 8, 0).exists (&mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = build_nonstandard_integer_type (ilen * 8, 1);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original
	 length.  As soon as strlenopt tests that rely on it for passing
	 are adjusted, this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies, but to avoid
	 false positives for unreachable code defer the warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
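
/* Illustrative example of the folding above: with suitably aligned
   4-byte int objects a and b, the call

     memcpy (&a, &b, 4);

   becomes a plain load/store pair, in GIMPLE dump syntax

     _1 = MEM <unsigned int> [(char * {ref-all})&b];
     MEM <unsigned int> [(char * {ref-all})&a] = _1;

   and for mempcpy a LHS is additionally assigned 'dest + len'.  */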

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold a function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the byte value C.  Return whether a
   simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if ((!INTEGRAL_TYPE_P (etype)
       && !POINTER_TYPE_P (etype))
      || TREE_CODE (etype) == BITINT_TYPE)
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

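      /* Replicate the low byte of C into every byte of CVAL, e.g. 0x2a
	 becomes 0x2a2a2a2a2a2a2a2a for a 64-bit HOST_WIDE_INT.  The final
	 double shift is equivalent to 'cval <<= 32' but (presumably) is
	 written this way to stay well defined if HOST_WIDE_INT were only
	 32 bits wide, where a single shift by 32 would be undefined.  */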
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
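
/* For example (illustrative), with a sufficiently aligned 'int i' the
   call memset (&i, 1, 4) is folded by the function above into the
   single store 'i = 0x1010101', and memset (&i, 0, 4) into 'i = 0'.  */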

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when the exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_ref_flexible_size_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1839
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap visited,
                  strlen_range_kind rkind,
                  c_strlen_data *pdata, unsigned eltsize)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree ops[2] = { gimple_assign_rhs2 (def_stmt),
                          gimple_assign_rhs3 (def_stmt) };

          for (unsigned int i = 0; i < 2; i++)
            if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
              {
                if (rkind != SRK_LENRANGE)
                  return false;
                /* Set the upper bound to the maximum to prevent
                   it from being adjusted in the next iteration but
                   leave MINLEN and the more conservative MAXBOUND
                   determined so far alone (or leave them null if
                   they haven't been set yet).  That the MINLEN is
                   in fact zero can be determined from MAXLEN being
                   unbounded but the discovered minimum is used for
                   diagnostics.  */
                pdata->maxlen = build_all_ones_cst (size_type_node);
              }
          return true;
        }
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
         must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
        {
          tree arg = gimple_phi_arg (def_stmt, i)->def;

          /* If this PHI has itself as an argument, we cannot
             determine the string length of this argument.  However,
             if we can find a constant string length for the other
             PHI args then we can still be sure that this is a
             constant string length.  So be optimistic and just
             continue with the next argument.  */
          if (arg == gimple_phi_result (def_stmt))
            continue;

          if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
            {
              if (rkind != SRK_LENRANGE)
                return false;
              /* Set the upper bound to the maximum to prevent
                 it from being adjusted in the next iteration but
                 leave MINLEN and the more conservative MAXBOUND
                 determined so far alone (or leave them null if
                 they haven't been set yet).  That the MINLEN is
                 in fact zero can be determined from MAXLEN being
                 unbounded but the discovered minimum is used for
                 diagnostics.  */
              pdata->maxlen = build_all_ones_cst (size_type_node);
            }
        }
      return true;

    default:
      return false;
    }
}

/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */
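
/* As an illustrative sketch (the declaration is hypothetical): given

     char a[8];   // contents unknown

   querying A with the default ELTSIZE of 1 sets PDATA->MINLEN to 0 and
   PDATA->MAXLEN to 7 (the array size less the terminating nul) and
   returns true, while a pointer into an object of unknown size sets
   PDATA->MAXLEN to -1 (all ones) and returns false.  */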

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  auto_bitmap visited;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
         (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
         members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);
  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  return !integer_all_onesp (pdata->maxlen);
}

/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.  For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  auto_bitmap visited;

  /* Reset LENDATA.MAXLEN if the call fails or when LENDATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
         *NONSTR to point to the declaration of the array and return
         the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}

/* Return true if LEN is known to be less than or equal to (or if STRICT is
   true, strictly less than) the lower bound of SIZE at compile time and false
   otherwise.  */

static bool
known_lower (gimple *stmt, tree len, tree size, bool strict = false)
{
  if (len == NULL_TREE)
    return false;

  wide_int size_range[2];
  wide_int len_range[2];
  if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
    {
      if (strict)
        return wi::ltu_p (len_range[1], size_range[0]);
      else
        return wi::leu_p (len_range[1], size_range[0]);
    }

  return false;
}

/* Fold a call to the strcpy builtin with arguments DEST and SRC.
   When the length of SRC is known, transform the call into a call
   to memcpy.  Return false if no simplification can be made.  */
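
/* As an illustration (the destination D is hypothetical and assumed
   large enough), when not optimizing for size

     strcpy (d, "abc");

   is folded into

     memcpy (d, "abc", 4);   // 4 == strlen ("abc") + 1

   and strcpy (d, d) is replaced by D with a -Wrestrict warning.  */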

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
         not point to objects and so do not indicate an overlap;
         such calls could be the result of sanitization and jump
         threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
        {
          tree func = gimple_call_fndecl (stmt);

          warning_at (loc, OPT_Wrestrict,
                      "%qD source argument is the same as destination",
                      func);
        }

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
        warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN.
   Transform the call into a call to memcpy when it is safe to do so.
   Return false if no simplification can be made.  */
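
/* For example, with a constant source whose size including the nul
   is at least the bound,

     strncpy (d, "ab", 3);

   becomes

     memcpy (d, "ab", 3);

   while strncpy (d, s, 0) is replaced by D, possibly with a
   -Wstringop-truncation warning.  */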

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
         decorated with attribute nonstring.  */
      if (!nonstring)
        {
          tree fndecl = gimple_call_fndecl (stmt);

          /* Warn about the lack of nul termination: the result is not
             a (nul-terminated) string.  */
          tree slen = get_maxval_strlen (src, SRK_STRLEN);
          if (slen && !integer_zerop (slen))
            warning_at (loc, OPT_Wstringop_truncation,
                        "%qD destination unchanged after copying no bytes "
                        "from a string of length %E",
                        fndecl, slen);
          else
            warning_at (loc, OPT_Wstringop_truncation,
                        "%qD destination unchanged after copying no bytes",
                        fndecl);
        }

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
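
/* For instance, strchr ("hello", 'l') is folded to "hello" + 2, and
   strchr (s, '\0') becomes s + strlen (s) with the strlen call
   emitted explicitly.  */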
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
        {
          replace_call_with_value (gsi, integer_zero_node);
          return true;
        }

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                              POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
        {
          gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
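
/* For instance, strstr ("hello", "ll") is folded to the constant
   "hello" + 2, while strstr with a non-constant needle is left
   alone.  */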
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
        {
          replace_call_with_value (gsi, integer_zero_node);
          return true;
        }

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
        = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
                               haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
        {
          tree c = build_int_cst (integer_type_node, q[0]);
          gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if the call was simplified and false otherwise.

   When SRC is the empty string the call is replaced by DST.  Otherwise,
   when the length of SRC is known and the block is optimized for speed,
   the call is split into a call to strlen (DST) followed by a call to
   memcpy that appends SRC, including its terminating nul, at that
   offset.  */
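
/* As a sketch of the transformation (names are illustrative),

     strcat (d, "abc");

   becomes, when the block is optimized for speed,

     tmp = strlen (d);
     memcpy (d + tmp, "abc", 4);   // 4 == strlen ("abc") + 1
   */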

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ???  We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */
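
/* For example, when the bound is known to be no less than the length
   of the constant source, strncat (d, "ab", 4) is simplified to
   strcat (d, "ab"), and strncat (d, s, 0) is replaced by D.  */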

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree src_len = c_strlen (src, 1);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (!src_len || known_lower (stmt, len, src_len, true))
    return false;

  /* Warn on constant LEN.  */
  if (TREE_CODE (len) == INTEGER_CST)
    {
      bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
      tree dstsize;

      if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
          && TREE_CODE (dstsize) == INTEGER_CST)
        {
          int cmpdst = tree_int_cst_compare (len, dstsize);

          if (cmpdst >= 0)
            {
              tree fndecl = gimple_call_fndecl (stmt);

              /* Strncat copies (at most) LEN bytes and always appends
                 the terminating NUL so the specified bound should never
                 be equal to (or greater than) the size of the destination.
                 If it is, the copy could overflow.  */
              location_t loc = gimple_location (stmt);
              nowarn = warning_at (loc, OPT_Wstringop_overflow_,
                                   cmpdst == 0
                                   ? G_("%qD specified bound %E equals "
                                        "destination size")
                                   : G_("%qD specified bound %E exceeds "
                                        "destination size %E"),
                                   fndecl, len, dstsize);
              if (nowarn)
                suppress_warning (stmt, OPT_Wstringop_overflow_);
            }
        }

      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
          && tree_int_cst_compare (src_len, len) == 0)
        {
          tree fndecl = gimple_call_fndecl (stmt);
          location_t loc = gimple_location (stmt);

          /* To avoid possible overflow the specified bound should also
             not be equal to the length of the source, even when the size
             of the destination is unknown (it's not an uncommon mistake
             to specify as the bound to strncat the length of the source).  */
          if (warning_at (loc, OPT_Wstringop_overflow_,
                          "%qD specified bound %E equals source length",
                          fndecl, len))
            suppress_warning (stmt, OPT_Wstringop_overflow_);
        }
    }

  if (!known_lower (stmt, src_len, len))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (known_lower (stmt, src_len, len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple *repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Build and append gimple statements to STMTS that load the first
   character of the memory location identified by STR.  LOC is the
   location of the statement.  */
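
/* The sequence built by the function below corresponds to GIMPLE of
   roughly the form

     tmp = MEM[(const unsigned char *)str];

   with TMP returned to the caller (TMP is a hypothetical name).  */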

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}

/* Fold the call to a str{n}{case}cmp builtin that the GSI iterator
   points to.  */
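
/* A few representative simplifications performed below: strcmp (s, s)
   folds to 0; strcmp (s, "") becomes the first character of S read as
   unsigned char; strncmp (a, b, 1) becomes the difference of the first
   characters; and strncmp (a, "abc", 8) becomes strcmp (a, "abc")
   because the bound exceeds the length of the constant string.  */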

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
        bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
        len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
        len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
        {
        case BUILT_IN_STRCMP:
        case BUILT_IN_STRCMP_EQ:
          if (len1 != nulpos1 || len2 != nulpos2)
            break;

          r = strcmp (p1, p2);
          known_result = true;
          break;

        case BUILT_IN_STRNCMP:
        case BUILT_IN_STRNCMP_EQ:
          {
            if (bound == HOST_WIDE_INT_M1U)
              break;

            /* Reduce the bound to be no more than the length
               of the shorter of the two strings, or the sizes
               of the unterminated arrays.  */
            unsigned HOST_WIDE_INT n = bound;

            if (len1 == nulpos1 && len1 < n)
              n = len1 + 1;
            if (len2 == nulpos2 && len2 < n)
              n = len2 + 1;

            if (MIN (nulpos1, nulpos2) + 1 < n)
              break;

            r = strncmp (p1, p2, n);
            known_result = true;
            break;
          }
        /* The only handleable situation is where the strings are equal
           (result 0), which is already handled by the operand_equal_p
           case above.  */
        case BUILT_IN_STRCASECMP:
          break;
        case BUILT_IN_STRNCASECMP:
          {
            if (bound == HOST_WIDE_INT_M1U)
              break;
            r = strncmp (p1, p2, bound);
            if (r == 0)
              known_result = true;
            break;
          }
        default:
          gcc_unreachable ();
        }

      if (known_result)
        {
          replace_call_with_value (gsi, build_cmp_result (type, r));
          return true;
        }
    }

  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
                       || fcode == BUILT_IN_STRCMP
                       || fcode == BUILT_IN_STRCMP_EQ
                       || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
        {
          stmt = gimple_build_assign (lhs, NOP_EXPR, var);
          gimple_seq_add_stmt_without_update (&stmts, stmt);
        }

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
        {
          tree c = create_tmp_reg_or_ssa_name (integer_type_node);
          stmt = gimple_build_assign (c, NOP_EXPR, var);
          gimple_seq_add_stmt_without_update (&stmts, stmt);

          stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
          gimple_seq_add_stmt_without_update (&stmts, stmt);
        }

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
        {
          tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
          gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
          gimple_seq_add_stmt_without_update (&stmts, convert1);

          tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
          gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
          gimple_seq_add_stmt_without_update (&stmts, convert2);

          stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
          gimple_seq_add_stmt_without_update (&stmts, stmt);
        }

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
          || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
        return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

/* Fold the call to the memchr builtin that the GSI iterator points to.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
        {
          tree mem_size, offset_node;
          byte_representation (arg1, &offset_node, &mem_size, NULL);
          unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
                                          ? 0 : tree_to_uhwi (offset_node);
          /* MEM_SIZE is the size of the array the string literal
             is stored in.  */
          unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
          gcc_checking_assert (string_length <= string_size);
          if (length <= string_size)
            {
              replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
              return true;
            }
        }
      else
        {
          unsigned HOST_WIDE_INT offset = r - p1;
          gimple_seq stmts = NULL;
          if (lhs != NULL_TREE)
            {
              tree offset_cst = build_int_cst (sizetype, offset);
              gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
                                                   arg1, offset_cst);
              gimple_seq_add_stmt_without_update (&stmts, stmt);
            }
          else
            gimple_seq_add_stmt_without_update (&stmts,
                                                gimple_build_nop ());

          gsi_replace_with_seq_vops (gsi, stmts);
          return true;
        }
    }

  return false;
}

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  The transformation is only applied when the
   result of the call is unused.  Return false if no simplification
   can be made.  */
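
/* For instance, fputs ("", f) is removed outright, fputs ("a", f)
   becomes fputc ('a', f), and fputs ("ab", f) becomes
   fwrite ("ab", 1, 2, f) unless optimizing for size.  */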

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple *repl
              = gimple_build_call (fn_fputc, 2,
                                   build_int_cst (integer_type_node, p[0]),
                                   arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple *repl
          = gimple_build_call (fn_fwrite, 4, arg0, size_one_node,
                               fold_convert (size_type_node, len), arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
}

/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Transform the
   checking call into the corresponding unchecked call when LEN
   can be shown not to exceed SIZE.  Return false if no
   simplification can be made.  */
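
/* For example, when the length is provably within the object size,

     __memcpy_chk (d, s, 16, 32);

   is folded into

     memcpy (d, s, 16);

   and similarly for the other checked variants handled here.  */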

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
                                tree dest, tree src, tree len, tree size,
                                enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        {
          replace_call_with_value (gsi, dest);
          return true;
        }
      else
        {
          gimple_seq stmts = NULL;
          len = gimple_convert_to_ptrofftype (&stmts, loc, len);
          tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                                    TREE_TYPE (dest), dest, len);
          gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
          replace_call_with_value (gsi, temp);
          return true;
        }
    }

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
    {
      /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
         least try to optimize (void) __mempcpy_chk () into
         (void) __memcpy_chk ()  */
      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
        {
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
          if (!fn)
            return false;

          gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Print a message in the dump file recording transformation of FROM to TO.  */

static void
dump_transformation (gcall *from, gcall *to)
{
  if (dump_enabled_p ())
    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
                     gimple_call_fn (from), gimple_call_fn (to));
}

/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return false if
   no simplification can be made.  */
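
/* For instance, __strcpy_chk (d, "abc", 8) becomes strcpy (d, "abc")
   because strlen ("abc") is strictly less than 8, and a __stpcpy_chk
   whose result is unused is turned into __strcpy_chk.  */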

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
                                tree dest,
                                tree src, tree size,
                                enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
         not point to objects and so do not indicate an overlap;
         such calls could be the result of sanitization and jump
         threading).  */
      if (!integer_zerop (dest)
          && !warning_suppressed_p (stmt, OPT_Wrestrict))
        {
          tree func = gimple_call_fndecl (stmt);

          warning_at (loc, OPT_Wrestrict,
                      "%qD source argument is the same as destination",
                      func);
        }

      replace_call_with_value (gsi, dest);
      return true;
    }

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (!known_lower (stmt, len, size, true)
          && !known_lower (stmt, maxlen, size, true))
        {
          if (fcode == BUILT_IN_STPCPY_CHK)
            {
              if (! ignore)
                return false;

              /* If return value of __stpcpy_chk is ignored,
                 optimize into __strcpy_chk.  */
              fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
              if (!fn)
                return false;

              gimple *repl = gimple_build_call (fn, 3, dest, src, size);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }

          if (! len || TREE_SIDE_EFFECTS (len))
            return false;

          /* If c_strlen returned something, but not provably less than size,
             transform __strcpy_chk into __memcpy_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
          if (!fn)
            return false;

          gimple_seq stmts = NULL;
          len = force_gimple_operand (len, &stmts, true, NULL_TREE);
          len = gimple_convert (&stmts, loc, size_type_node, len);
          len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
                              build_int_cst (size_type_node, 1));
          gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
          gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
                              ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 2, dest, src);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
                                 tree dest, tree src,
                                 tree len, tree size,
                                 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    {
      if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
        {
          /* If return value of __stpncpy_chk is ignored,
             optimize into __strncpy_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
          if (fn)
            {
              gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
                              ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 3, dest, src, len);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the stpcpy builtin with arguments DEST and SRC.
   Return false if no simplification can be made.  */
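
/* For a source of known length, the call

     p = stpcpy (d, "ab");

   is rewritten as

     memcpy (d, "ab", 3);
     p = d + 2;

   where P and D are illustrative names.  */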

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if SRC refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
        return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
        warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
                            exact);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
                        tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
                                      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

/* Fold a call to the __{,v}snprintf_chk builtin.  Return false if
   a normal call should be emitted rather than transforming the call
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  */
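
/* For example, when LEN is provably no greater than SIZE,

     __snprintf_chk (d, len, 0, size, "%s", s);

   is rewritten as

     snprintf (d, len, "%s", s);

   retaining any trailing arguments.  */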

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
                                  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    return false;

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining the trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}

/* Fold a call to the __{,v}sprintf_chk builtin.  Return false if
   a normal call should be emitted rather than transforming the call
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  */
3390
3391static bool
3392gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3393 enum built_in_function fcode)
3394{
3395 gcall *stmt = as_a <gcall *> (p: gsi_stmt (i: *gsi));
3396 tree dest, size, len, fn, fmt, flag;
3397 const char *fmt_str;
3398 unsigned nargs = gimple_call_num_args (gs: stmt);
3399
3400 /* Verify the required arguments in the original call. */
3401 if (nargs < 4)
3402 return false;
3403 dest = gimple_call_arg (gs: stmt, index: 0);
3404 flag = gimple_call_arg (gs: stmt, index: 1);
3405 size = gimple_call_arg (gs: stmt, index: 2);
3406 fmt = gimple_call_arg (gs: stmt, index: 3);
3407
3408 len = NULL_TREE;
3409
3410 if (!init_target_chars ())
3411 return false;
3412
3413 /* Check whether the format is a literal string constant. */
3414 fmt_str = c_getstr (fmt);
3415 if (fmt_str != NULL)
3416 {
3417 /* If the format doesn't contain % args or %%, we know the size. */
3418 if (strchr (s: fmt_str, c: target_percent) == 0)
3419 {
3420 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3421 len = build_int_cstu (size_type_node, strlen (s: fmt_str));
3422 }
      /* If the format is "%s" and the first variadic argument is a
	 string literal, we know the size too.  */
3425 else if (fcode == BUILT_IN_SPRINTF_CHK
3426 && strcmp (s1: fmt_str, s2: target_percent_s) == 0)
3427 {
3428 tree arg;
3429
3430 if (nargs == 5)
3431 {
3432 arg = gimple_call_arg (gs: stmt, index: 4);
3433 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3434 len = c_strlen (arg, 1);
3435 }
3436 }
3437 }
3438
3439 if (! integer_all_onesp (size) && !known_lower (stmt, len, size, strict: true))
3440 return false;
3441
3442 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3443 or if format doesn't contain % chars or is "%s". */
3444 if (! integer_zerop (flag))
3445 {
3446 if (fmt_str == NULL)
3447 return false;
3448 if (strchr (s: fmt_str, c: target_percent) != NULL
3449 && strcmp (s1: fmt_str, s2: target_percent_s))
3450 return false;
3451 }
3452
3453 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3454 fn = builtin_decl_explicit (fncode: fcode == BUILT_IN_VSPRINTF_CHK
3455 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3456 if (!fn)
3457 return false;
3458
  /* Replace the called function and collapse the first four arguments
     into two, retaining any trailing varargs.  */
3461 gimple_call_set_fndecl (gs: stmt, decl: fn);
3462 gimple_call_set_fntype (call_stmt: stmt, TREE_TYPE (fn));
3463 gimple_call_set_arg (gs: stmt, index: 0, arg: dest);
3464 gimple_call_set_arg (gs: stmt, index: 1, arg: fmt);
3465 for (unsigned i = 2; i < gimple_call_num_args (gs: stmt) - 2; ++i)
3466 gimple_call_set_arg (gs: stmt, index: i, arg: gimple_call_arg (gs: stmt, index: i + 2));
3467 gimple_set_num_ops (gs: stmt, num_ops: gimple_num_ops (gs: stmt) - 2);
3468 fold_stmt (gsi);
3469 return true;
3470}
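
/* As an illustrative sketch (hypothetical user code):

     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
                              "%s", "abc");

   becomes

     sprintf (buf, "%s", "abc");

   once the output length (here 3, from the literal) is known to be
   strictly smaller than the destination size.  */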
3471
3472/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3473 ORIG may be null if this is a 2-argument call. We don't attempt to
3474 simplify calls with more than 3 arguments.
3475
3476 Return true if simplification was possible, otherwise false. */
3477
3478bool
3479gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3480{
3481 gimple *stmt = gsi_stmt (i: *gsi);
3482
  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (dest, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
3486 if (gimple_call_num_args (gs: stmt) > 3)
3487 return false;
3488
3489 tree orig = NULL_TREE;
3490 if (gimple_call_num_args (gs: stmt) == 3)
3491 orig = gimple_call_arg (gs: stmt, index: 2);
3492
3493 /* Check whether the format is a literal string constant. */
3494 tree fmt = gimple_call_arg (gs: stmt, index: 1);
3495 const char *fmt_str = c_getstr (fmt);
3496 if (fmt_str == NULL)
3497 return false;
3498
3499 tree dest = gimple_call_arg (gs: stmt, index: 0);
3500
3501 if (!init_target_chars ())
3502 return false;
3503
3504 tree fn = builtin_decl_implicit (fncode: BUILT_IN_STRCPY);
3505 if (!fn)
3506 return false;
3507
3508 /* If the format doesn't contain % args or %%, use strcpy. */
3509 if (strchr (s: fmt_str, c: target_percent) == NULL)
3510 {
3511 /* Don't optimize sprintf (buf, "abc", ptr++). */
3512 if (orig)
3513 return false;
3514
3515 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3516 'format' is known to contain no % formats. */
3517 gimple_seq stmts = NULL;
3518 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3519
3520 /* Propagate the NO_WARNING bit to avoid issuing the same
3521 warning more than once. */
3522 copy_warning (repl, stmt);
3523
3524 gimple_seq_add_stmt_without_update (&stmts, repl);
3525 if (tree lhs = gimple_call_lhs (gs: stmt))
3526 {
3527 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3528 strlen (s: fmt_str)));
3529 gimple_seq_add_stmt_without_update (&stmts, repl);
3530 gsi_replace_with_seq_vops (si_p: gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
3535 gimple_stmt_iterator gsi2 = *gsi;
3536 gsi_prev (i: &gsi2);
3537 fold_stmt (&gsi2);
3538 }
3539 else
3540 {
3541 gsi_replace_with_seq_vops (si_p: gsi, stmts);
3542 fold_stmt (gsi);
3543 }
3544 return true;
3545 }
3546
  /* If the format is "%s", use strcpy; if the result is used, the
     length of the source must also be known.  */
3548 else if (fmt_str && strcmp (s1: fmt_str, s2: target_percent_s) == 0)
3549 {
3550 /* Don't crash on sprintf (str1, "%s"). */
3551 if (!orig)
3552 return false;
3553
3554 /* Don't fold calls with source arguments of invalid (nonpointer)
3555 types. */
3556 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3557 return false;
3558
3559 tree orig_len = NULL_TREE;
3560 if (gimple_call_lhs (gs: stmt))
3561 {
3562 orig_len = get_maxval_strlen (arg: orig, rkind: SRK_STRLEN);
3563 if (!orig_len)
3564 return false;
3565 }
3566
3567 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3568 gimple_seq stmts = NULL;
3569 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3570
3571 /* Propagate the NO_WARNING bit to avoid issuing the same
3572 warning more than once. */
3573 copy_warning (repl, stmt);
3574
3575 gimple_seq_add_stmt_without_update (&stmts, repl);
3576 if (tree lhs = gimple_call_lhs (gs: stmt))
3577 {
3578 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3579 TREE_TYPE (orig_len)))
3580 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3581 repl = gimple_build_assign (lhs, orig_len);
3582 gimple_seq_add_stmt_without_update (&stmts, repl);
3583 gsi_replace_with_seq_vops (si_p: gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
3588 gimple_stmt_iterator gsi2 = *gsi;
3589 gsi_prev (i: &gsi2);
3590 fold_stmt (&gsi2);
3591 }
3592 else
3593 {
3594 gsi_replace_with_seq_vops (si_p: gsi, stmts);
3595 fold_stmt (gsi);
3596 }
3597 return true;
3598 }
3599 return false;
3600}
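
/* As an illustrative sketch of the two cases handled above
   (hypothetical user code):

     sprintf (buf, "hello");        becomes  strcpy (buf, "hello");
     n = sprintf (buf, "hello");    becomes  strcpy (buf, "hello"), n = 5;
     n = sprintf (buf, "%s", s);    becomes  strcpy (buf, s), n = L;

   the last one only when L = strlen (s) is a compile-time constant.  */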
3601
3602/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3603 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3604 attempt to simplify calls with more than 4 arguments.
3605
3606 Return true if simplification was possible, otherwise false. */
3607
3608bool
3609gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3610{
3611 gcall *stmt = as_a <gcall *> (p: gsi_stmt (i: *gsi));
3612 tree dest = gimple_call_arg (gs: stmt, index: 0);
3613 tree destsize = gimple_call_arg (gs: stmt, index: 1);
3614 tree fmt = gimple_call_arg (gs: stmt, index: 2);
3615 tree orig = NULL_TREE;
3616 const char *fmt_str = NULL;
3617
3618 if (gimple_call_num_args (gs: stmt) > 4)
3619 return false;
3620
3621 if (gimple_call_num_args (gs: stmt) == 4)
3622 orig = gimple_call_arg (gs: stmt, index: 3);
3623
3624 /* Check whether the format is a literal string constant. */
3625 fmt_str = c_getstr (fmt);
3626 if (fmt_str == NULL)
3627 return false;
3628
3629 if (!init_target_chars ())
3630 return false;
3631
3632 /* If the format doesn't contain % args or %%, use strcpy. */
3633 if (strchr (s: fmt_str, c: target_percent) == NULL)
3634 {
3635 tree fn = builtin_decl_implicit (fncode: BUILT_IN_STRCPY);
3636 if (!fn)
3637 return false;
3638
3639 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3640 if (orig)
3641 return false;
3642
3643 tree len = build_int_cstu (TREE_TYPE (destsize), strlen (s: fmt_str));
3644
3645 /* We could expand this as
3646 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3647 or to
3648 memcpy (str, fmt_with_nul_at_cstm1, cst);
3649 but in the former case that might increase code size
3650 and in the latter case grow .rodata section too much.
3651 So punt for now. */
3652 if (!known_lower (stmt, len, size: destsize, strict: true))
3653 return false;
3654
3655 gimple_seq stmts = NULL;
3656 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3657 gimple_seq_add_stmt_without_update (&stmts, repl);
3658 if (tree lhs = gimple_call_lhs (gs: stmt))
3659 {
3660 repl = gimple_build_assign (lhs,
3661 fold_convert (TREE_TYPE (lhs), len));
3662 gimple_seq_add_stmt_without_update (&stmts, repl);
3663 gsi_replace_with_seq_vops (si_p: gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
3668 gimple_stmt_iterator gsi2 = *gsi;
3669 gsi_prev (i: &gsi2);
3670 fold_stmt (&gsi2);
3671 }
3672 else
3673 {
3674 gsi_replace_with_seq_vops (si_p: gsi, stmts);
3675 fold_stmt (gsi);
3676 }
3677 return true;
3678 }
3679
  /* If the format is "%s", use strcpy when the known length of the
     source is smaller than DESTSIZE.  */
3681 else if (fmt_str && strcmp (s1: fmt_str, s2: target_percent_s) == 0)
3682 {
3683 tree fn = builtin_decl_implicit (fncode: BUILT_IN_STRCPY);
3684 if (!fn)
3685 return false;
3686
3687 /* Don't crash on snprintf (str1, cst, "%s"). */
3688 if (!orig)
3689 return false;
3690
3691 tree orig_len = get_maxval_strlen (arg: orig, rkind: SRK_STRLEN);
3692
3693 /* We could expand this as
3694 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3695 or to
3696 memcpy (str1, str2_with_nul_at_cstm1, cst);
3697 but in the former case that might increase code size
3698 and in the latter case grow .rodata section too much.
3699 So punt for now. */
3700 if (!known_lower (stmt, len: orig_len, size: destsize, strict: true))
3701 return false;
3702
3703 /* Convert snprintf (str1, cst, "%s", str2) into
3704 strcpy (str1, str2) if strlen (str2) < cst. */
3705 gimple_seq stmts = NULL;
3706 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3707 gimple_seq_add_stmt_without_update (&stmts, repl);
3708 if (tree lhs = gimple_call_lhs (gs: stmt))
3709 {
3710 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3711 TREE_TYPE (orig_len)))
3712 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3713 repl = gimple_build_assign (lhs, orig_len);
3714 gimple_seq_add_stmt_without_update (&stmts, repl);
3715 gsi_replace_with_seq_vops (si_p: gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
3720 gimple_stmt_iterator gsi2 = *gsi;
3721 gsi_prev (i: &gsi2);
3722 fold_stmt (&gsi2);
3723 }
3724 else
3725 {
3726 gsi_replace_with_seq_vops (si_p: gsi, stmts);
3727 fold_stmt (gsi);
3728 }
3729 return true;
3730 }
3731 return false;
3732}
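
/* As an illustrative sketch (hypothetical user code): assuming the
   constant size argument strictly exceeds the known result length,

     snprintf (buf, 32, "hello");       becomes  strcpy (buf, "hello");
     snprintf (buf, 32, "%s", "abc");   becomes  strcpy (buf, "abc");

   while snprintf (buf, 4, "hello") is left alone because strcpy cannot
   express the required truncation.  */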
3733
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if a simplification was made, otherwise false.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */
3741
3742static bool
3743gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3744 tree fp, tree fmt, tree arg,
3745 enum built_in_function fcode)
3746{
3747 gcall *stmt = as_a <gcall *> (p: gsi_stmt (i: *gsi));
3748 tree fn_fputc, fn_fputs;
3749 const char *fmt_str = NULL;
3750
3751 /* If the return value is used, don't do the transformation. */
3752 if (gimple_call_lhs (gs: stmt) != NULL_TREE)
3753 return false;
3754
3755 /* Check whether the format is a literal string constant. */
3756 fmt_str = c_getstr (fmt);
3757 if (fmt_str == NULL)
3758 return false;
3759
3760 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3761 {
3762 /* If we're using an unlocked function, assume the other
3763 unlocked functions exist explicitly. */
3764 fn_fputc = builtin_decl_explicit (fncode: BUILT_IN_FPUTC_UNLOCKED);
3765 fn_fputs = builtin_decl_explicit (fncode: BUILT_IN_FPUTS_UNLOCKED);
3766 }
3767 else
3768 {
3769 fn_fputc = builtin_decl_implicit (fncode: BUILT_IN_FPUTC);
3770 fn_fputs = builtin_decl_implicit (fncode: BUILT_IN_FPUTS);
3771 }
3772
3773 if (!init_target_chars ())
3774 return false;
3775
  /* If the format doesn't contain % args or %%, use fputs.  */
3777 if (strchr (s: fmt_str, c: target_percent) == NULL)
3778 {
3779 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3780 && arg)
3781 return false;
3782
3783 /* If the format specifier was "", fprintf does nothing. */
3784 if (fmt_str[0] == '\0')
3785 {
3786 replace_call_with_value (gsi, NULL_TREE);
3787 return true;
3788 }
3789
3790 /* When "string" doesn't contain %, replace all cases of
3791 fprintf (fp, string) with fputs (string, fp). The fputs
3792 builtin will take care of special cases like length == 1. */
3793 if (fn_fputs)
3794 {
3795 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3796 replace_call_with_call_and_fold (gsi, repl);
3797 return true;
3798 }
3799 }
3800
3801 /* The other optimizations can be done only on the non-va_list variants. */
3802 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3803 return false;
3804
3805 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3806 else if (strcmp (s1: fmt_str, s2: target_percent_s) == 0)
3807 {
3808 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3809 return false;
3810 if (fn_fputs)
3811 {
3812 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3813 replace_call_with_call_and_fold (gsi, repl);
3814 return true;
3815 }
3816 }
3817
3818 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3819 else if (strcmp (s1: fmt_str, s2: target_percent_c) == 0)
3820 {
3821 if (!arg
3822 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3823 return false;
3824 if (fn_fputc)
3825 {
3826 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3827 replace_call_with_call_and_fold (gsi, repl);
3828 return true;
3829 }
3830 }
3831
3832 return false;
3833}
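
/* As an illustrative sketch (hypothetical user code, return values
   unused as required above):

     fprintf (fp, "hello");   becomes  fputs ("hello", fp);
     fprintf (fp, "%s", s);   becomes  fputs (s, fp);
     fprintf (fp, "%c", c);   becomes  fputc (c, fp);
     fprintf (fp, "");        is removed entirely.  */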
3834
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if a simplification was made, otherwise false.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */
3842
3843static bool
3844gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3845 tree arg, enum built_in_function fcode)
3846{
3847 gcall *stmt = as_a <gcall *> (p: gsi_stmt (i: *gsi));
3848 tree fn_putchar, fn_puts, newarg;
3849 const char *fmt_str = NULL;
3850
3851 /* If the return value is used, don't do the transformation. */
3852 if (gimple_call_lhs (gs: stmt) != NULL_TREE)
3853 return false;
3854
3855 /* Check whether the format is a literal string constant. */
3856 fmt_str = c_getstr (fmt);
3857 if (fmt_str == NULL)
3858 return false;
3859
3860 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3861 {
3862 /* If we're using an unlocked function, assume the other
3863 unlocked functions exist explicitly. */
3864 fn_putchar = builtin_decl_explicit (fncode: BUILT_IN_PUTCHAR_UNLOCKED);
3865 fn_puts = builtin_decl_explicit (fncode: BUILT_IN_PUTS_UNLOCKED);
3866 }
3867 else
3868 {
3869 fn_putchar = builtin_decl_implicit (fncode: BUILT_IN_PUTCHAR);
3870 fn_puts = builtin_decl_implicit (fncode: BUILT_IN_PUTS);
3871 }
3872
3873 if (!init_target_chars ())
3874 return false;
3875
3876 if (strcmp (s1: fmt_str, s2: target_percent_s) == 0
3877 || strchr (s: fmt_str, c: target_percent) == NULL)
3878 {
3879 const char *str;
3880
3881 if (strcmp (s1: fmt_str, s2: target_percent_s) == 0)
3882 {
3883 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3884 return false;
3885
3886 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3887 return false;
3888
3889 str = c_getstr (arg);
3890 if (str == NULL)
3891 return false;
3892 }
3893 else
3894 {
3895 /* The format specifier doesn't contain any '%' characters. */
3896 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3897 && arg)
3898 return false;
3899 str = fmt_str;
3900 }
3901
3902 /* If the string was "", printf does nothing. */
3903 if (str[0] == '\0')
3904 {
3905 replace_call_with_value (gsi, NULL_TREE);
3906 return true;
3907 }
3908
      /* If the string has length 1, call putchar.  */
3910 if (str[1] == '\0')
3911 {
	  /* Given printf ("c"), where "c" is any one-character string,
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
3915 newarg = build_int_cst (integer_type_node, str[0]);
3916 if (fn_putchar)
3917 {
3918 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3919 replace_call_with_call_and_fold (gsi, repl);
3920 return true;
3921 }
3922 }
3923 else
3924 {
3925 /* If the string was "string\n", call puts("string"). */
3926 size_t len = strlen (s: str);
3927 if ((unsigned char)str[len - 1] == target_newline
3928 && (size_t) (int) len == len
3929 && (int) len > 0)
3930 {
3931 char *newstr;
3932
3933 /* Create a NUL-terminated string that's one char shorter
3934 than the original, stripping off the trailing '\n'. */
3935 newstr = xstrdup (str);
3936 newstr[len - 1] = '\0';
3937 newarg = build_string_literal (len, newstr);
3938 free (ptr: newstr);
3939 if (fn_puts)
3940 {
3941 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3942 replace_call_with_call_and_fold (gsi, repl);
3943 return true;
3944 }
3945 }
3946 else
3947 /* We'd like to arrange to call fputs(string,stdout) here,
3948 but we need stdout and don't have a way to get it yet. */
3949 return false;
3950 }
3951 }
3952
3953 /* The other optimizations can be done only on the non-va_list variants. */
3954 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3955 return false;
3956
3957 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3958 else if (strcmp (s1: fmt_str, s2: target_percent_s_newline) == 0)
3959 {
3960 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3961 return false;
3962 if (fn_puts)
3963 {
3964 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3965 replace_call_with_call_and_fold (gsi, repl);
3966 return true;
3967 }
3968 }
3969
3970 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3971 else if (strcmp (s1: fmt_str, s2: target_percent_c) == 0)
3972 {
3973 if (!arg || ! useless_type_conversion_p (integer_type_node,
3974 TREE_TYPE (arg)))
3975 return false;
3976 if (fn_putchar)
3977 {
3978 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3979 replace_call_with_call_and_fold (gsi, repl);
3980 return true;
3981 }
3982 }
3983
3984 return false;
3985}
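
/* As an illustrative sketch (hypothetical user code, return values
   unused as required above):

     printf ("x");         becomes  putchar ('x');
     printf ("hello\n");   becomes  puts ("hello");
     printf ("%s\n", s);   becomes  puts (s);
     printf ("%c", c);     becomes  putchar (c);
     printf ("");          is removed entirely.

   printf ("hello") without a trailing newline is not folded, since that
   would need fputs (..., stdout) and stdout is not available here.  */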
3986
3987
3988
/* Fold a call to __builtin_strlen to a constant if the string length is
   known, otherwise record the computed [min, max] length range for the
   result.  */
3990
3991static bool
3992gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3993{
3994 gimple *stmt = gsi_stmt (i: *gsi);
3995 tree arg = gimple_call_arg (gs: stmt, index: 0);
3996
3997 wide_int minlen;
3998 wide_int maxlen;
3999
4000 c_strlen_data lendata = { };
4001 if (get_range_strlen (arg, pdata: &lendata, /* eltsize = */ 1)
4002 && !lendata.decl
4003 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4004 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4005 {
4006 /* The range of lengths refers to either a single constant
4007 string or to the longest and shortest constant string
4008 referenced by the argument of the strlen() call, or to
4009 the strings that can possibly be stored in the arrays
4010 the argument refers to. */
4011 minlen = wi::to_wide (t: lendata.minlen);
4012 maxlen = wi::to_wide (t: lendata.maxlen);
4013 }
4014 else
4015 {
4016 unsigned prec = TYPE_PRECISION (sizetype);
4017
4018 minlen = wi::shwi (val: 0, precision: prec);
4019 maxlen = wi::to_wide (t: max_object_size (), prec) - 2;
4020 }
4021
4022 if (minlen == maxlen)
4023 {
4024 /* Fold the strlen call to a constant. */
4025 tree type = TREE_TYPE (lendata.minlen);
4026 tree len = force_gimple_operand_gsi (gsi,
4027 wide_int_to_tree (type, cst: minlen),
4028 true, NULL, true, GSI_SAME_STMT);
4029 replace_call_with_value (gsi, val: len);
4030 return true;
4031 }
4032
  /* Set the strlen() range to [MINLEN, MAXLEN].  */
4034 if (tree lhs = gimple_call_lhs (gs: stmt))
4035 set_strlen_range (lhs, minlen, maxlen);
4036
4037 return false;
4038}
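
/* As an illustrative sketch (hypothetical user code): given

     const char *p = cond ? "ab" : "wxyz";
     size_t n = __builtin_strlen (p);

   get_range_strlen yields the range [2, 4]; the call is kept but N's
   value range is narrowed accordingly.  Had both arms the same length,
   say "ab" and "xy", minlen == maxlen == 2 and the call would fold to
   the constant 2.  */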
4039
4040/* Fold a call to __builtin_acc_on_device. */
4041
4042static bool
4043gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4044{
  /* Defer folding until expansion, so that offloaded IL is folded by
     whichever compiler, host or accelerator, finally expands it.  */
4046 if (symtab->state != EXPANSION)
4047 return false;
4048
4049 unsigned val_host = GOMP_DEVICE_HOST;
4050 unsigned val_dev = GOMP_DEVICE_NONE;
4051
4052#ifdef ACCEL_COMPILER
4053 val_host = GOMP_DEVICE_NOT_HOST;
4054 val_dev = ACCEL_COMPILER_acc_device;
4055#endif
4056
4057 location_t loc = gimple_location (g: gsi_stmt (i: *gsi));
4058
4059 tree host_eq = make_ssa_name (boolean_type_node);
4060 gimple *host_ass = gimple_build_assign
4061 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4062 gimple_set_location (g: host_ass, location: loc);
4063 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4064
4065 tree dev_eq = make_ssa_name (boolean_type_node);
4066 gimple *dev_ass = gimple_build_assign
4067 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4068 gimple_set_location (g: dev_ass, location: loc);
4069 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4070
4071 tree result = make_ssa_name (boolean_type_node);
4072 gimple *result_ass = gimple_build_assign
4073 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4074 gimple_set_location (g: result_ass, location: loc);
4075 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4076
4077 replace_call_with_value (gsi, val: result);
4078
4079 return true;
4080}
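
/* As an illustrative sketch: in the host compiler the sequence emitted
   above computes

     arg0 == GOMP_DEVICE_HOST || arg0 == GOMP_DEVICE_NONE

   while in an accelerator compiler the two constants become
   GOMP_DEVICE_NOT_HOST and the accelerator's own device code, so
   acc_on_device collapses to two integer compares and a bitwise OR.  */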
4081
4082/* Fold realloc (0, n) -> malloc (n). */
4083
4084static bool
4085gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4086{
4087 gimple *stmt = gsi_stmt (i: *gsi);
4088 tree arg = gimple_call_arg (gs: stmt, index: 0);
4089 tree size = gimple_call_arg (gs: stmt, index: 1);
4090
4091 if (operand_equal_p (arg, null_pointer_node, flags: 0))
4092 {
4093 tree fn_malloc = builtin_decl_implicit (fncode: BUILT_IN_MALLOC);
4094 if (fn_malloc)
4095 {
4096 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4097 replace_call_with_call_and_fold (gsi, repl);
4098 return true;
4099 }
4100 }
4101 return false;
4102}
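
/* As an illustrative sketch (hypothetical user code): since the C
   standard defines realloc (NULL, n) to behave like malloc (n),

     void *p = realloc (NULL, 64);

   is folded into

     void *p = malloc (64);

   which is simpler for later passes to analyze.  */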
4103
/* Number of bytes into which any type other than an aggregate,
   vector or _BitInt type should fit.  */
4106static constexpr size_t clear_padding_unit
4107 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4108/* Buffer size on which __builtin_clear_padding folding code works. */
4109static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4110
4111/* Data passed through __builtin_clear_padding folding. */
4112struct clear_padding_struct {
4113 location_t loc;
  /* False during __builtin_clear_padding folding, true during
     clear_type_padding_in_mask.  In the latter case, instead of clearing
     the non-padding bits in the union_ptr array, clear the padding bits
     in there.  */
4117 bool clear_in_mask;
4118 tree base;
4119 tree alias_type;
4120 gimple_stmt_iterator *gsi;
4121 /* Alignment of buf->base + 0. */
4122 unsigned align;
4123 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4124 HOST_WIDE_INT off;
4125 /* Number of padding bytes before buf->off that don't have padding clear
4126 code emitted yet. */
4127 HOST_WIDE_INT padding_bytes;
4128 /* The size of the whole object. Never emit code to touch
4129 buf->base + buf->sz or following bytes. */
4130 HOST_WIDE_INT sz;
4131 /* Number of bytes recorded in buf->buf. */
4132 size_t size;
  /* When inside a union, instead of emitting code we AND the bits into
     the union_ptr array.  */
4135 unsigned char *union_ptr;
4136 /* Set bits mean padding bits that need to be cleared by the builtin. */
4137 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4138};
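
/* As an illustrative sketch (hypothetical type, typical layout with
   4-byte int alignment): for

     struct S { char c; int i; };

   bytes 1..3 are padding between C and I, so the corresponding bytes of
   buf->buf would be set to ~0 while bytes 0 and 4..7 stay 0, and
   clear_padding_flush would emit a single store zeroing bytes 1..3.  */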
4139
4140/* Emit code to clear padding requested in BUF->buf - set bits
4141 in there stand for padding that should be cleared. FULL is true
4142 if everything from the buffer should be flushed, otherwise
4143 it can leave up to 2 * clear_padding_unit bytes for further
4144 processing. */
4145
4146static void
4147clear_padding_flush (clear_padding_struct *buf, bool full)
4148{
4149 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4150 if (!full && buf->size < 2 * clear_padding_unit)
4151 return;
4152 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4153 size_t end = buf->size;
4154 if (!full)
4155 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4156 * clear_padding_unit);
4157 size_t padding_bytes = buf->padding_bytes;
4158 if (buf->union_ptr)
4159 {
4160 if (buf->clear_in_mask)
4161 {
4162 /* During clear_type_padding_in_mask, clear the padding
4163 bits set in buf->buf in the buf->union_ptr mask. */
4164 for (size_t i = 0; i < end; i++)
4165 {
4166 if (buf->buf[i] == (unsigned char) ~0)
4167 padding_bytes++;
4168 else
4169 {
4170 memset (s: &buf->union_ptr[buf->off + i - padding_bytes],
4171 c: 0, n: padding_bytes);
4172 padding_bytes = 0;
4173 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4174 }
4175 }
4176 if (full)
4177 {
4178 memset (s: &buf->union_ptr[buf->off + end - padding_bytes],
4179 c: 0, n: padding_bytes);
4180 buf->off = 0;
4181 buf->size = 0;
4182 buf->padding_bytes = 0;
4183 }
4184 else
4185 {
4186 memmove (dest: buf->buf, src: buf->buf + end, n: buf->size - end);
4187 buf->off += end;
4188 buf->size -= end;
4189 buf->padding_bytes = padding_bytes;
4190 }
4191 return;
4192 }
      /* Inside of a union, instead of emitting any code, clear all
	 bits in the union_ptr buffer that are clear in buf.
	 Whole padding bytes don't clear anything.  */
4196 for (size_t i = 0; i < end; i++)
4197 {
4198 if (buf->buf[i] == (unsigned char) ~0)
4199 padding_bytes++;
4200 else
4201 {
4202 padding_bytes = 0;
4203 buf->union_ptr[buf->off + i] &= buf->buf[i];
4204 }
4205 }
4206 if (full)
4207 {
4208 buf->off = 0;
4209 buf->size = 0;
4210 buf->padding_bytes = 0;
4211 }
4212 else
4213 {
4214 memmove (dest: buf->buf, src: buf->buf + end, n: buf->size - end);
4215 buf->off += end;
4216 buf->size -= end;
4217 buf->padding_bytes = padding_bytes;
4218 }
4219 return;
4220 }
4221 size_t wordsize = UNITS_PER_WORD;
4222 for (size_t i = 0; i < end; i += wordsize)
4223 {
4224 size_t nonzero_first = wordsize;
4225 size_t nonzero_last = 0;
4226 size_t zero_first = wordsize;
4227 size_t zero_last = 0;
4228 bool all_ones = true, bytes_only = true;
4229 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4230 > (unsigned HOST_WIDE_INT) buf->sz)
4231 {
4232 gcc_assert (wordsize > 1);
4233 wordsize /= 2;
4234 i -= wordsize;
4235 continue;
4236 }
4237 for (size_t j = i; j < i + wordsize && j < end; j++)
4238 {
4239 if (buf->buf[j])
4240 {
4241 if (nonzero_first == wordsize)
4242 {
4243 nonzero_first = j - i;
4244 nonzero_last = j - i;
4245 }
4246 if (nonzero_last != j - i)
4247 all_ones = false;
4248 nonzero_last = j + 1 - i;
4249 }
4250 else
4251 {
4252 if (zero_first == wordsize)
4253 zero_first = j - i;
4254 zero_last = j + 1 - i;
4255 }
4256 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4257 {
4258 all_ones = false;
4259 bytes_only = false;
4260 }
4261 }
4262 size_t padding_end = i;
4263 if (padding_bytes)
4264 {
4265 if (nonzero_first == 0
4266 && nonzero_last == wordsize
4267 && all_ones)
4268 {
4269 /* All bits are padding and we had some padding
4270 before too. Just extend it. */
4271 padding_bytes += wordsize;
4272 continue;
4273 }
4274 if (all_ones && nonzero_first == 0)
4275 {
4276 padding_bytes += nonzero_last;
4277 padding_end += nonzero_last;
4278 nonzero_first = wordsize;
4279 nonzero_last = 0;
4280 }
4281 else if (bytes_only && nonzero_first == 0)
4282 {
4283 gcc_assert (zero_first && zero_first != wordsize);
4284 padding_bytes += zero_first;
4285 padding_end += zero_first;
4286 }
4287 tree atype, src;
4288 if (padding_bytes == 1)
4289 {
4290 atype = char_type_node;
4291 src = build_zero_cst (char_type_node);
4292 }
4293 else
4294 {
4295 atype = build_array_type_nelts (char_type_node, padding_bytes);
4296 src = build_constructor (atype, NULL);
4297 }
4298 tree dst = build2_loc (loc: buf->loc, code: MEM_REF, type: atype, arg0: buf->base,
4299 arg1: build_int_cst (buf->alias_type,
4300 buf->off + padding_end
4301 - padding_bytes));
4302 gimple *g = gimple_build_assign (dst, src);
4303 gimple_set_location (g, location: buf->loc);
4304 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4305 padding_bytes = 0;
4306 buf->padding_bytes = 0;
4307 }
4308 if (nonzero_first == wordsize)
4309 /* All bits in a word are 0, there are no padding bits. */
4310 continue;
4311 if (all_ones && nonzero_last == wordsize)
4312 {
4313 /* All bits between nonzero_first and end of word are padding
4314 bits, start counting padding_bytes. */
4315 padding_bytes = nonzero_last - nonzero_first;
4316 continue;
4317 }
4318 if (bytes_only)
4319 {
4320 /* If bitfields aren't involved in this word, prefer storing
4321 individual bytes or groups of them over performing a RMW
4322 operation on the whole word. */
4323 gcc_assert (i + zero_last <= end);
4324 for (size_t j = padding_end; j < i + zero_last; j++)
4325 {
4326 if (buf->buf[j])
4327 {
4328 size_t k;
4329 for (k = j; k < i + zero_last; k++)
4330 if (buf->buf[k] == 0)
4331 break;
4332 HOST_WIDE_INT off = buf->off + j;
4333 tree atype, src;
4334 if (k - j == 1)
4335 {
4336 atype = char_type_node;
4337 src = build_zero_cst (char_type_node);
4338 }
4339 else
4340 {
4341 atype = build_array_type_nelts (char_type_node, k - j);
4342 src = build_constructor (atype, NULL);
4343 }
4344 tree dst = build2_loc (loc: buf->loc, code: MEM_REF, type: atype,
4345 arg0: buf->base,
4346 arg1: build_int_cst (buf->alias_type, off));
4347 gimple *g = gimple_build_assign (dst, src);
4348 gimple_set_location (g, location: buf->loc);
4349 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4350 j = k;
4351 }
4352 }
4353 if (nonzero_last == wordsize)
4354 padding_bytes = nonzero_last - zero_last;
4355 continue;
4356 }
4357 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4358 {
4359 if (nonzero_last - nonzero_first <= eltsz
4360 && ((nonzero_first & ~(eltsz - 1))
4361 == ((nonzero_last - 1) & ~(eltsz - 1))))
4362 {
4363 tree type;
4364 if (eltsz == 1)
4365 type = char_type_node;
4366 else
4367 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4368 0);
4369 size_t start = nonzero_first & ~(eltsz - 1);
4370 HOST_WIDE_INT off = buf->off + i + start;
4371 tree atype = type;
4372 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4373 atype = build_aligned_type (type, buf->align);
4374 tree dst = build2_loc (loc: buf->loc, code: MEM_REF, type: atype, arg0: buf->base,
4375 arg1: build_int_cst (buf->alias_type, off));
4376 tree src;
4377 gimple *g;
4378 if (all_ones
4379 && nonzero_first == start
4380 && nonzero_last == start + eltsz)
4381 src = build_zero_cst (type);
4382 else
4383 {
4384 src = make_ssa_name (var: type);
4385 tree tmp_dst = unshare_expr (dst);
	      /* The folding introduces a read from tmp_dst; prevent the
		 uninitialized-warning analysis from warning about this
		 fake read.  To suppress the warning only for this expr,
		 set the location of tmp_dst to UNKNOWN_LOCATION first;
		 suppress_warning will then call set_no_warning_bit to set
		 the no_warning flag only on tmp_dst.  */
4393 SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4394 suppress_warning (tmp_dst, OPT_Wuninitialized);
4395 g = gimple_build_assign (src, tmp_dst);
4396 gimple_set_location (g, location: buf->loc);
4397 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4398 tree mask = native_interpret_expr (type,
4399 buf->buf + i + start,
4400 eltsz);
4401 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4402 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4403 tree src_masked = make_ssa_name (var: type);
4404 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4405 src, mask);
4406 gimple_set_location (g, location: buf->loc);
4407 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4408 src = src_masked;
4409 }
4410 g = gimple_build_assign (dst, src);
4411 gimple_set_location (g, location: buf->loc);
4412 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4413 break;
4414 }
4415 }
4416 }
4417 if (full)
4418 {
4419 if (padding_bytes)
4420 {
4421 tree atype, src;
4422 if (padding_bytes == 1)
4423 {
4424 atype = char_type_node;
4425 src = build_zero_cst (char_type_node);
4426 }
4427 else
4428 {
4429 atype = build_array_type_nelts (char_type_node, padding_bytes);
4430 src = build_constructor (atype, NULL);
4431 }
4432 tree dst = build2_loc (loc: buf->loc, code: MEM_REF, type: atype, arg0: buf->base,
4433 arg1: build_int_cst (buf->alias_type,
4434 buf->off + end
4435 - padding_bytes));
4436 gimple *g = gimple_build_assign (dst, src);
4437 gimple_set_location (g, location: buf->loc);
4438 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4439 }
4440 size_t end_rem = end % UNITS_PER_WORD;
4441 buf->off += end - end_rem;
4442 buf->size = end_rem;
4443 memset (s: buf->buf, c: 0, n: buf->size);
4444 buf->padding_bytes = 0;
4445 }
4446 else
4447 {
4448 memmove (dest: buf->buf, src: buf->buf + end, n: buf->size - end);
4449 buf->off += end;
4450 buf->size -= end;
4451 buf->padding_bytes = padding_bytes;
4452 }
4453}
4454
/* Append PADDING_BYTES padding bytes to BUF.  */
4456
4457static void
4458clear_padding_add_padding (clear_padding_struct *buf,
4459 HOST_WIDE_INT padding_bytes)
4460{
4461 if (padding_bytes == 0)
4462 return;
4463 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4464 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4465 clear_padding_flush (buf, full: false);
4466 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4467 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4468 {
4469 memset (s: buf->buf + buf->size, c: ~0, n: clear_padding_buf_size - buf->size);
4470 padding_bytes -= clear_padding_buf_size - buf->size;
4471 buf->size = clear_padding_buf_size;
4472 clear_padding_flush (buf, full: false);
4473 gcc_assert (buf->padding_bytes);
4474 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4475 is guaranteed to be all ones. */
4476 padding_bytes += buf->size;
4477 buf->size = padding_bytes % UNITS_PER_WORD;
4478 memset (s: buf->buf, c: ~0, n: buf->size);
4479 buf->off += padding_bytes - buf->size;
4480 buf->padding_bytes += padding_bytes - buf->size;
4481 }
4482 else
4483 {
4484 memset (s: buf->buf + buf->size, c: ~0, n: padding_bytes);
4485 buf->size += padding_bytes;
4486 }
4487}
4488
4489static void clear_padding_type (clear_padding_struct *, tree,
4490 HOST_WIDE_INT, bool);
4491
4492/* Clear padding bits of union type TYPE. */
4493
4494static void
4495clear_padding_union (clear_padding_struct *buf, tree type,
4496 HOST_WIDE_INT sz, bool for_auto_init)
4497{
4498 clear_padding_struct *union_buf;
4499 HOST_WIDE_INT start_off = 0, next_off = 0;
4500 size_t start_size = 0;
4501 if (buf->union_ptr)
4502 {
4503 start_off = buf->off + buf->size;
4504 next_off = start_off + sz;
4505 start_size = start_off % UNITS_PER_WORD;
4506 start_off -= start_size;
4507 clear_padding_flush (buf, full: true);
4508 union_buf = buf;
4509 }
4510 else
4511 {
4512 if (sz + buf->size > clear_padding_buf_size)
4513 clear_padding_flush (buf, full: false);
4514 union_buf = XALLOCA (clear_padding_struct);
4515 union_buf->loc = buf->loc;
4516 union_buf->clear_in_mask = buf->clear_in_mask;
4517 union_buf->base = NULL_TREE;
4518 union_buf->alias_type = NULL_TREE;
4519 union_buf->gsi = NULL;
4520 union_buf->align = 0;
4521 union_buf->off = 0;
4522 union_buf->padding_bytes = 0;
4523 union_buf->sz = sz;
4524 union_buf->size = 0;
4525 if (sz + buf->size <= clear_padding_buf_size)
4526 union_buf->union_ptr = buf->buf + buf->size;
4527 else
4528 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4529 memset (s: union_buf->union_ptr, c: ~0, n: sz);
4530 }
4531
4532 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4533 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4534 {
4535 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4536 {
4537 if (TREE_TYPE (field) == error_mark_node)
4538 continue;
4539 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4540 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4541 if (!buf->clear_in_mask && !for_auto_init)
4542 error_at (buf->loc, "flexible array member %qD does not have "
4543 "well defined padding bits for %qs",
4544 field, "__builtin_clear_padding");
4545 continue;
4546 }
4547 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4548 gcc_assert (union_buf->size == 0);
4549 union_buf->off = start_off;
4550 union_buf->size = start_size;
4551 memset (s: union_buf->buf, c: ~0, n: start_size);
4552 clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
4553 clear_padding_add_padding (buf: union_buf, padding_bytes: sz - fldsz);
4554 clear_padding_flush (buf: union_buf, full: true);
4555 }
4556
4557 if (buf == union_buf)
4558 {
4559 buf->off = next_off;
4560 buf->size = next_off % UNITS_PER_WORD;
4561 buf->off -= buf->size;
4562 memset (s: buf->buf, c: ~0, n: buf->size);
4563 }
4564 else if (sz + buf->size <= clear_padding_buf_size)
4565 buf->size += sz;
4566 else
4567 {
4568 unsigned char *union_ptr = union_buf->union_ptr;
4569 while (sz)
4570 {
4571 clear_padding_flush (buf, full: false);
4572 HOST_WIDE_INT this_sz
4573 = MIN ((unsigned HOST_WIDE_INT) sz,
4574 clear_padding_buf_size - buf->size);
4575 memcpy (dest: buf->buf + buf->size, src: union_ptr, n: this_sz);
4576 buf->size += this_sz;
4577 union_ptr += this_sz;
4578 sz -= this_sz;
4579 }
4580 XDELETE (union_buf->union_ptr);
4581 }
4582}
4583
4584/* The only known floating point formats with padding bits are the
4585 IEEE extended ones. */
4586
4587static bool
4588clear_padding_real_needs_padding_p (tree type)
4589{
4590 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4591 return (fmt->b == 2
4592 && fmt->signbit_ro == fmt->signbit_rw
4593 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4594}
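
/* As an illustrative sketch: the Intel 80-bit extended format (sign bit
   at position 79) is stored in 12 or 16 bytes, leaving 2 or 6 bytes of
   trailing padding, and the m68k extended format (sign bit at position
   95) carries 16 unused bits; IEEE single, double and quad have no
   padding bits and are rejected here.  */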
4595
/* A _BitInt has padding bits if it isn't extended in the ABI and its
   precision is smaller than the number of bits in its limb or in the
   corresponding whole number of limbs.  */
4598
4599static bool
4600clear_padding_bitint_needs_padding_p (tree type)
4601{
4602 struct bitint_info info;
4603 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4604 gcc_assert (ok);
4605 if (info.extended)
4606 return false;
4607 scalar_int_mode limb_mode = as_a <scalar_int_mode> (m: info.limb_mode);
4608 if (TYPE_PRECISION (type) < GET_MODE_PRECISION (mode: limb_mode))
4609 return true;
4610 else if (TYPE_PRECISION (type) == GET_MODE_PRECISION (mode: limb_mode))
4611 return false;
4612 else
4613 return (((unsigned) TYPE_PRECISION (type))
4614 % GET_MODE_PRECISION (mode: limb_mode)) != 0;
4615}
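
/* As an illustrative sketch, assuming 64-bit limbs and an ABI without
   extension: _BitInt(20) occupies one limb with 44 padding bits,
   _BitInt(64) fills its limb exactly and has none, and _BitInt(100)
   uses two limbs with 28 padding bits in the most significant one.  */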
4616
4617/* Return true if TYPE might contain any padding bits. */
4618
4619bool
4620clear_padding_type_may_have_padding_p (tree type)
4621{
4622 switch (TREE_CODE (type))
4623 {
4624 case RECORD_TYPE:
4625 case UNION_TYPE:
4626 return true;
4627 case ARRAY_TYPE:
4628 case COMPLEX_TYPE:
4629 case VECTOR_TYPE:
4630 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4631 case REAL_TYPE:
4632 return clear_padding_real_needs_padding_p (type);
4633 case BITINT_TYPE:
4634 return clear_padding_bitint_needs_padding_p (type);
4635 default:
4636 return false;
4637 }
4638}
4639
4640/* Emit a runtime loop:
4641 for (; buf.base != end; buf.base += sz)
4642 __builtin_clear_padding (buf.base); */
4643
4644static void
4645clear_padding_emit_loop (clear_padding_struct *buf, tree type,
4646 tree end, bool for_auto_init)
4647{
4648 tree l1 = create_artificial_label (buf->loc);
4649 tree l2 = create_artificial_label (buf->loc);
4650 tree l3 = create_artificial_label (buf->loc);
4651 gimple *g = gimple_build_goto (dest: l2);
4652 gimple_set_location (g, location: buf->loc);
4653 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4654 g = gimple_build_label (label: l1);
4655 gimple_set_location (g, location: buf->loc);
4656 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4657 clear_padding_type (buf, type, buf->sz, for_auto_init);
4658 clear_padding_flush (buf, full: true);
4659 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4660 size_int (buf->sz));
4661 gimple_set_location (g, location: buf->loc);
4662 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4663 g = gimple_build_label (label: l2);
4664 gimple_set_location (g, location: buf->loc);
4665 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4666 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4667 gimple_set_location (g, location: buf->loc);
4668 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4669 g = gimple_build_label (label: l3);
4670 gimple_set_location (g, location: buf->loc);
4671 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4672}
4673
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding was not written by the end user but
   inserted by the compiler to initialize the padding of an automatic
   variable; in that case we should not emit error messages about
   flexible array members, which would only confuse the end user.  */
4681
4682static void
4683clear_padding_type (clear_padding_struct *buf, tree type,
4684 HOST_WIDE_INT sz, bool for_auto_init)
4685{
4686 switch (TREE_CODE (type))
4687 {
4688 case RECORD_TYPE:
4689 HOST_WIDE_INT cur_pos;
4690 cur_pos = 0;
4691 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4692 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4693 {
4694 tree ftype = TREE_TYPE (field);
4695 if (DECL_BIT_FIELD (field))
4696 {
4697 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4698 if (fldsz == 0)
4699 continue;
4700 HOST_WIDE_INT pos = int_byte_position (field);
4701 if (pos >= sz)
4702 continue;
4703 HOST_WIDE_INT bpos
4704 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4705 bpos %= BITS_PER_UNIT;
4706 HOST_WIDE_INT end
4707 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4708 if (pos + end > cur_pos)
4709 {
4710 clear_padding_add_padding (buf, padding_bytes: pos + end - cur_pos);
4711 cur_pos = pos + end;
4712 }
4713 gcc_assert (cur_pos > pos
4714 && ((unsigned HOST_WIDE_INT) buf->size
4715 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4716 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4717 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4718 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4719 " in %qs", "__builtin_clear_padding");
4720 else if (BYTES_BIG_ENDIAN)
4721 {
4722 /* Big endian. */
4723 if (bpos + fldsz <= BITS_PER_UNIT)
4724 *p &= ~(((1 << fldsz) - 1)
4725 << (BITS_PER_UNIT - bpos - fldsz));
4726 else
4727 {
4728 if (bpos)
4729 {
4730 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4731 p++;
4732 fldsz -= BITS_PER_UNIT - bpos;
4733 }
4734 memset (s: p, c: 0, n: fldsz / BITS_PER_UNIT);
4735 p += fldsz / BITS_PER_UNIT;
4736 fldsz %= BITS_PER_UNIT;
4737 if (fldsz)
4738 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4739 }
4740 }
4741 else
4742 {
4743 /* Little endian. */
4744 if (bpos + fldsz <= BITS_PER_UNIT)
4745 *p &= ~(((1 << fldsz) - 1) << bpos);
4746 else
4747 {
4748 if (bpos)
4749 {
4750 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4751 p++;
4752 fldsz -= BITS_PER_UNIT - bpos;
4753 }
4754 memset (s: p, c: 0, n: fldsz / BITS_PER_UNIT);
4755 p += fldsz / BITS_PER_UNIT;
4756 fldsz %= BITS_PER_UNIT;
4757 if (fldsz)
4758 *p &= ~((1 << fldsz) - 1);
4759 }
4760 }
4761 }
4762 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4763 {
4764 if (ftype == error_mark_node)
4765 continue;
4766 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4767 && !COMPLETE_TYPE_P (ftype));
4768 if (!buf->clear_in_mask && !for_auto_init)
4769 error_at (buf->loc, "flexible array member %qD does not "
4770 "have well defined padding bits for %qs",
4771 field, "__builtin_clear_padding");
4772 }
4773 else if (is_empty_type (ftype))
4774 continue;
4775 else
4776 {
4777 HOST_WIDE_INT pos = int_byte_position (field);
4778 if (pos >= sz)
4779 continue;
4780 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4781 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4782 clear_padding_add_padding (buf, padding_bytes: pos - cur_pos);
4783 cur_pos = pos;
4784 if (tree asbase = lang_hooks.types.classtype_as_base (field))
4785 ftype = asbase;
4786 clear_padding_type (buf, type: ftype, sz: fldsz, for_auto_init);
4787 cur_pos += fldsz;
4788 }
4789 }
4790 gcc_assert (sz >= cur_pos);
4791 clear_padding_add_padding (buf, padding_bytes: sz - cur_pos);
4792 break;
4793 case ARRAY_TYPE:
4794 HOST_WIDE_INT nelts, fldsz;
4795 fldsz = int_size_in_bytes (TREE_TYPE (type));
4796 if (fldsz == 0)
4797 break;
4798 nelts = sz / fldsz;
4799 if (nelts > 1
4800 && sz > 8 * UNITS_PER_WORD
4801 && buf->union_ptr == NULL
4802 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4803 {
	  /* For a sufficiently large array of more than one element,
	     emit a runtime loop to keep code size manageable.  */
4806 tree base = buf->base;
4807 unsigned int prev_align = buf->align;
4808 HOST_WIDE_INT off = buf->off + buf->size;
4809 HOST_WIDE_INT prev_sz = buf->sz;
4810 clear_padding_flush (buf, full: true);
4811 tree elttype = TREE_TYPE (type);
4812 buf->base = create_tmp_var (build_pointer_type (elttype));
4813 tree end = make_ssa_name (TREE_TYPE (buf->base));
4814 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4815 base, size_int (off));
4816 gimple_set_location (g, location: buf->loc);
4817 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4818 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4819 size_int (sz));
4820 gimple_set_location (g, location: buf->loc);
4821 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4822 buf->sz = fldsz;
4823 buf->align = TYPE_ALIGN (elttype);
4824 buf->off = 0;
4825 buf->size = 0;
4826 clear_padding_emit_loop (buf, type: elttype, end, for_auto_init);
4827 buf->base = base;
4828 buf->sz = prev_sz;
4829 buf->align = prev_align;
4830 buf->size = off % UNITS_PER_WORD;
4831 buf->off = off - buf->size;
4832 memset (s: buf->buf, c: 0, n: buf->size);
4833 break;
4834 }
4835 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4836 clear_padding_type (buf, TREE_TYPE (type), sz: fldsz, for_auto_init);
4837 break;
4838 case UNION_TYPE:
4839 clear_padding_union (buf, type, sz, for_auto_init);
4840 break;
4841 case REAL_TYPE:
4842 gcc_assert ((size_t) sz <= clear_padding_unit);
4843 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4844 clear_padding_flush (buf, full: false);
4845 if (clear_padding_real_needs_padding_p (type))
4846 {
4847 /* Use native_interpret_real + native_encode_expr to figure out
4848 which bits are padding. */
4849 memset (s: buf->buf + buf->size, c: ~0, n: sz);
4850 tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
4851 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4852 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4853 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4854 for (size_t i = 0; i < (size_t) sz; i++)
4855 buf->buf[buf->size + i] ^= ~0;
4856 }
4857 else
4858 memset (s: buf->buf + buf->size, c: 0, n: sz);
4859 buf->size += sz;
4860 break;
4861 case COMPLEX_TYPE:
4862 fldsz = int_size_in_bytes (TREE_TYPE (type));
4863 clear_padding_type (buf, TREE_TYPE (type), sz: fldsz, for_auto_init);
4864 clear_padding_type (buf, TREE_TYPE (type), sz: fldsz, for_auto_init);
4865 break;
4866 case VECTOR_TYPE:
4867 nelts = TYPE_VECTOR_SUBPARTS (node: type).to_constant ();
4868 fldsz = int_size_in_bytes (TREE_TYPE (type));
4869 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4870 clear_padding_type (buf, TREE_TYPE (type), sz: fldsz, for_auto_init);
4871 break;
4872 case NULLPTR_TYPE:
4873 gcc_assert ((size_t) sz <= clear_padding_unit);
4874 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4875 clear_padding_flush (buf, full: false);
4876 memset (s: buf->buf + buf->size, c: ~0, n: sz);
4877 buf->size += sz;
4878 break;
4879 case BITINT_TYPE:
4880 {
4881 struct bitint_info info;
4882 bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4883 gcc_assert (ok);
4884 scalar_int_mode limb_mode = as_a <scalar_int_mode> (m: info.limb_mode);
4885 if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (mode: limb_mode))
4886 {
4887 gcc_assert ((size_t) sz <= clear_padding_unit);
4888 if ((unsigned HOST_WIDE_INT) sz + buf->size
4889 > clear_padding_buf_size)
4890 clear_padding_flush (buf, full: false);
4891 if (!info.extended
4892 && TYPE_PRECISION (type) < GET_MODE_PRECISION (mode: limb_mode))
4893 {
4894 int tprec = GET_MODE_PRECISION (mode: limb_mode);
4895 int prec = TYPE_PRECISION (type);
4896 tree t = build_nonstandard_integer_type (tprec, 1);
4897 tree cst = wide_int_to_tree (type: t, cst: wi::mask (width: prec, negate_p: true, precision: tprec));
4898 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4899 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4900 }
4901 else
4902 memset (s: buf->buf + buf->size, c: 0, n: sz);
4903 buf->size += sz;
4904 break;
4905 }
4906 tree limbtype
4907 = build_nonstandard_integer_type (GET_MODE_PRECISION (mode: limb_mode), 1);
4908 fldsz = int_size_in_bytes (limbtype);
4909 nelts = int_size_in_bytes (type) / fldsz;
4910 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4911 {
4912 if (!info.extended
4913 && i == (info.big_endian ? 0 : nelts - 1)
4914 && (((unsigned) TYPE_PRECISION (type))
4915 % TYPE_PRECISION (limbtype)) != 0)
4916 {
4917 int tprec = GET_MODE_PRECISION (mode: limb_mode);
4918 int prec = (((unsigned) TYPE_PRECISION (type)) % tprec);
4919 tree cst = wide_int_to_tree (type: limbtype,
4920 cst: wi::mask (width: prec, negate_p: true, precision: tprec));
4921 int len = native_encode_expr (cst, buf->buf + buf->size,
4922 fldsz);
4923 gcc_assert (len > 0 && (size_t) len == (size_t) fldsz);
4924 buf->size += fldsz;
4925 }
4926 else
4927 clear_padding_type (buf, type: limbtype, sz: fldsz, for_auto_init);
4928 }
4929 break;
4930 }
4931 default:
4932 gcc_assert ((size_t) sz <= clear_padding_unit);
4933 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4934 clear_padding_flush (buf, full: false);
4935 memset (s: buf->buf + buf->size, c: 0, n: sz);
4936 buf->size += sz;
4937 break;
4938 }
4939}
4940
4941/* Clear padding bits of TYPE in MASK. */
4942
4943void
4944clear_type_padding_in_mask (tree type, unsigned char *mask)
4945{
4946 clear_padding_struct buf;
4947 buf.loc = UNKNOWN_LOCATION;
4948 buf.clear_in_mask = true;
4949 buf.base = NULL_TREE;
4950 buf.alias_type = NULL_TREE;
4951 buf.gsi = NULL;
4952 buf.align = 0;
4953 buf.off = 0;
4954 buf.padding_bytes = 0;
4955 buf.sz = int_size_in_bytes (type);
4956 buf.size = 0;
4957 buf.union_ptr = mask;
4958 clear_padding_type (buf: &buf, type, sz: buf.sz, for_auto_init: false);
4959 clear_padding_flush (buf: &buf, full: true);
4960}
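
/* As an illustrative sketch (hypothetical type): for
   struct S { char c; int i; } with 4-byte int and an all-ones 8-byte
   MASK on input, the call above zeroes MASK bytes 1..3, leaving set
   only the bits that correspond to the value bits of C and I.  */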
4961
4962/* Fold __builtin_clear_padding builtin. */
4963
4964static bool
4965gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4966{
4967 gimple *stmt = gsi_stmt (i: *gsi);
4968 gcc_assert (gimple_call_num_args (stmt) == 2);
4969 tree ptr = gimple_call_arg (gs: stmt, index: 0);
4970 tree typearg = gimple_call_arg (gs: stmt, index: 1);
  /* The value of the 2nd argument of __builtin_clear_padding is used to
     distinguish whether the call was written by the user or inserted by
     the compiler for automatic variable initialization.  */
4974 bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
4975 tree type = TREE_TYPE (TREE_TYPE (typearg));
4976 location_t loc = gimple_location (g: stmt);
4977 clear_padding_struct buf;
4978 gimple_stmt_iterator gsiprev = *gsi;
  /* This builtin is folded during the gimple lowering pass, so the IL
     must not be in SSA form yet and the CFG must not have been built.  */
4980 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4981 gcc_assert (COMPLETE_TYPE_P (type));
4982 gsi_prev (i: &gsiprev);
4983
4984 buf.loc = loc;
4985 buf.clear_in_mask = false;
4986 buf.base = ptr;
4987 buf.alias_type = NULL_TREE;
4988 buf.gsi = gsi;
4989 buf.align = get_pointer_alignment (ptr);
4990 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4991 buf.align = MAX (buf.align, talign);
4992 buf.off = 0;
4993 buf.padding_bytes = 0;
4994 buf.size = 0;
4995 buf.sz = int_size_in_bytes (type);
4996 buf.union_ptr = NULL;
4997 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4998 sorry_at (loc, "%s not supported for variable length aggregates",
4999 "__builtin_clear_padding");
5000 /* The implementation currently assumes 8-bit host and target
5001 chars which is the case for all currently supported targets
5002 and hosts and is required e.g. for native_{encode,interpret}* APIs. */
5003 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
5004 sorry_at (loc, "%s not supported on this target",
5005 "__builtin_clear_padding");
5006 else if (!clear_padding_type_may_have_padding_p (type))
5007 ;
5008 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
5009 {
5010 tree sz = TYPE_SIZE_UNIT (type);
5011 tree elttype = type;
5012 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
5013 while (TREE_CODE (elttype) == ARRAY_TYPE
5014 && int_size_in_bytes (elttype) < 0)
5015 elttype = TREE_TYPE (elttype);
5016 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
5017 gcc_assert (eltsz >= 0);
5018 if (eltsz)
5019 {
5020 buf.base = create_tmp_var (build_pointer_type (elttype));
5021 tree end = make_ssa_name (TREE_TYPE (buf.base));
5022 gimple *g = gimple_build_assign (buf.base, ptr);
5023 gimple_set_location (g, location: loc);
5024 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5025 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
5026 gimple_set_location (g, location: loc);
5027 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5028 buf.sz = eltsz;
5029 buf.align = TYPE_ALIGN (elttype);
5030 buf.alias_type = build_pointer_type (elttype);
5031 clear_padding_emit_loop (buf: &buf, type: elttype, end, for_auto_init);
5032 }
5033 }
5034 else
5035 {
5036 if (!is_gimple_mem_ref_addr (buf.base))
5037 {
5038 buf.base = make_ssa_name (TREE_TYPE (ptr));
5039 gimple *g = gimple_build_assign (buf.base, ptr);
5040 gimple_set_location (g, location: loc);
5041 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5042 }
5043 buf.alias_type = build_pointer_type (type);
5044 clear_padding_type (buf: &buf, type, sz: buf.sz, for_auto_init);
5045 clear_padding_flush (buf: &buf, full: true);
5046 }
5047
5048 gimple_stmt_iterator gsiprev2 = *gsi;
5049 gsi_prev (i: &gsiprev2);
5050 if (gsi_stmt (i: gsiprev) == gsi_stmt (i: gsiprev2))
5051 gsi_replace (gsi, gimple_build_nop (), true);
5052 else
5053 {
5054 gsi_remove (gsi, true);
5055 *gsi = gsiprev2;
5056 }
5057 return true;
5058}
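
/* As an illustrative sketch (hypothetical user code): for

     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);

   the call disappears and is replaced by stores that zero only the
   padding bytes of S (bytes 1..3 with 4-byte int), leaving the named
   members untouched.  */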
5059
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
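
/* As an illustrative example of the dispatch above (hypothetical GIMPLE,
   not taken from a particular testcase): a call
     n_1 = __builtin_strlen ("abcd");
   is handled by gimple_fold_builtin_strlen and becomes
     n_1 = 4;
   and a small copy from a known source such as
     __builtin_memcpy (&dst, &src, 4);
   may be rewritten by gimple_fold_builtin_memory_op into a plain
   load/store pair, assuming its size and aliasing checks succeed.  */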

/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
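
/* For instance (an illustrative sketch, assuming an OpenACC offload region
   whose vector length is the compile-time constant 1): IFN_GOACC_DIM_POS of
   the vector axis folds to 0, because the only valid position in a
   one-element axis is 0, and IFN_GOACC_DIM_SIZE of any axis with a known
   nonzero constant size folds to that constant.  A dynamic axis reports
   size 0 here and is left unfolded.  */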

/* Return true if STMT is an __atomic_compare_exchange_N call which is
   suitable for conversion into ATOMIC_COMPARE_EXCHANGE if the second
   argument is &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
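
/* An illustrative example of a qualifying call (hypothetical source,
   assuming the target provides a 4-byte compare-and-swap pattern):
     int e = ...;
     r = __atomic_compare_exchange_n (p, &e, d, 0, 5, 5);
   where e is a non-volatile local int whose address is taken only by such
   calls.  The predicate above accepts it, and the rewrite below turns the
   call into IFN_ATOMIC_COMPARE_EXCHANGE so that e need not live in
   memory.  */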

/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}

/* Return true if the result of ARG0 CODE ARG1, computed in infinite
   signed precision, doesn't fit into TYPE.  The overflow test is done
   regardless of -fwrapv, and also for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
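
/* A worked example (illustrative): for TYPE unsigned char (precision 8),
   200 + 100 evaluates to 300 in the wide intermediate, and representing
   300 as an unsigned value needs 9 bits > 8, so the function returns true.
   Likewise 5 - 7 on an unsigned type yields a negative wide result, which
   the wi::neg_p check above catches directly.  */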

/* If IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL is unconditional,
   return a MEM_REF for the memory it references, otherwise return null.
   VECTYPE is the type of the memory vector.  MASK_P indicates it's for
   MASK if true, otherwise it's for LEN.  */

static tree
gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype, bool mask_p)
{
  tree ptr = gimple_call_arg (call, 0);
  tree alias_align = gimple_call_arg (call, 1);
  if (!tree_fits_uhwi_p (alias_align))
    return NULL_TREE;

  if (mask_p)
    {
      tree mask = gimple_call_arg (call, 2);
      if (!integer_all_onesp (mask))
	return NULL_TREE;
    }
  else
    {
      internal_fn ifn = gimple_call_internal_fn (call);
      int len_index = internal_fn_len_index (ifn);
      tree basic_len = gimple_call_arg (call, len_index);
      if (!poly_int_tree_p (basic_len))
	return NULL_TREE;
      tree bias = gimple_call_arg (call, len_index + 1);
      gcc_assert (TREE_CODE (bias) == INTEGER_CST);
      /* For LEN_LOAD/LEN_STORE/MASK_LEN_LOAD/MASK_LEN_STORE,
	 we don't fold when (bias + len) != VF.  */
      if (maybe_ne (wi::to_poly_widest (basic_len) + wi::to_widest (bias),
		    GET_MODE_NUNITS (TYPE_MODE (vectype))))
	return NULL_TREE;

      /* For MASK_LEN_{LOAD,STORE}, we should also check whether
	 the mask is all ones mask.  */
      if (ifn == IFN_MASK_LEN_LOAD || ifn == IFN_MASK_LEN_STORE)
	{
	  tree mask = gimple_call_arg (call, internal_fn_mask_index (ifn));
	  if (!integer_all_onesp (mask))
	    return NULL_TREE;
	}
    }

  unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
  if (TYPE_ALIGN (vectype) != align)
    vectype = build_aligned_type (vectype, align);
  tree offset = build_zero_cst (TREE_TYPE (alias_align));
  return fold_build2 (MEM_REF, vectype, ptr, offset);
}
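
/* For example (illustrative GIMPLE, assuming a 4-lane integer vector):
     vect_1 = .MASK_LOAD (ptr_2, 32B, { -1, -1, -1, -1 });
   has an all-ones mask, so the load is really unconditional and the helpers
   below can replace the call with
     vect_1 = MEM <vector(4) int> [(int *)ptr_2];
   using the alignment from the second argument.  A mask with any zero lane
   leaves the call alone.  */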

/* Try to fold IFN_{MASK,LEN}_LOAD call CALL.  Return true on success.
   MASK_P indicates it's for MASK if true, otherwise it's for LEN.  */

static bool
gimple_fold_partial_load (gimple_stmt_iterator *gsi, gcall *call, bool mask_p)
{
  tree lhs = gimple_call_lhs (call);
  if (!lhs)
    return false;

  if (tree rhs
	= gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (lhs),
						  mask_p))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}

/* Try to fold IFN_{MASK,LEN}_STORE call CALL.  Return true on success.
   MASK_P indicates it's for MASK if true, otherwise it's for LEN.  */

static bool
gimple_fold_partial_store (gimple_stmt_iterator *gsi, gcall *call,
			   bool mask_p)
{
  internal_fn ifn = gimple_call_internal_fn (call);
  tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
  if (tree lhs
	= gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (rhs),
						  mask_p))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR
						      (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *> targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  location_t loc = gimple_location (stmt);
		  gimple *new_stmt = gimple_build_builtin_unreachable (loc);
		  gimple_call_set_ctrl_altering (new_stmt, false);
		  /* If the call had an SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      bool uaddc_usubc = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_UADDC:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  uaddc_usubc = true;
	  break;
	case IFN_USUBC:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  uaddc_usubc = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_partial_load (gsi, stmt, true);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_partial_store (gsi, stmt, true);
	  break;
	case IFN_LEN_LOAD:
	case IFN_MASK_LEN_LOAD:
	  changed |= gimple_fold_partial_load (gsi, stmt, false);
	  break;
	case IFN_LEN_STORE:
	case IFN_MASK_LEN_STORE:
	  changed |= gimple_fold_partial_store (gsi, stmt, false);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree arg2 = NULL_TREE;
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	      if (uaddc_usubc)
		arg2 = gimple_call_arg (stmt, 2);
	    }
	  if (type == NULL_TREE)
	    ;
	  else if (uaddc_usubc)
	    {
	      if (!integer_zerop (arg2))
		;
	      /* x = y + 0 + 0; x = y - 0 - 0;  */
	      else if (integer_zerop (arg1))
		result = arg0;
	      /* x = 0 + y + 0;  */
	      else if (subcode != MINUS_EXPR && integer_zerop (arg0))
		result = arg1;
	      /* x = y - y - 0;  */
	      else if (subcode == MINUS_EXPR
		       && operand_equal_p (arg0, arg1, 0))
		result = integer_zero_node;
	    }
	  /* x = y + 0; x = y - 0; x = y * 0;  */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y;  */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y;  */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y;  */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
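
/* An illustrative example of the internal-function folding above
   (hypothetical GIMPLE): for
     _1 = .ADD_OVERFLOW (x_2, 0);
   the value part is just x_2 and adding zero cannot overflow, so the call
   is replaced by
     _1 = COMPLEX_EXPR <x_2, 0>;
   from which later folding extracts the REALPART/IMAGPART uses.  */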

/* Return true if NAME has a use on STMT.  Note this can return
   false even though there is a use on STMT if the SSA operands are not
   up-to-date.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  ssa_op_iter iter;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    if (op == name)
      return true;
  return false;
}

/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      auto code = tree_code (res_op->code);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
      else if (code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      auto code = tree_code (res_op->code);
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
    {
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
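
/* For example (illustrative): if gimple_simplify reduces the condition of
     if (a_1 != a_1)
   to constant zero (valid for integral a_1), the gcond handling above calls
   gimple_cond_make_false; a simplification that needs helper statements
   instead pushes them onto SEQ, which is inserted before the condition.  */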

/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx
		    = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
			       wi::to_widest (low));
		  idx = wi::mul (idx,
				 wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
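
/* For example (illustrative): after propagation substitutes &a for a
   pointer SSA name, a load
     _1 = MEM <int> [(int *)&a];
   passes the decl checks above (same type, alignment and qualification) and
   is canonicalized back to the plain reference
     _1 = a;
   which later passes can treat as a register access.  */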

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	  /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
	     This cannot be done in maybe_canonicalize_mem_ref_addr
	     because the statement then has two operands rather than
	     one, and for the same reason it cannot be done in place.  */
	  if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
	    {
	      tree inner = TREE_OPERAND (*rhs, 0);
	      if (TREE_CODE (inner) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
		  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
		{
		  tree ptr = TREE_OPERAND (inner, 0);
		  tree addon = TREE_OPERAND (inner, 1);
		  addon = fold_convert (sizetype, addon);
		  gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
						  ptr, addon);
		  changed = true;
		  stmt = gsi_stmt (*gsi);
		}
	    }
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	gcall *call = as_a <gcall *> (stmt);
	for (i = 0; i < gimple_call_num_args (call); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (call, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (call);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	if (*lhs)
	  {
	    combined_fn cfn = gimple_call_combined_fn (call);
	    internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
	    int opno = first_commutative_argument (ifn);
	    if (opno >= 0)
	      {
		tree arg1 = gimple_call_arg (call, opno);
		tree arg2 = gimple_call_arg (call, opno + 1);
		if (tree_swap_operands_p (arg1, arg2))
		  {
		    gimple_call_set_arg (call, opno, arg2);
		    gimple_call_set_arg (call, opno + 1, arg1);
		    changed = true;
		  }
	      }
	  }
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 are a logical-not of X;
		   X == 1 and X != 0 are X itself.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for X of one-bit precision type is the
		   transformation !X -> ~X valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}

/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
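
/* Usage sketch (illustrative): a caller that only wants simplifications
   which cannot duplicate computations can invoke the overload below as
     fold_stmt (&gsi, follow_single_use_edges);
   so that gimple_simplify looks through a defining statement only when
   the defined SSA name has a single use.  */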

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from e.g. constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from e.g. constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
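
/* For example (illustrative): canonicalize_bool (a_1 < b_2, true) builds the
   boolean comparison a_1 >= b_2 via invert_tree_comparison (assuming NaNs
   need not be honored, per the false argument), while
   canonicalize_bool (x_3, false) for a non-boolean integral x_3 builds
   x_3 != 0.  */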

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((code == NE_EXPR && integer_zerop (op2))
	      || (code == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((code == EQ_EXPR && integer_zerop (op2))
	      || (code == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
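
/* An illustrative example: given
     t_1 = a_2 < b_3;
   the query same_bool_comparison_p (expr, EQ_EXPR, t_1, 0) takes the second
   recursion above and reduces to comparing EXPR against the inverted
   comparison a_2 >= b_3, since (t_1 == 0) and (a_2 >= b_3) compute the
   same boolean value.  */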

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b, basic_block);
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b,
			 basic_block);
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b,
			   basic_block);
static tree
or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b,
		  basic_block);
static tree
or_var_with_comparison (tree, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b,
			basic_block);
static tree
or_var_with_comparison_1 (tree, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b,
			  basic_block);

/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b,
			 basic_block outer_cond_bb)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (type, stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b, outer_cond_bb);
  else
    t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
				   outer_cond_bb);
  return canonicalize_bool (t, invert);
}
6727
6728/* Try to simplify the AND of the ssa variable defined by the assignment
6729 STMT with the comparison specified by (OP2A CODE2 OP2B).
6730 Return NULL_EXPR if we can't simplify this to a single expression. */
6731
6732static tree
6733and_var_with_comparison_1 (tree type, gimple *stmt,
6734 enum tree_code code2, tree op2a, tree op2b,
6735 basic_block outer_cond_bb)
6736{
6737 tree var = gimple_assign_lhs (gs: stmt);
6738 tree true_test_var = NULL_TREE;
6739 tree false_test_var = NULL_TREE;
6740 enum tree_code innercode = gimple_assign_rhs_code (gs: stmt);
6741
6742 /* Check for identities like (var AND (var == 0)) => false. */
6743 if (TREE_CODE (op2a) == SSA_NAME
6744 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6745 {
6746 if ((code2 == NE_EXPR && integer_zerop (op2b))
6747 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6748 {
6749 true_test_var = op2a;
6750 if (var == true_test_var)
6751 return var;
6752 }
6753 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6754 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6755 {
6756 false_test_var = op2a;
6757 if (var == false_test_var)
6758 return boolean_false_node;
6759 }
6760 }
6761
6762 /* If the definition is a comparison, recurse on it. */
6763 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6764 {
6765 tree t = and_comparisons_1 (type, code1: innercode,
6766 op1a: gimple_assign_rhs1 (gs: stmt),
6767 op1b: gimple_assign_rhs2 (gs: stmt),
6768 code2,
6769 op2a,
6770 op2b, outer_cond_bb);
6771 if (t)
6772 return t;
6773 }
6774
6775 /* If the definition is an AND or OR expression, we may be able to
6776 simplify by reassociating. */
6777 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6778 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6779 {
6780 tree inner1 = gimple_assign_rhs1 (gs: stmt);
6781 tree inner2 = gimple_assign_rhs2 (gs: stmt);
6782 gimple *s;
6783 tree t;
6784 tree partial = NULL_TREE;
6785 bool is_and = (innercode == BIT_AND_EXPR);
6786
6787 /* Check for boolean identities that don't require recursive examination
6788 of inner1/inner2:
6789 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6790 inner1 AND (inner1 OR inner2) => inner1
6791 !inner1 AND (inner1 AND inner2) => false
6792 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6793 Likewise for similar cases involving inner2. */
6794 if (inner1 == true_test_var)
6795 return (is_and ? var : inner1);
6796 else if (inner2 == true_test_var)
6797 return (is_and ? var : inner2);
6798 else if (inner1 == false_test_var)
6799 return (is_and
6800 ? boolean_false_node
6801 : and_var_with_comparison (type, var: inner2, invert: false, code2, op2a,
6802 op2b, outer_cond_bb));
6803 else if (inner2 == false_test_var)
6804 return (is_and
6805 ? boolean_false_node
6806 : and_var_with_comparison (type, var: inner1, invert: false, code2, op2a,
6807 op2b, outer_cond_bb));
6808
6809 /* Next, redistribute/reassociate the AND across the inner tests.
6810 Compute the first partial result, (inner1 AND (op2a code2 op2b)). */
6811 if (TREE_CODE (inner1) == SSA_NAME
6812 && is_gimple_assign (gs: s = SSA_NAME_DEF_STMT (inner1))
6813 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6814 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (gs: s),
6815 gimple_assign_rhs1 (gs: s),
6816 gimple_assign_rhs2 (gs: s),
6817 code2, op2a, op2b,
6818 outer_cond_bb)))
6819 {
6820 /* Handle the AND case, where we are reassociating:
6821 (inner1 AND inner2) AND (op2a code2 op2b)
6822 => (t AND inner2)
6823 If the partial result t is a constant, we win. Otherwise
6824 continue on to try reassociating with the other inner test. */
6825 if (is_and)
6826 {
6827 if (integer_onep (t))
6828 return inner2;
6829 else if (integer_zerop (t))
6830 return boolean_false_node;
6831 }
6832
6833 /* Handle the OR case, where we are redistributing:
6834 (inner1 OR inner2) AND (op2a code2 op2b)
6835 => (t OR (inner2 AND (op2a code2 op2b))) */
6836 else if (integer_onep (t))
6837 return boolean_true_node;
6838
6839 /* Save partial result for later. */
6840 partial = t;
6841 }
6842
6843 /* Compute the second partial result, (inner2 AND (op2a code2 op2b)). */
6844 if (TREE_CODE (inner2) == SSA_NAME
6845 && is_gimple_assign (gs: s = SSA_NAME_DEF_STMT (inner2))
6846 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6847 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (gs: s),
6848 gimple_assign_rhs1 (gs: s),
6849 gimple_assign_rhs2 (gs: s),
6850 code2, op2a, op2b,
6851 outer_cond_bb)))
6852 {
6853 /* Handle the AND case, where we are reassociating:
6854 (inner1 AND inner2) AND (op2a code2 op2b)
6855 => (inner1 AND t) */
6856 if (is_and)
6857 {
6858 if (integer_onep (t))
6859 return inner1;
6860 else if (integer_zerop (t))
6861 return boolean_false_node;
6862 /* If both are the same, we can apply the identity
6863 (x AND x) == x. */
6864 else if (partial && same_bool_result_p (op1: t, op2: partial))
6865 return t;
6866 }
6867
6868 /* Handle the OR case, where we are redistributing:
6869 (inner1 OR inner2) AND (op2a code2 op2b)
6870 => (t OR (inner1 AND (op2a code2 op2b)))
6871 => (t OR partial) */
6872 else
6873 {
6874 if (integer_onep (t))
6875 return boolean_true_node;
6876 else if (partial)
6877 {
6878 /* We already got a simplification for the other
6879 operand to the redistributed OR expression. The
6880 interesting case is when at least one is false.
6881 Or, if both are the same, we can apply the identity
6882 (x OR x) == x. */
6883 if (integer_zerop (partial))
6884 return t;
6885 else if (integer_zerop (t))
6886 return partial;
6887 else if (same_bool_result_p (op1: t, op2: partial))
6888 return t;
6889 }
6890 }
6891 }
6892 }
6893 return NULL_TREE;
6894}
6895
6896/* Try to simplify the AND of two comparisons defined by
6897 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6898 If this can be done without constructing an intermediate value,
6899 return the resulting tree; otherwise NULL_TREE is returned.
6900 This function is deliberately asymmetric as it recurses on SSA_DEFs
6901 in the first comparison but not the second. */
6902
6903static tree
6904and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6905 enum tree_code code2, tree op2a, tree op2b,
6906 basic_block outer_cond_bb)
6907{
6908 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6909
6910 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6911 if (operand_equal_p (op1a, op2a, flags: 0)
6912 && operand_equal_p (op1b, op2b, flags: 0))
6913 {
6914 /* Result will be either NULL_TREE, or a combined comparison. */
6915 tree t = combine_comparisons (UNKNOWN_LOCATION,
6916 TRUTH_ANDIF_EXPR, code1, code2,
6917 truth_type, op1a, op1b);
6918 if (t)
6919 return t;
6920 }
6921
6922 /* Likewise the swapped case of the above. */
6923 if (operand_equal_p (op1a, op2b, flags: 0)
6924 && operand_equal_p (op1b, op2a, flags: 0))
6925 {
6926 /* Result will be either NULL_TREE, or a combined comparison. */
6927 tree t = combine_comparisons (UNKNOWN_LOCATION,
6928 TRUTH_ANDIF_EXPR, code1,
6929 swap_tree_comparison (code2),
6930 truth_type, op1a, op1b);
6931 if (t)
6932 return t;
6933 }
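 /* For instance (with integer operands, so the comparisons are always
    ordered), combine_comparisons gives
	(x <= y) AND (x >= y)  =>  x == y
	(x <  y) AND (x >  y)  =>  false
    and the swapped form above handles (x < y) AND (y > x) => x < y. */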
6934
6935 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6936 NAME's definition is a truth value. See if there are any simplifications
6937 that can be done against the NAME's definition. */
6938 if (TREE_CODE (op1a) == SSA_NAME
6939 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6940 && (integer_zerop (op1b) || integer_onep (op1b)))
6941 {
6942 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6943 || (code1 == NE_EXPR && integer_onep (op1b)));
6944 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6945 switch (gimple_code (g: stmt))
6946 {
6947 case GIMPLE_ASSIGN:
6948 /* Try to simplify by copy-propagating the definition. */
6949 return and_var_with_comparison (type, var: op1a, invert, code2, op2a,
6950 op2b, outer_cond_bb);
6951
6952 case GIMPLE_PHI:
6953 /* If every argument to the PHI produces the same result when
6954 ANDed with the second comparison, we win.
6955 Do not do this unless the type is bool since we need a bool
6956 result here anyway. */
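	 /* Illustrative sketch: given x_3 = PHI <0(bb1), y_2(bb2)> and the
	    conjunction (x_3 != 0) AND (a < b), the constant argument 0
	    yields boolean_false_node, so we succeed only if ANDing y_2
	    with (a < b) below also yields false. */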
6957 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6958 {
6959 tree result = NULL_TREE;
6960 unsigned i;
6961 for (i = 0; i < gimple_phi_num_args (gs: stmt); i++)
6962 {
6963 tree arg = gimple_phi_arg_def (gs: stmt, index: i);
6964
6965 /* If this PHI has itself as an argument, ignore it.
6966 If all the other args produce the same result,
6967 we're still OK. */
6968 if (arg == gimple_phi_result (gs: stmt))
6969 continue;
6970 else if (TREE_CODE (arg) == INTEGER_CST)
6971 {
6972 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6973 {
6974 if (!result)
6975 result = boolean_false_node;
6976 else if (!integer_zerop (result))
6977 return NULL_TREE;
6978 }
6979 else if (!result)
6980 result = fold_build2 (code2, boolean_type_node,
6981 op2a, op2b);
6982 else if (!same_bool_comparison_p (expr: result,
6983 code: code2, op1: op2a, op2: op2b))
6984 return NULL_TREE;
6985 }
6986 else if (TREE_CODE (arg) == SSA_NAME
6987 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6988 {
6989 tree temp;
6990 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6991 /* In simple cases we can look through PHI nodes,
6992 but we have to be careful with loops.
6993 See PR49073. */
6994 if (! dom_info_available_p (CDI_DOMINATORS)
6995 || gimple_bb (g: def_stmt) == gimple_bb (g: stmt)
6996 || dominated_by_p (CDI_DOMINATORS,
6997 gimple_bb (g: def_stmt),
6998 gimple_bb (g: stmt)))
6999 return NULL_TREE;
7000 temp = and_var_with_comparison (type, var: arg, invert, code2,
7001 op2a, op2b,
7002 outer_cond_bb);
7003 if (!temp)
7004 return NULL_TREE;
7005 else if (!result)
7006 result = temp;
7007 else if (!same_bool_result_p (op1: result, op2: temp))
7008 return NULL_TREE;
7009 }
7010 else
7011 return NULL_TREE;
7012 }
7013 return result;
7014 }
7015
7016 default:
7017 break;
7018 }
7019 }
7020 return NULL_TREE;
7021}
7022
7023static basic_block fosa_bb;
7024static vec<std::pair<tree, flow_sensitive_info_storage> > *fosa_unwind;
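/* Valueization hook for the resimplify call below. SSA names whose
   definitions dominate fosa_bb (the outer condition block) may be
   followed as-is. For other defs, refuse (return NULL_TREE) when the
   type has undefined overflow behavior, since such stmts cannot be
   temporarily rewritten; otherwise save and clear the name's
   flow-sensitive info, pushing it on fosa_unwind so callers can
   restore it afterwards. */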
7025static tree
7026follow_outer_ssa_edges (tree val)
7027{
7028 if (TREE_CODE (val) == SSA_NAME
7029 && !SSA_NAME_IS_DEFAULT_DEF (val))
7030 {
7031 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
7032 if (!def_bb
7033 || def_bb == fosa_bb
7034 || (dom_info_available_p (CDI_DOMINATORS)
7035 && (def_bb == fosa_bb
7036 || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
7037 return val;
7038 /* We cannot temporarily rewrite stmts with undefined overflow
7039 behavior, so avoid expanding them. */
7040 if ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (val))
7041 || POINTER_TYPE_P (TREE_TYPE (val)))
7042 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (val)))
7043 return NULL_TREE;
7044 /* If the definition does not dominate fosa_bb, temporarily reset
7045 flow-sensitive info. */
7046 flow_sensitive_info_storage storage;
7047 storage.save_and_clear (val);
7048 fosa_unwind->safe_push (obj: std::make_pair (x&: val, y&: storage));
7049 return val;
7050 }
7051 return val;
7052}
7053
7054/* Helper function for maybe_fold_and_comparisons and
7055 maybe_fold_or_comparisons: try to simplify the AND/OR of the two
7056 comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) using match.pd.
7057 Return NULL_TREE if we can't simplify this to a single expression.
7058 As we are going to lower the cost of building SSA names / gimple stmts
7059 significantly, we allocate them on the stack, which makes the code a bit ugly. */
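/* Illustrative sketch of the mechanism: to try folding, say,
   (a >= 0) AND (a <= 9), the code below materializes
	lhs1 = a >= 0;	lhs2 = a <= 9;
   as stack-allocated statements and asks match.pd to resimplify
   BIT_AND_EXPR <lhs1, lhs2>; any simplification found is rebuilt as a
   tree, with no real SSA names ever allocated. */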
7060
7061static tree
7062maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
7063 enum tree_code code1,
7064 tree op1a, tree op1b,
7065 enum tree_code code2, tree op2a,
7066 tree op2b,
7067 basic_block outer_cond_bb)
7068{
7069 /* Allocate gimple stmt1 on the stack. */
7070 gassign *stmt1
7071 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7072 gimple_init (g: stmt1, code: GIMPLE_ASSIGN, num_ops: 3);
7073 gimple_assign_set_rhs_code (s: stmt1, code: code1);
7074 gimple_assign_set_rhs1 (gs: stmt1, rhs: op1a);
7075 gimple_assign_set_rhs2 (gs: stmt1, rhs: op1b);
7076 gimple_set_bb (stmt1, NULL);
7077
7078 /* Allocate gimple stmt2 on the stack. */
7079 gassign *stmt2
7080 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7081 gimple_init (g: stmt2, code: GIMPLE_ASSIGN, num_ops: 3);
7082 gimple_assign_set_rhs_code (s: stmt2, code: code2);
7083 gimple_assign_set_rhs1 (gs: stmt2, rhs: op2a);
7084 gimple_assign_set_rhs2 (gs: stmt2, rhs: op2b);
7085 gimple_set_bb (stmt2, NULL);
7086
7087 /* Allocate the SSA name lhs1 on the stack. */
7088 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
7089 memset (s: lhs1, c: 0, n: sizeof (tree_ssa_name));
7090 TREE_SET_CODE (lhs1, SSA_NAME);
7091 TREE_TYPE (lhs1) = type;
7092 init_ssa_name_imm_use (lhs1);
7093
7094 /* Allocate the SSA name lhs2 on the stack. */
7095 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
7096 memset (s: lhs2, c: 0, n: sizeof (tree_ssa_name));
7097 TREE_SET_CODE (lhs2, SSA_NAME);
7098 TREE_TYPE (lhs2) = type;
7099 init_ssa_name_imm_use (lhs2);
7100
7101 gimple_assign_set_lhs (gs: stmt1, lhs: lhs1);
7102 gimple_assign_set_lhs (gs: stmt2, lhs: lhs2);
7103
7104 gimple_match_op op (gimple_match_cond::UNCOND, code,
7105 type, gimple_assign_lhs (gs: stmt1),
7106 gimple_assign_lhs (gs: stmt2));
7107 fosa_bb = outer_cond_bb;
7108 auto_vec<std::pair<tree, flow_sensitive_info_storage>, 8> unwind_stack;
7109 fosa_unwind = &unwind_stack;
7110 if (op.resimplify (NULL, (!outer_cond_bb
7111 ? follow_all_ssa_edges : follow_outer_ssa_edges)))
7112 {
7113 fosa_unwind = NULL;
7114 for (auto p : unwind_stack)
7115 p.second.restore (p.first);
7116 if (gimple_simplified_result_is_gimple_val (op: &op))
7117 {
7118 tree res = op.ops[0];
7119 if (res == lhs1)
7120 return build2 (code1, type, op1a, op1b);
7121 else if (res == lhs2)
7122 return build2 (code2, type, op2a, op2b);
7123 else
7124 return res;
7125 }
7126 else if (op.code.is_tree_code ()
7127 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
7128 {
7129 tree op0 = op.ops[0];
7130 tree op1 = op.ops[1];
7131 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
7132 return NULL_TREE; /* not simple */
7133
7134 return build2 ((enum tree_code)op.code, op.type, op0, op1);
7135 }
7136 }
7137 fosa_unwind = NULL;
7138 for (auto p : unwind_stack)
7139 p.second.restore (p.first);
7140
7141 return NULL_TREE;
7142}
7143
7144/* Try to simplify the AND of two comparisons, specified by
7145 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7146 If this can be simplified to a single expression (without requiring
7147 introducing more SSA variables to hold intermediate values),
7148 return the resulting tree. Otherwise return NULL_TREE.
7149 If the result expression is non-null, it has boolean type. */
7150
7151tree
7152maybe_fold_and_comparisons (tree type,
7153 enum tree_code code1, tree op1a, tree op1b,
7154 enum tree_code code2, tree op2a, tree op2b,
7155 basic_block outer_cond_bb)
7156{
7157 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7158 outer_cond_bb))
7159 return t;
7160
7161 if (tree t = and_comparisons_1 (type, code1: code2, op1a: op2a, op1b: op2b, code2: code1, op2a: op1a, op2b: op1b,
7162 outer_cond_bb))
7163 return t;
7164
7165 if (tree t = maybe_fold_comparisons_from_match_pd (type, code: BIT_AND_EXPR, code1,
7166 op1a, op1b, code2, op2a,
7167 op2b, outer_cond_bb))
7168 return t;
7169
7170 return NULL_TREE;
7171}
7172
7173/* Helper function for or_comparisons_1: try to simplify the OR of the
7174 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7175 If INVERT is true, invert the value of VAR before doing the OR.
7176 Return NULL_TREE if we can't simplify this to a single expression. */
7177
7178static tree
7179or_var_with_comparison (tree type, tree var, bool invert,
7180 enum tree_code code2, tree op2a, tree op2b,
7181 basic_block outer_cond_bb)
7182{
7183 tree t;
7184 gimple *stmt = SSA_NAME_DEF_STMT (var);
7185
7186 /* We can only deal with variables whose definitions are assignments. */
7187 if (!is_gimple_assign (gs: stmt))
7188 return NULL_TREE;
7189
7190 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7191 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7192 Then we only have to consider the simpler non-inverted cases. */
7193 if (invert)
7194 t = and_var_with_comparison_1 (type, stmt,
7195 code2: invert_tree_comparison (code2, false),
7196 op2a, op2b, outer_cond_bb);
7197 else
7198 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7199 outer_cond_bb);
7200 return canonicalize_bool (expr: t, invert);
7201}
7202
7203/* Try to simplify the OR of the ssa variable defined by the assignment
7204 STMT with the comparison specified by (OP2A CODE2 OP2B).
7205 Return NULL_TREE if we can't simplify this to a single expression. */
7206
7207static tree
7208or_var_with_comparison_1 (tree type, gimple *stmt,
7209 enum tree_code code2, tree op2a, tree op2b,
7210 basic_block outer_cond_bb)
7211{
7212 tree var = gimple_assign_lhs (gs: stmt);
7213 tree true_test_var = NULL_TREE;
7214 tree false_test_var = NULL_TREE;
7215 enum tree_code innercode = gimple_assign_rhs_code (gs: stmt);
7216
7217 /* Check for identities like (var OR (var != 0)) => true. */
7218 if (TREE_CODE (op2a) == SSA_NAME
7219 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7220 {
7221 if ((code2 == NE_EXPR && integer_zerop (op2b))
7222 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7223 {
7224 true_test_var = op2a;
7225 if (var == true_test_var)
7226 return var;
7227 }
7228 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7229 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7230 {
7231 false_test_var = op2a;
7232 if (var == false_test_var)
7233 return boolean_true_node;
7234 }
7235 }
7236
7237 /* If the definition is a comparison, recurse on it. */
7238 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7239 {
7240 tree t = or_comparisons_1 (type, code1: innercode,
7241 op1a: gimple_assign_rhs1 (gs: stmt),
7242 op1b: gimple_assign_rhs2 (gs: stmt),
7243 code2, op2a, op2b, outer_cond_bb);
7244 if (t)
7245 return t;
7246 }
7247
7248 /* If the definition is an AND or OR expression, we may be able to
7249 simplify by reassociating. */
7250 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7251 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7252 {
7253 tree inner1 = gimple_assign_rhs1 (gs: stmt);
7254 tree inner2 = gimple_assign_rhs2 (gs: stmt);
7255 gimple *s;
7256 tree t;
7257 tree partial = NULL_TREE;
7258 bool is_or = (innercode == BIT_IOR_EXPR);
7259
7260 /* Check for boolean identities that don't require recursive examination
7261 of inner1/inner2:
7262 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7263 inner1 OR (inner1 AND inner2) => inner1
7264 !inner1 OR (inner1 OR inner2) => true
7265 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7266 */
7267 if (inner1 == true_test_var)
7268 return (is_or ? var : inner1);
7269 else if (inner2 == true_test_var)
7270 return (is_or ? var : inner2);
7271 else if (inner1 == false_test_var)
7272 return (is_or
7273 ? boolean_true_node
7274 : or_var_with_comparison (type, var: inner2, invert: false, code2, op2a,
7275 op2b, outer_cond_bb));
7276 else if (inner2 == false_test_var)
7277 return (is_or
7278 ? boolean_true_node
7279 : or_var_with_comparison (type, var: inner1, invert: false, code2, op2a,
7280 op2b, outer_cond_bb));
7281
7282 /* Next, redistribute/reassociate the OR across the inner tests.
7283 Compute the first partial result, (inner1 OR (op2a code2 op2b)). */
7284 if (TREE_CODE (inner1) == SSA_NAME
7285 && is_gimple_assign (gs: s = SSA_NAME_DEF_STMT (inner1))
7286 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7287 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (gs: s),
7288 gimple_assign_rhs1 (gs: s),
7289 gimple_assign_rhs2 (gs: s),
7290 code2, op2a, op2b,
7291 outer_cond_bb)))
7292 {
7293 /* Handle the OR case, where we are reassociating:
7294 (inner1 OR inner2) OR (op2a code2 op2b)
7295 => (t OR inner2)
7296 If the partial result t is a constant, we win. Otherwise
7297 continue on to try reassociating with the other inner test. */
7298 if (is_or)
7299 {
7300 if (integer_onep (t))
7301 return boolean_true_node;
7302 else if (integer_zerop (t))
7303 return inner2;
7304 }
7305
7306 /* Handle the AND case, where we are redistributing:
7307 (inner1 AND inner2) OR (op2a code2 op2b)
7308 => (t AND (inner2 OR (op2a code2 op2b))) */
7309 else if (integer_zerop (t))
7310 return boolean_false_node;
7311
7312 /* Save partial result for later. */
7313 partial = t;
7314 }
7315
7316 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)). */
7317 if (TREE_CODE (inner2) == SSA_NAME
7318 && is_gimple_assign (gs: s = SSA_NAME_DEF_STMT (inner2))
7319 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7320 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (gs: s),
7321 gimple_assign_rhs1 (gs: s),
7322 gimple_assign_rhs2 (gs: s),
7323 code2, op2a, op2b,
7324 outer_cond_bb)))
7325 {
7326 /* Handle the OR case, where we are reassociating:
7327 (inner1 OR inner2) OR (op2a code2 op2b)
7328 => (inner1 OR t)
7329 => (t OR partial) */
7330 if (is_or)
7331 {
7332 if (integer_zerop (t))
7333 return inner1;
7334 else if (integer_onep (t))
7335 return boolean_true_node;
7336 /* If both are the same, we can apply the identity
7337 (x OR x) == x. */
7338 else if (partial && same_bool_result_p (op1: t, op2: partial))
7339 return t;
7340 }
7341
7342 /* Handle the AND case, where we are redistributing:
7343 (inner1 AND inner2) OR (op2a code2 op2b)
7344 => (t AND (inner1 OR (op2a code2 op2b)))
7345 => (t AND partial) */
7346 else
7347 {
7348 if (integer_zerop (t))
7349 return boolean_false_node;
7350 else if (partial)
7351 {
7352 /* We already got a simplification for the other
7353 operand to the redistributed AND expression. The
7354 interesting case is when at least one is true.
7355 Or, if both are the same, we can apply the identity
7356 (x AND x) == x. */
7357 if (integer_onep (partial))
7358 return t;
7359 else if (integer_onep (t))
7360 return partial;
7361 else if (same_bool_result_p (op1: t, op2: partial))
7362 return t;
7363 }
7364 }
7365 }
7366 }
7367 return NULL_TREE;
7368}
7369
7370/* Try to simplify the OR of two comparisons defined by
7371 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7372 If this can be done without constructing an intermediate value,
7373 return the resulting tree; otherwise NULL_TREE is returned.
7374 This function is deliberately asymmetric as it recurses on SSA_DEFs
7375 in the first comparison but not the second. */
7376
7377static tree
7378or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7379 enum tree_code code2, tree op2a, tree op2b,
7380 basic_block outer_cond_bb)
7381{
7382 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7383
7384 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7385 if (operand_equal_p (op1a, op2a, flags: 0)
7386 && operand_equal_p (op1b, op2b, flags: 0))
7387 {
7388 /* Result will be either NULL_TREE, or a combined comparison. */
7389 tree t = combine_comparisons (UNKNOWN_LOCATION,
7390 TRUTH_ORIF_EXPR, code1, code2,
7391 truth_type, op1a, op1b);
7392 if (t)
7393 return t;
7394 }
7395
7396 /* Likewise the swapped case of the above. */
7397 if (operand_equal_p (op1a, op2b, flags: 0)
7398 && operand_equal_p (op1b, op2a, flags: 0))
7399 {
7400 /* Result will be either NULL_TREE, or a combined comparison. */
7401 tree t = combine_comparisons (UNKNOWN_LOCATION,
7402 TRUTH_ORIF_EXPR, code1,
7403 swap_tree_comparison (code2),
7404 truth_type, op1a, op1b);
7405 if (t)
7406 return t;
7407 }
7408
7409 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7410 NAME's definition is a truth value. See if there are any simplifications
7411 that can be done against the NAME's definition. */
7412 if (TREE_CODE (op1a) == SSA_NAME
7413 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7414 && (integer_zerop (op1b) || integer_onep (op1b)))
7415 {
7416 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7417 || (code1 == NE_EXPR && integer_onep (op1b)));
7418 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7419 switch (gimple_code (g: stmt))
7420 {
7421 case GIMPLE_ASSIGN:
7422 /* Try to simplify by copy-propagating the definition. */
7423 return or_var_with_comparison (type, var: op1a, invert, code2, op2a,
7424 op2b, outer_cond_bb);
7425
7426 case GIMPLE_PHI:
7427 /* If every argument to the PHI produces the same result when
7428 ORed with the second comparison, we win.
7429 Do not do this unless the type is bool since we need a bool
7430 result here anyway. */
7431 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7432 {
7433 tree result = NULL_TREE;
7434 unsigned i;
7435 for (i = 0; i < gimple_phi_num_args (gs: stmt); i++)
7436 {
7437 tree arg = gimple_phi_arg_def (gs: stmt, index: i);
7438
7439 /* If this PHI has itself as an argument, ignore it.
7440 If all the other args produce the same result,
7441 we're still OK. */
7442 if (arg == gimple_phi_result (gs: stmt))
7443 continue;
7444 else if (TREE_CODE (arg) == INTEGER_CST)
7445 {
7446 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7447 {
7448 if (!result)
7449 result = boolean_true_node;
7450 else if (!integer_onep (result))
7451 return NULL_TREE;
7452 }
7453 else if (!result)
7454 result = fold_build2 (code2, boolean_type_node,
7455 op2a, op2b);
7456 else if (!same_bool_comparison_p (expr: result,
7457 code: code2, op1: op2a, op2: op2b))
7458 return NULL_TREE;
7459 }
7460 else if (TREE_CODE (arg) == SSA_NAME
7461 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7462 {
7463 tree temp;
7464 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7465 /* In simple cases we can look through PHI nodes,
7466 but we have to be careful with loops.
7467 See PR49073. */
7468 if (! dom_info_available_p (CDI_DOMINATORS)
7469 || gimple_bb (g: def_stmt) == gimple_bb (g: stmt)
7470 || dominated_by_p (CDI_DOMINATORS,
7471 gimple_bb (g: def_stmt),
7472 gimple_bb (g: stmt)))
7473 return NULL_TREE;
7474 temp = or_var_with_comparison (type, var: arg, invert, code2,
7475 op2a, op2b, outer_cond_bb);
7476 if (!temp)
7477 return NULL_TREE;
7478 else if (!result)
7479 result = temp;
7480 else if (!same_bool_result_p (op1: result, op2: temp))
7481 return NULL_TREE;
7482 }
7483 else
7484 return NULL_TREE;
7485 }
7486 return result;
7487 }
7488
7489 default:
7490 break;
7491 }
7492 }
7493 return NULL_TREE;
7494}
7495
7496/* Try to simplify the OR of two comparisons, specified by
7497 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7498 If this can be simplified to a single expression (without requiring
7499 introducing more SSA variables to hold intermediate values),
7500 return the resulting tree. Otherwise return NULL_TREE.
7501 If the result expression is non-null, it has boolean type. */
7502
7503tree
7504maybe_fold_or_comparisons (tree type,
7505 enum tree_code code1, tree op1a, tree op1b,
7506 enum tree_code code2, tree op2a, tree op2b,
7507 basic_block outer_cond_bb)
7508{
7509 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7510 outer_cond_bb))
7511 return t;
7512
7513 if (tree t = or_comparisons_1 (type, code1: code2, op1a: op2a, op1b: op2b, code2: code1, op2a: op1a, op2b: op1b,
7514 outer_cond_bb))
7515 return t;
7516
7517 if (tree t = maybe_fold_comparisons_from_match_pd (type, code: BIT_IOR_EXPR, code1,
7518 op1a, op1b, code2, op2a,
7519 op2b, outer_cond_bb))
7520 return t;
7521
7522 return NULL_TREE;
7523}
7524
7525/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7526
7527 Either NULL_TREE, a simplified but non-constant, or a constant
7528 is returned.
7529
7530 ??? This should go into a gimple-fold-inline.h file to be eventually
7531 privatized with the single valueize function used in the various TUs
7532 to avoid the indirect function call overhead. */
7533
7534tree
7535gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7536 tree (*gvalueize) (tree))
7537{
7538 gimple_match_op res_op;
7539 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7540 edges if there are intermediate VARYING defs. For this reason
7541 do not follow SSA edges here even though SCCVN can technically
7542 deal just fine with that. */
7543 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7544 {
7545 tree res = NULL_TREE;
7546 if (gimple_simplified_result_is_gimple_val (op: &res_op))
7547 res = res_op.ops[0];
7548 else if (mprts_hook)
7549 res = mprts_hook (&res_op);
7550 if (res)
7551 {
7552 if (dump_file && dump_flags & TDF_DETAILS)
7553 {
7554 fprintf (stream: dump_file, format: "Match-and-simplified ");
7555 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7556 fprintf (stream: dump_file, format: " to ");
7557 print_generic_expr (dump_file, res);
7558 fprintf (stream: dump_file, format: "\n");
7559 }
7560 return res;
7561 }
7562 }
7563
7564 location_t loc = gimple_location (g: stmt);
7565 switch (gimple_code (g: stmt))
7566 {
7567 case GIMPLE_ASSIGN:
7568 {
7569 enum tree_code subcode = gimple_assign_rhs_code (gs: stmt);
7570
7571 switch (get_gimple_rhs_class (code: subcode))
7572 {
7573 case GIMPLE_SINGLE_RHS:
7574 {
7575 tree rhs = gimple_assign_rhs1 (gs: stmt);
7576 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7577
7578 if (TREE_CODE (rhs) == SSA_NAME)
7579 {
7580 /* If the RHS is an SSA_NAME, return its known constant value,
7581 if any. */
7582 return (*valueize) (rhs);
7583 }
7584 /* Handle propagating invariant addresses into address
7585 operations. */
7586 else if (TREE_CODE (rhs) == ADDR_EXPR
7587 && !is_gimple_min_invariant (rhs))
7588 {
7589 poly_int64 offset = 0;
7590 tree base;
7591 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7592 &offset,
7593 valueize);
7594 if (base
7595 && (CONSTANT_CLASS_P (base)
7596 || decl_address_invariant_p (base)))
7597 return build_invariant_address (TREE_TYPE (rhs),
7598 base, offset);
7599 }
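	  /* Valueize the elements of a fully-covering vector
	     CONSTRUCTOR: e.g. if x_2 valueizes to 2, { 1, x_2, 3, 4 }
	     folds to the VECTOR_CST { 1, 2, 3, 4 }; an element that
	     does not valueize to a scalar constant makes us give up. */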
7600 else if (TREE_CODE (rhs) == CONSTRUCTOR
7601 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7602 && known_eq (CONSTRUCTOR_NELTS (rhs),
7603 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7604 {
7605 unsigned i, nelts;
7606 tree val;
7607
7608 nelts = CONSTRUCTOR_NELTS (rhs);
7609 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7610 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7611 {
7612 val = (*valueize) (val);
7613 if (TREE_CODE (val) == INTEGER_CST
7614 || TREE_CODE (val) == REAL_CST
7615 || TREE_CODE (val) == FIXED_CST)
7616 vec.quick_push (obj: val);
7617 else
7618 return NULL_TREE;
7619 }
7620
7621 return vec.build ();
7622 }
7623 if (subcode == OBJ_TYPE_REF)
7624 {
7625 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7626 /* If callee is constant, we can fold away the wrapper. */
7627 if (is_gimple_min_invariant (val))
7628 return val;
7629 }
7630
7631 if (kind == tcc_reference)
7632 {
7633 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7634 || TREE_CODE (rhs) == REALPART_EXPR
7635 || TREE_CODE (rhs) == IMAGPART_EXPR)
7636 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7637 {
7638 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7639 return fold_unary_loc (EXPR_LOCATION (rhs),
7640 TREE_CODE (rhs),
7641 TREE_TYPE (rhs), val);
7642 }
7643 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7644 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7645 {
7646 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7647 return fold_ternary_loc (EXPR_LOCATION (rhs),
7648 TREE_CODE (rhs),
7649 TREE_TYPE (rhs), val,
7650 TREE_OPERAND (rhs, 1),
7651 TREE_OPERAND (rhs, 2));
7652 }
7653 else if (TREE_CODE (rhs) == MEM_REF
7654 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7655 {
7656 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7657 if (TREE_CODE (val) == ADDR_EXPR
7658 && is_gimple_min_invariant (val))
7659 {
7660 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7661 unshare_expr (val),
7662 TREE_OPERAND (rhs, 1));
7663 if (tem)
7664 rhs = tem;
7665 }
7666 }
7667 return fold_const_aggregate_ref_1 (rhs, valueize);
7668 }
7669 else if (kind == tcc_declaration)
7670 return get_symbol_constant_value (sym: rhs);
7671 return rhs;
7672 }
7673
7674 case GIMPLE_UNARY_RHS:
7675 return NULL_TREE;
7676
7677 case GIMPLE_BINARY_RHS:
7678 /* Translate &x + CST into an invariant form suitable for
7679 further propagation. */
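	/* E.g. when op0 valueizes to &a and op1 to 16, the result is
	   ADDR_EXPR <MEM_REF <&a, 16>>, an invariant address that can
	   be propagated further. */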
7680 if (subcode == POINTER_PLUS_EXPR)
7681 {
7682 tree op0 = (*valueize) (gimple_assign_rhs1 (gs: stmt));
7683 tree op1 = (*valueize) (gimple_assign_rhs2 (gs: stmt));
7684 if (TREE_CODE (op0) == ADDR_EXPR
7685 && TREE_CODE (op1) == INTEGER_CST)
7686 {
7687 tree off = fold_convert (ptr_type_node, op1);
7688 return build1_loc
7689 (loc, code: ADDR_EXPR, TREE_TYPE (op0),
7690 fold_build2 (MEM_REF,
7691 TREE_TYPE (TREE_TYPE (op0)),
7692 unshare_expr (op0), off));
7693 }
7694 }
7695 /* Canonicalize bool != 0 and bool == 0 appearing after
7696 valueization. While gimple_simplify handles this,
7697 it can get confused by the ~X == 1 -> X == 0 transform,
7698 which we can't reduce to an SSA name or a constant
7699 (and we have no way to tell gimple_simplify to not
7700 consider those transforms in the first place). */
7701 else if (subcode == EQ_EXPR
7702 || subcode == NE_EXPR)
7703 {
7704 tree lhs = gimple_assign_lhs (gs: stmt);
7705 tree op0 = gimple_assign_rhs1 (gs: stmt);
7706 if (useless_type_conversion_p (TREE_TYPE (lhs),
7707 TREE_TYPE (op0)))
7708 {
7709 tree op1 = (*valueize) (gimple_assign_rhs2 (gs: stmt));
7710 op0 = (*valueize) (op0);
7711 if (TREE_CODE (op0) == INTEGER_CST)
7712 std::swap (a&: op0, b&: op1);
7713 if (TREE_CODE (op1) == INTEGER_CST
7714 && ((subcode == NE_EXPR && integer_zerop (op1))
7715 || (subcode == EQ_EXPR && integer_onep (op1))))
7716 return op0;
7717 }
7718 }
7719 return NULL_TREE;
7720
7721 case GIMPLE_TERNARY_RHS:
7722 {
7723 /* Handle ternary operators that can appear in GIMPLE form. */
7724 tree op0 = (*valueize) (gimple_assign_rhs1 (gs: stmt));
7725 tree op1 = (*valueize) (gimple_assign_rhs2 (gs: stmt));
7726 tree op2 = (*valueize) (gimple_assign_rhs3 (gs: stmt));
7727 return fold_ternary_loc (loc, subcode,
7728 TREE_TYPE (gimple_assign_lhs (stmt)),
7729 op0, op1, op2);
7730 }
7731
7732 default:
7733 gcc_unreachable ();
7734 }
7735 }
7736
7737 case GIMPLE_CALL:
7738 {
7739 tree fn;
7740 gcall *call_stmt = as_a <gcall *> (p: stmt);
7741
7742 if (gimple_call_internal_p (gs: stmt))
7743 {
7744 enum tree_code subcode = ERROR_MARK;
7745 switch (gimple_call_internal_fn (gs: stmt))
7746 {
7747 case IFN_UBSAN_CHECK_ADD:
7748 subcode = PLUS_EXPR;
7749 break;
7750 case IFN_UBSAN_CHECK_SUB:
7751 subcode = MINUS_EXPR;
7752 break;
7753 case IFN_UBSAN_CHECK_MUL:
7754 subcode = MULT_EXPR;
7755 break;
7756 case IFN_BUILTIN_EXPECT:
7757 {
7758 tree arg0 = gimple_call_arg (gs: stmt, index: 0);
7759 tree op0 = (*valueize) (arg0);
7760 if (TREE_CODE (op0) == INTEGER_CST)
7761 return op0;
7762 return NULL_TREE;
7763 }
7764 default:
7765 return NULL_TREE;
7766 }
7767 tree arg0 = gimple_call_arg (gs: stmt, index: 0);
7768 tree arg1 = gimple_call_arg (gs: stmt, index: 1);
7769 tree op0 = (*valueize) (arg0);
7770 tree op1 = (*valueize) (arg1);
7771
7772 if (TREE_CODE (op0) != INTEGER_CST
7773 || TREE_CODE (op1) != INTEGER_CST)
7774 {
7775 switch (subcode)
7776 {
7777 case MULT_EXPR:
7778 /* x * 0 = 0 * x = 0 without overflow. */
7779 if (integer_zerop (op0) || integer_zerop (op1))
7780 return build_zero_cst (TREE_TYPE (arg0));
7781 break;
7782 case MINUS_EXPR:
7783 /* y - y = 0 without overflow. */
7784 if (operand_equal_p (op0, op1, flags: 0))
7785 return build_zero_cst (TREE_TYPE (arg0));
7786 break;
7787 default:
7788 break;
7789 }
7790 }
7791 tree res
7792 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7793 if (res
7794 && TREE_CODE (res) == INTEGER_CST
7795 && !TREE_OVERFLOW (res))
7796 return res;
7797 return NULL_TREE;
7798 }
7799
7800 fn = (*valueize) (gimple_call_fn (gs: stmt));
7801 if (TREE_CODE (fn) == ADDR_EXPR
7802 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7803 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7804 && gimple_builtin_call_types_compatible_p (stmt,
7805 TREE_OPERAND (fn, 0)))
7806 {
7807 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7808 tree retval;
7809 unsigned i;
7810 for (i = 0; i < gimple_call_num_args (gs: stmt); ++i)
7811 args[i] = (*valueize) (gimple_call_arg (gs: stmt, index: i));
7812 retval = fold_builtin_call_array (loc,
7813 gimple_call_return_type (gs: call_stmt),
7814 fn, gimple_call_num_args (gs: stmt), args);
7815 if (retval)
7816 {
7817 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7818 STRIP_NOPS (retval);
7819 retval = fold_convert (gimple_call_return_type (call_stmt),
7820 retval);
7821 }
7822 return retval;
7823 }
7824 return NULL_TREE;
7825 }
7826
7827 default:
7828 return NULL_TREE;
7829 }
7830}
7831
7832/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7833 Returns NULL_TREE if folding to a constant is not possible, otherwise
7834 returns a constant according to is_gimple_min_invariant. */
7835
7836tree
7837gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7838{
7839 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7840 if (res && is_gimple_min_invariant (res))
7841 return res;
7842 return NULL_TREE;
7843}
7844
7845
7846/* The following set of functions is supposed to fold references using
7847 their constant initializers. */
7848
7849/* See if we can find a constructor defining the value of BASE.
7850 When we know the constructor at a constant offset (such as when
7851 BASE is array[40] and we know the constructor of the array), then
7852 BIT_OFFSET is adjusted accordingly.
7853
7854 As a special case, return error_mark_node when the constructor
7855 is not explicitly available, but it is known to be zero,
7856 such as 'static const int a;'. */
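/* E.g. for MEM_REF <&arr, 8> with *BIT_OFFSET 0, *BIT_OFFSET becomes
   64 on an 8-bit-byte target and (assuming it is known) the
   constructor of arr is returned. */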
7857static tree
7858get_base_constructor (tree base, poly_int64 *bit_offset,
7859 tree (*valueize)(tree))
7860{
7861 poly_int64 bit_offset2, size, max_size;
7862 bool reverse;
7863
7864 if (TREE_CODE (base) == MEM_REF)
7865 {
7866 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7867 if (!boff.to_shwi (r: bit_offset))
7868 return NULL_TREE;
7869
7870 if (valueize
7871 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7872 base = valueize (TREE_OPERAND (base, 0));
7873 if (!base || TREE_CODE (base) != ADDR_EXPR)
7874 return NULL_TREE;
7875 base = TREE_OPERAND (base, 0);
7876 }
7877 else if (valueize
7878 && TREE_CODE (base) == SSA_NAME)
7879 base = valueize (base);
7880
7881 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7882 DECL_INITIAL. If BASE is a nested reference into another
7883 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7884 the inner reference. */
7885 switch (TREE_CODE (base))
7886 {
7887 case VAR_DECL:
7888 case CONST_DECL:
7889 {
7890 tree init = ctor_for_folding (base);
7891
7892 /* Our semantics are the exact opposite of ctor_for_folding's:
7893 NULL means unknown, while error_mark_node means zero. */
7894 if (init == error_mark_node)
7895 return NULL_TREE;
7896 if (!init)
7897 return error_mark_node;
7898 return init;
7899 }
7900
7901 case VIEW_CONVERT_EXPR:
7902 return get_base_constructor (TREE_OPERAND (base, 0),
7903 bit_offset, valueize);
7904
7905 case ARRAY_REF:
7906 case COMPONENT_REF:
7907 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7908 &reverse);
7909 if (!known_size_p (a: max_size) || maybe_ne (a: size, b: max_size))
7910 return NULL_TREE;
7911 *bit_offset += bit_offset2;
7912 return get_base_constructor (base, bit_offset, valueize);
7913
7914 case CONSTRUCTOR:
7915 return base;
7916
7917 default:
7918 if (CONSTANT_CLASS_P (base))
7919 return base;
7920
7921 return NULL_TREE;
7922 }
7923}
7924
7925/* CTOR is a CONSTRUCTOR of an array or vector type. Fold a reference of SIZE
7926 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
7927 the reference; otherwise the type of the referenced element is used instead.
7928 When SIZE is zero, attempt to fold a reference to the entire element OFFSET
7929 refers to. Increment *SUBOFF by the bit offset of the accessed element. */
7930
7931static tree
7932fold_array_ctor_reference (tree type, tree ctor,
7933 unsigned HOST_WIDE_INT offset,
7934 unsigned HOST_WIDE_INT size,
7935 tree from_decl,
7936 unsigned HOST_WIDE_INT *suboff)
7937{
7938 offset_int low_bound;
7939 offset_int elt_size;
7940 offset_int access_index;
7941 tree domain_type = NULL_TREE;
7942 HOST_WIDE_INT inner_offset;
7943
7944 /* Compute low bound and elt size. */
7945 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7946 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7947 if (domain_type && TYPE_MIN_VALUE (domain_type))
7948 {
7949 /* Static constructors for variably sized objects make no sense. */
7950 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7951 return NULL_TREE;
7952 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7953 }
7954 else
7955 low_bound = 0;
7956 /* Static constructors for variably sized objects make no sense. */
7957 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7958 return NULL_TREE;
7959 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7960
7961 /* When TYPE is non-null, verify that it specifies a constant-sized
7962 access of a multiple of the array element size. Avoid division
7963 by zero below when ELT_SIZE is zero, such as with the result of
7964 an initializer for a zero-length array or an empty struct. */
7965 if (elt_size == 0
7966 || (type
7967 && (!TYPE_SIZE_UNIT (type)
7968 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7969 return NULL_TREE;
7970
7971 /* Compute the array index we look for. */
7972 access_index = wi::udiv_trunc (x: offset_int (offset / BITS_PER_UNIT),
7973 y: elt_size);
7974 access_index += low_bound;
7975
7976 /* And offset within the access. */
7977 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
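 /* E.g. for an array of 4-byte ints with low bound 0 and OFFSET 72
    (assuming 8-bit units): access_index = 72 / 8 / 4 = 2 and
    inner_offset = 72 % 32 = 8, i.e. bit 8 of the third element. */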
7978
7979 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7980 if (size > elt_sz * BITS_PER_UNIT)
7981 {
7982 /* native_encode_expr constraints. */
7983 if (size > MAX_BITSIZE_MODE_ANY_MODE
7984 || size % BITS_PER_UNIT != 0
7985 || inner_offset % BITS_PER_UNIT != 0
7986 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7987 return NULL_TREE;
7988
7989 unsigned ctor_idx;
7990 tree val = get_array_ctor_element_at_index (ctor, access_index,
7991 &ctor_idx);
7992 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7993 return build_zero_cst (type);
7994
7995 /* native-encode adjacent ctor elements. */
7996 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7997 unsigned bufoff = 0;
7998 offset_int index = 0;
7999 offset_int max_index = access_index;
8000 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
8001 if (!val)
8002 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8003 else if (!CONSTANT_CLASS_P (val))
8004 return NULL_TREE;
8005 if (!elt->index)
8006 ;
8007 else if (TREE_CODE (elt->index) == RANGE_EXPR)
8008 {
8009 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
8010 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
8011 }
8012 else
8013 index = max_index = wi::to_offset (t: elt->index);
8014 index = wi::umax (x: index, y: access_index);
8015 do
8016 {
8017 if (bufoff + elt_sz > sizeof (buf))
8018 elt_sz = sizeof (buf) - bufoff;
8019 int len = native_encode_expr (val, buf + bufoff, elt_sz,
8020 off: inner_offset / BITS_PER_UNIT);
8021 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
8022 return NULL_TREE;
8023 inner_offset = 0;
8024 bufoff += len;
8025
8026 access_index += 1;
8027 if (wi::cmpu (x: access_index, y: index) == 0)
8028 val = elt->value;
8029 else if (wi::cmpu (x: access_index, y: max_index) > 0)
8030 {
8031 ctor_idx++;
8032 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
8033 {
8034 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8035 ++max_index;
8036 }
8037 else
8038 {
8039 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
8040 index = 0;
8041 max_index = access_index;
8042 if (!elt->index)
8043 ;
8044 else if (TREE_CODE (elt->index) == RANGE_EXPR)
8045 {
8046 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
8047 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
8048 }
8049 else
8050 index = max_index = wi::to_offset (t: elt->index);
8051 index = wi::umax (x: index, y: access_index);
8052 if (wi::cmpu (x: access_index, y: index) == 0)
8053 val = elt->value;
8054 else
8055 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
8056 }
8057 }
8058 }
8059 while (bufoff < size / BITS_PER_UNIT);
8060 *suboff += size;
8061 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
8062 }
8063
8064 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
8065 {
8066 if (!size && TREE_CODE (val) != CONSTRUCTOR)
8067 {
8068 /* For the final reference to the entire accessed element
8069 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
8070 may be null) in favor of the type of the element, and set
8071 SIZE to the size of the accessed element. */
8072 inner_offset = 0;
8073 type = TREE_TYPE (val);
8074 size = elt_sz * BITS_PER_UNIT;
8075 }
8076 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
8077 && TREE_CODE (val) == CONSTRUCTOR
8078 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
8079 /* If this isn't the last element in the CTOR and is itself a CTOR
8080 that does not cover the whole object we are requesting, give up
8081 since we're not set up for combining from multiple CTORs. */
8082 return NULL_TREE;
8083
8084 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
8085 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
8086 suboff);
8087 }
8088
8089 /* Memory not explicitly mentioned in the constructor is 0 (or
8090 the reference is out of range). */
8091 return type ? build_zero_cst (type) : NULL_TREE;
8092}
8093
8094/* CTOR is a CONSTRUCTOR of a record or union type. Fold a reference of SIZE
8095 bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
8096 the reference; otherwise the type of the referenced member is used instead.
8097 When SIZE is zero, attempt to fold a reference to the entire member OFFSET
8098 refers to. Increment *SUBOFF by the bit offset of the accessed member. */
8099
8100static tree
8101fold_nonarray_ctor_reference (tree type, tree ctor,
8102 unsigned HOST_WIDE_INT offset,
8103 unsigned HOST_WIDE_INT size,
8104 tree from_decl,
8105 unsigned HOST_WIDE_INT *suboff)
8106{
8107 unsigned HOST_WIDE_INT cnt;
8108 tree cfield, cval;
8109
8110 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
8111 {
8112 tree byte_offset = DECL_FIELD_OFFSET (cfield);
8113 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
8114 tree field_size = DECL_SIZE (cfield);
8115
8116 if (!field_size)
8117 {
8118 /* Determine the size of the flexible array member from
8119 the size of the initializer provided for it. */
8120 field_size = TYPE_SIZE (TREE_TYPE (cval));
8121 }
8122
8123 /* Variable-sized objects in static constructors make no sense,
8124 but field_size can be NULL for flexible array members. */
8125 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
8126 && TREE_CODE (byte_offset) == INTEGER_CST
8127 && (field_size != NULL_TREE
8128 ? TREE_CODE (field_size) == INTEGER_CST
8129 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
8130
8131 /* Compute bit offset of the field. */
8132 offset_int bitoffset
8133 = (wi::to_offset (t: field_offset)
8134 + (wi::to_offset (t: byte_offset) << LOG2_BITS_PER_UNIT));
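 /* E.g. DECL_FIELD_OFFSET 4 and DECL_FIELD_BIT_OFFSET 3 place the
    field at bit 4 * 8 + 3 = 35 on an 8-bit-byte target. */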
8135 /* Compute bit offset where the field ends. */
8136 offset_int bitoffset_end;
8137 if (field_size != NULL_TREE)
8138 bitoffset_end = bitoffset + wi::to_offset (t: field_size);
8139 else
8140 bitoffset_end = 0;
8141
8142 /* Compute the bit offset of the end of the desired access.
8143 As a special case, if the size of the desired access is
8144 zero, assume the access is to the entire field (and let
8145 the caller make any necessary adjustments based on the
8146 bit offset of the field stored in *SUBOFF). */
8147 offset_int access_end = offset_int (offset);
8148 if (size)
8149 access_end += size;
8150 else
8151 access_end = bitoffset_end;
8152
8153 /* Is there any overlap between the desired access at
8154 [OFFSET, OFFSET+SIZE) and the offset of the field within
8155 the object at [BITOFFSET, BITOFFSET_END)? */
8156 if (wi::cmps (x: access_end, y: bitoffset) > 0
8157 && (field_size == NULL_TREE
8158 || wi::lts_p (x: offset, y: bitoffset_end)))
8159 {
8160 *suboff += bitoffset.to_uhwi ();
8161
8162 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
8163 {
8164 /* For the final reference to the entire accessed member
8165 (SIZE is zero), reset OFFSET, disregard TYPE (which may
8166 be null) in favor of the type of the member, and set
8167 SIZE to the size of the accessed member. */
8168 offset = bitoffset.to_uhwi ();
8169 type = TREE_TYPE (cval);
8170 size = (bitoffset_end - bitoffset).to_uhwi ();
8171 }
8172
8173 /* We do have overlap. Now see if the field is large enough
8174 to cover the access. Give up for accesses that extend
8175 beyond the end of the object or that span multiple fields. */
8176 if (wi::cmps (x: access_end, y: bitoffset_end) > 0)
8177 return NULL_TREE;
8178 if (offset < bitoffset)
8179 return NULL_TREE;
8180
8181 offset_int inner_offset = offset_int (offset) - bitoffset;
8182
8183 /* Integral bit-fields are left-justified on big-endian targets, so
8184 we must arrange for native_encode_int to start at their MSB. */
8185 if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
8186 {
8187 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8188 return NULL_TREE;
8189 const unsigned int encoding_size
8190 = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (cfield)));
8191 if (BYTES_BIG_ENDIAN)
8192 inner_offset += encoding_size - wi::to_offset (t: field_size);
8193 }
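	 /* E.g. a 3-bit field encoded in a 32-bit integer mode on a
	    big-endian target advances INNER_OFFSET by 32 - 3 = 29 bits,
	    so the read starts at the MSB-justified value. */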
8194
8195 return fold_ctor_reference (type, cval,
8196 inner_offset.to_uhwi (), size,
8197 from_decl, suboff);
8198 }
8199 }
8200
8201 if (!type)
8202 return NULL_TREE;
8203
8204 return build_zero_cst (type);
8205}
8206
8207/* CTOR is a value initializing memory. Fold a reference of TYPE and
8208 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8209 is zero, attempt to fold a reference to the entire subobject
8210 which OFFSET refers to. This is used when folding accesses to
8211 string members of aggregates. When non-null, set *SUBOFF to
8212 the bit offset of the accessed subobject. */
8213
8214tree
8215fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8216 const poly_uint64 &poly_size, tree from_decl,
8217 unsigned HOST_WIDE_INT *suboff /* = NULL */)
8218{
8219 tree ret;
8220
8221 /* We found the field with an exact match. */
8222 if (type
8223 && useless_type_conversion_p (type, TREE_TYPE (ctor))
8224 && known_eq (poly_offset, 0U))
8225 return canonicalize_constructor_val (cval: unshare_expr (ctor), from_decl);
8226
8227 /* The remaining optimizations need a constant size and offset. */
8228 unsigned HOST_WIDE_INT size, offset;
8229 if (!poly_size.is_constant (const_value: &size) || !poly_offset.is_constant (const_value: &offset))
8230 return NULL_TREE;
8231
8232 /* We are at the end of the walk; see if we can view-convert the
8233 result. */
8234 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8235 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8236 && known_eq (wi::to_poly_widest (TYPE_SIZE (type)), size)
8237 && known_eq (wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ctor))), size))
8238 {
8239 ret = canonicalize_constructor_val (cval: unshare_expr (ctor), from_decl);
8240 if (ret)
8241 {
8242 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8243 if (ret)
8244 STRIP_USELESS_TYPE_CONVERSION (ret);
8245 }
8246 return ret;
8247 }
8248
8249 /* For constants and byte-aligned/sized reads, try to go through
8250 native_encode/interpret. */
8251 if (CONSTANT_CLASS_P (ctor)
8252 && BITS_PER_UNIT == 8
8253 && offset % BITS_PER_UNIT == 0
8254 && offset / BITS_PER_UNIT <= INT_MAX
8255 && size % BITS_PER_UNIT == 0
8256 && size <= MAX_BITSIZE_MODE_ANY_MODE
8257 && can_native_interpret_type_p (type))
8258 {
8259 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8260 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8261 off: offset / BITS_PER_UNIT);
8262 if (len > 0)
8263 return native_interpret_expr (type, buf, len);
8264 }
8265
8266 /* For constructors, try first a recursive local processing, but in any case
8267 this requires the native storage order. */
8268 if (TREE_CODE (ctor) == CONSTRUCTOR
8269 && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
8270 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
8271 {
8272 unsigned HOST_WIDE_INT dummy = 0;
8273 if (!suboff)
8274 suboff = &dummy;
8275
8276 tree ret;
8277 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8278 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8279 ret = fold_array_ctor_reference (type, ctor, offset, size,
8280 from_decl, suboff);
8281 else
8282 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8283 from_decl, suboff);
8284
8285 /* Otherwise fall back to native_encode_initializer. This may be done
8286 only from the outermost fold_ctor_reference call (because it itself
8287 recurses into CONSTRUCTORs and doesn't update suboff). */
8288 if (ret == NULL_TREE
8289 && suboff == &dummy
8290 && BITS_PER_UNIT == 8
8291 && offset % BITS_PER_UNIT == 0
8292 && offset / BITS_PER_UNIT <= INT_MAX
8293 && size % BITS_PER_UNIT == 0
8294 && size <= MAX_BITSIZE_MODE_ANY_MODE
8295 && can_native_interpret_type_p (type))
8296 {
8297 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8298 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8299 off: offset / BITS_PER_UNIT);
8300 if (len > 0)
8301 return native_interpret_expr (type, buf, len);
8302 }
8303
8304 return ret;
8305 }
8306
8307 return NULL_TREE;
8308}
8309
8310/* Return the tree representing the element referenced by T if T is an
8311 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
8312 names using VALUEIZE. Return NULL_TREE otherwise. */
8313
8314tree
8315fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8316{
8317 tree ctor, idx, base;
8318 poly_int64 offset, size, max_size;
8319 tree tem;
8320 bool reverse;
8321
8322 if (TREE_THIS_VOLATILE (t))
8323 return NULL_TREE;
8324
8325 if (DECL_P (t))
8326 return get_symbol_constant_value (sym: t);
8327
8328 tem = fold_read_from_constant_string (t);
8329 if (tem)
8330 return tem;
8331
8332 switch (TREE_CODE (t))
8333 {
8334 case ARRAY_REF:
8335 case ARRAY_RANGE_REF:
8336 /* Constant indexes are handled well by get_base_constructor.
8337 Only special case variable offsets.
8338 FIXME: This code can't handle nested references with variable indexes
8339 (they will be handled only by iteration of ccp). Perhaps we can bring
8340 get_ref_base_and_extent here and make it use a valueize callback. */
8341 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8342 && valueize
8343 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8344 && poly_int_tree_p (t: idx))
8345 {
8346 tree low_bound, unit_size;
8347
8348 /* If the resulting bit-offset is constant, track it. */
8349 if ((low_bound = array_ref_low_bound (t),
8350 poly_int_tree_p (t: low_bound))
8351 && (unit_size = array_ref_element_size (t),
8352 tree_fits_uhwi_p (unit_size)))
8353 {
8354 poly_offset_int woffset
8355 = wi::sext (a: wi::to_poly_offset (t: idx)
8356 - wi::to_poly_offset (t: low_bound),
8357 TYPE_PRECISION (sizetype));
8358 woffset *= tree_to_uhwi (unit_size);
8359 woffset *= BITS_PER_UNIT;
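	 /* E.g. IDX 5, LOW_BOUND 0 and UNIT_SIZE 4 yield a bit offset
	    of 5 * 4 * 8 = 160, assuming 8-bit units. */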
8360 if (woffset.to_shwi (r: &offset))
8361 {
8362 base = TREE_OPERAND (t, 0);
8363 ctor = get_base_constructor (base, bit_offset: &offset, valueize);
8364 /* Empty constructor. Always fold to 0. */
8365 if (ctor == error_mark_node)
8366 return build_zero_cst (TREE_TYPE (t));
8367 /* Out of bound array access. Value is undefined,
8368 but don't fold. */
8369 if (maybe_lt (a: offset, b: 0))
8370 return NULL_TREE;
8371 /* We cannot determine ctor. */
8372 if (!ctor)
8373 return NULL_TREE;
8374 return fold_ctor_reference (TREE_TYPE (t), ctor, poly_offset: offset,
8375 poly_size: tree_to_uhwi (unit_size)
8376 * BITS_PER_UNIT,
8377 from_decl: base);
8378 }
8379 }
8380 }
8381 /* Fallthru. */
8382
8383 case COMPONENT_REF:
8384 case BIT_FIELD_REF:
8385 case TARGET_MEM_REF:
8386 case MEM_REF:
8387 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8388 ctor = get_base_constructor (base, bit_offset: &offset, valueize);
8389
8390 /* Empty constructor. Always fold to 0. */
8391 if (ctor == error_mark_node)
8392 return build_zero_cst (TREE_TYPE (t));
8393 /* We do not know the precise address. */
8394 if (!known_size_p (a: max_size) || maybe_ne (a: max_size, b: size))
8395 return NULL_TREE;
8396 /* We cannot determine ctor. */
8397 if (!ctor)
8398 return NULL_TREE;
8399
8400 /* Out of bound array access. Value is undefined, but don't fold. */
8401 if (maybe_lt (a: offset, b: 0))
8402 return NULL_TREE;
8403
8404 tem = fold_ctor_reference (TREE_TYPE (t), ctor, poly_offset: offset, poly_size: size, from_decl: base);
8405 if (tem)
8406 return tem;
8407
8408 /* For bit field reads try to read the representative and
8409 adjust. */
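 /* Illustrative sketch: for a 3-bit bit-field at bit 2 of its 32-bit
    representative, read the whole representative word from the
    constructor and shift right by 2 (little-endian) or by the
    precision difference minus 2 (big-endian) to recover the field. */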
8410 if (TREE_CODE (t) == COMPONENT_REF
8411 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8412 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8413 {
8414 HOST_WIDE_INT csize, coffset;
8415 tree field = TREE_OPERAND (t, 1);
8416 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8417 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8418 && size.is_constant (const_value: &csize)
8419 && offset.is_constant (const_value: &coffset)
8420 && (coffset % BITS_PER_UNIT != 0
8421 || csize % BITS_PER_UNIT != 0)
8422 && !reverse
8423 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8424 {
8425 poly_int64 bitoffset;
8426 poly_uint64 field_offset, repr_offset;
8427 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), value: &field_offset)
8428 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), value: &repr_offset))
8429 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8430 else
8431 bitoffset = 0;
8432 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8433 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8434 HOST_WIDE_INT bitoff;
8435 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8436 - TYPE_PRECISION (TREE_TYPE (field)));
8437 if (bitoffset.is_constant (const_value: &bitoff)
8438 && bitoff >= 0
8439 && bitoff <= diff)
8440 {
8441 offset -= bitoff;
8442 size = tree_to_uhwi (DECL_SIZE (repr));
8443
8444 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, poly_offset: offset,
8445 poly_size: size, from_decl: base);
8446 if (tem && TREE_CODE (tem) == INTEGER_CST)
8447 {
8448 if (!BYTES_BIG_ENDIAN)
8449 tem = wide_int_to_tree (TREE_TYPE (field),
8450 cst: wi::lrshift (x: wi::to_wide (t: tem),
8451 y: bitoff));
8452 else
8453 tem = wide_int_to_tree (TREE_TYPE (field),
8454 cst: wi::lrshift (x: wi::to_wide (t: tem),
8455 y: diff - bitoff));
8456 return tem;
8457 }
8458 }
8459 }
8460 }
8461 break;
8462
8463 case REALPART_EXPR:
8464 case IMAGPART_EXPR:
8465 {
8466 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8467 if (c && TREE_CODE (c) == COMPLEX_CST)
8468 return fold_build1_loc (EXPR_LOCATION (t),
8469 TREE_CODE (t), TREE_TYPE (t), c);
8470 break;
8471 }
8472
8473 default:
8474 break;
8475 }
8476
8477 return NULL_TREE;
8478}
8479
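/* Same as fold_const_aggregate_ref_1, but without valueizing SSA names. */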
8480tree
8481fold_const_aggregate_ref (tree t)
8482{
8483 return fold_const_aggregate_ref_1 (t, NULL);
8484}
8485
8486/* Look up the virtual method with index TOKEN in a virtual table V
8487 at OFFSET.
8488 Set CAN_REFER, if non-NULL, to false if the method
8489 is not referable or if the virtual table is ill-formed (such as rewritten
8490 by a non-C++-produced symbol); just return NULL in that case. */
8491
8492tree
8493gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8494 tree v,
8495 unsigned HOST_WIDE_INT offset,
8496 bool *can_refer)
8497{
8498 tree vtable = v, init, fn;
8499 unsigned HOST_WIDE_INT size;
8500 unsigned HOST_WIDE_INT elt_size, access_index;
8501 tree domain_type;
8502
8503 if (can_refer)
8504 *can_refer = true;
8505
8506 /* First of all, double-check that we have a virtual table. */
8507 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8508 {
8509 /* Pass down that we lost track of the target. */
8510 if (can_refer)
8511 *can_refer = false;
8512 return NULL_TREE;
8513 }
8514
8515 init = ctor_for_folding (v);
8516
8517 /* Virtual tables should always be born with constructors
8518 and we should always assume that they are available for
8519 folding. At the moment we do not stream them in all cases,
8520 but it should never happen that the ctor seems unreachable. */
8521 gcc_assert (init);
8522 if (init == error_mark_node)
8523 {
8524 /* Pass down that we lost track of the target. */
8525 if (can_refer)
8526 *can_refer = false;
8527 return NULL_TREE;
8528 }
8529 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8530 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8531 offset *= BITS_PER_UNIT;
8532 offset += token * size;
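 /* E.g. with 64-bit vtable entries (SIZE == 64), OFFSET 16 and TOKEN 2
    (made-up values for illustration) this yields bit position
    16 * 8 + 2 * 64 == 256 into the vtable array. */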
8533
8534 /* Look up the value in the constructor, which is assumed to be an array.
8535 This is equivalent to
8536 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8537 offset, size, NULL);
8538 but runs in constant time. We expect that the frontend produced a simple
8539 array without indexed initializers. */
8540
8541 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8542 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8543 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8544 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8545
8546 access_index = offset / BITS_PER_UNIT / elt_size;
8547 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8548
8549 /* The C++ FE can now produce indexed fields, and we check if the indexes
8550 match. */
8551 if (access_index < CONSTRUCTOR_NELTS (init))
8552 {
8553 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8554 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8555 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8556 STRIP_NOPS (fn);
8557 }
8558 else
8559 fn = NULL;
8560
8561 /* For a type-inconsistent program we may end up looking up a virtual
8562 method in a virtual table that does not contain TOKEN entries. We may
8563 overrun the virtual table and pick up a constant or RTTI info pointer.
8564 In any case the call is undefined. */
8565 if (!fn
8566 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8567 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8568 fn = builtin_decl_unreachable ();
8569 else
8570 {
8571 fn = TREE_OPERAND (fn, 0);
8572
8573 /* When the cgraph node is missing and the function is not public, we
8574 cannot devirtualize. This can happen in WHOPR when the actual method
8575 ends up in another partition, because we found the devirtualization
8576 possibility too late. */
8577 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8578 {
8579 if (can_refer)
8580 {
8581 *can_refer = false;
8582 return fn;
8583 }
8584 return NULL_TREE;
8585 }
8586 }
8587
8588 /* Make sure we create a cgraph node for functions we'll reference.
8589 They can be non-existent if the reference comes from an entry
8590 of an external vtable for example. */
8591 cgraph_node::get_create (fn);
8592
8593 return fn;
8594}
8595
8596/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8597 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8598 KNOWN_BINFO carries the binfo describing the true type of
8599 OBJ_TYPE_REF_OBJECT(REF).
8600 If CAN_REFER is non-NULL, set it to false if the method
8601 is not referable or if the virtual table is ill-formed (such as one
8602 rewritten by a non-C++ produced symbol); in that case return NULL. */
8603
8604tree
8605gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8606 bool *can_refer)
8607{
8608 unsigned HOST_WIDE_INT offset;
8609 tree v;
8610
8611 v = BINFO_VTABLE (known_binfo);
8612 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
8613 if (!v)
8614 return NULL_TREE;
8615
8616 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8617 {
8618 if (can_refer)
8619 *can_refer = false;
8620 return NULL_TREE;
8621 }
8622 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8623}
8624
8625/* Given a pointer value T, return a simplified version of an
8626 indirection through T, or NULL_TREE if no simplification is
8627 possible. Note that the resulting type may differ from
8628 the type pointed to, in the sense that it is still compatible
8629 with it from the langhooks point of view. */
8630
8631tree
8632gimple_fold_indirect_ref (tree t)
8633{
8634 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8635 tree sub = t;
8636 tree subtype;
8637
8638 STRIP_NOPS (sub);
8639 subtype = TREE_TYPE (sub);
8640 if (!POINTER_TYPE_P (subtype)
8641 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8642 return NULL_TREE;
8643
8644 if (TREE_CODE (sub) == ADDR_EXPR)
8645 {
8646 tree op = TREE_OPERAND (sub, 0);
8647 tree optype = TREE_TYPE (op);
8648 /* *&p => p */
8649 if (useless_type_conversion_p (type, optype))
8650 return op;
8651
8652 /* *(foo *)&fooarray => fooarray[0] */
8653 if (TREE_CODE (optype) == ARRAY_TYPE
8654 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8655 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8656 {
8657 tree type_domain = TYPE_DOMAIN (optype);
8658 tree min_val = size_zero_node;
8659 if (type_domain && TYPE_MIN_VALUE (type_domain))
8660 min_val = TYPE_MIN_VALUE (type_domain);
8661 if (TREE_CODE (min_val) == INTEGER_CST)
8662 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8663 }
8664 /* *(foo *)&complexfoo => __real__ complexfoo */
8665 else if (TREE_CODE (optype) == COMPLEX_TYPE
8666 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8667 return fold_build1 (REALPART_EXPR, type, op);
8668 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8669 else if (TREE_CODE (optype) == VECTOR_TYPE
8670 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8671 {
8672 tree part_width = TYPE_SIZE (type);
8673 tree index = bitsize_int (0);
8674 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8675 }
8676 }
8677
8678 /* *(p + CST) -> ... */
8679 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8680 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8681 {
8682 tree addr = TREE_OPERAND (sub, 0);
8683 tree off = TREE_OPERAND (sub, 1);
8684 tree addrtype;
8685
8686 STRIP_NOPS (addr);
8687 addrtype = TREE_TYPE (addr);
8688
8689 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8690 if (TREE_CODE (addr) == ADDR_EXPR
8691 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8692 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8693 && tree_fits_uhwi_p (off))
8694 {
8695 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8696 tree part_width = TYPE_SIZE (type);
8697 unsigned HOST_WIDE_INT part_widthi
8698 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8699 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8700 tree index = bitsize_int (indexi);
8701 if (known_lt (offset / part_widthi,
8702 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8703 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8704 part_width, index);
8705 }
8706
8707 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8708 if (TREE_CODE (addr) == ADDR_EXPR
8709 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8710 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8711 {
8712 tree size = TYPE_SIZE_UNIT (type);
8713 if (tree_int_cst_equal (size, off))
8714 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8715 }
8716
8717 /* *(p + CST) -> MEM_REF <p, CST>. */
8718 if (TREE_CODE (addr) != ADDR_EXPR
8719 || DECL_P (TREE_OPERAND (addr, 0)))
8720 return fold_build2 (MEM_REF, type,
8721 addr,
8722 wide_int_to_tree (ptype, wi::to_wide (off)));
8723 }
8724
8725 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8726 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8727 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8728 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8729 {
8730 tree type_domain;
8731 tree min_val = size_zero_node;
8732 tree osub = sub;
8733 sub = gimple_fold_indirect_ref (sub);
8734 if (! sub)
8735 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8736 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8737 if (type_domain && TYPE_MIN_VALUE (type_domain))
8738 min_val = TYPE_MIN_VALUE (type_domain);
8739 if (TREE_CODE (min_val) == INTEGER_CST)
8740 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8741 }
8742
8743 return NULL_TREE;
8744}
8745
8746/* Return true if CODE is an operation that when operating on signed
8747 integer types involves undefined behavior on overflow and the
8748 operation can be expressed with unsigned arithmetic. */
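/* For instance, signed PLUS_EXPR invokes undefined behavior on overflow,
   while the same addition carried out in the corresponding unsigned type
   is well-defined, wraps, and produces the same bit pattern. */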
8749
8750bool
8751arith_code_with_undefined_signed_overflow (tree_code code)
8752{
8753 switch (code)
8754 {
8755 case ABS_EXPR:
8756 case PLUS_EXPR:
8757 case MINUS_EXPR:
8758 case MULT_EXPR:
8759 case NEGATE_EXPR:
8760 case POINTER_PLUS_EXPR:
8761 return true;
8762 default:
8763 return false;
8764 }
8765}
8766
8767/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8768 operation that can be transformed to unsigned arithmetic by converting
8769 its operand, carrying out the operation in the corresponding unsigned
8770 type and converting the result back to the original type.
8771
8772 If IN_PLACE is true, *GSI points to STMT, adjust the stmt in place and
8773 return NULL.
8774 Otherwise returns a sequence of statements that replace STMT and also
8775 contain a modified form of STMT itself. */
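/* As a sketch of the rewrite (the SSA names are illustrative), a signed
   addition

     lhs = op0 + op1;

   becomes

     op0.u = (unsigned type) op0;
     op1.u = (unsigned type) op1;
     tmp.u = op0.u + op1.u;
     lhs = (type) tmp.u;  */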
8776
8777static gimple_seq
8778rewrite_to_defined_overflow (gimple_stmt_iterator *gsi, gimple *stmt,
8779 bool in_place)
8780{
8781 if (dump_file && (dump_flags & TDF_DETAILS))
8782 {
8783 fprintf (dump_file, "rewriting stmt with undefined signed "
8784 "overflow ");
8785 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8786 }
8787
8788 tree lhs = gimple_assign_lhs (stmt);
8789 tree type = unsigned_type_for (TREE_TYPE (lhs));
8790 gimple_seq stmts = NULL;
8791 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8792 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8793 else
8794 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8795 {
8796 tree op = gimple_op (stmt, i);
8797 op = gimple_convert (&stmts, type, op);
8798 gimple_set_op (stmt, i, op);
8799 }
8800 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8801 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8802 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8803 gimple_set_modified (stmt, true);
8804 if (in_place)
8805 {
8806 if (stmts)
8807 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
8808 stmts = NULL;
8809 }
8810 else
8811 gimple_seq_add_stmt (&stmts, stmt);
8812 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8813 if (in_place)
8814 {
8815 gsi_insert_after (gsi, cvt, GSI_SAME_STMT);
8816 update_stmt (stmt);
8817 }
8818 else
8819 gimple_seq_add_stmt (&stmts, cvt);
8820
8821 return stmts;
8822}
8823
8824void
8825rewrite_to_defined_overflow (gimple_stmt_iterator *gsi)
8826{
8827 rewrite_to_defined_overflow (gsi, gsi_stmt (*gsi), true);
8828}
8829
8830gimple_seq
8831rewrite_to_defined_overflow (gimple *stmt)
8832{
8833 return rewrite_to_defined_overflow (nullptr, stmt, false);
8834}
8835
8836/* The valueization hook we use for the gimple_build API simplification.
8837 This makes us match fold_buildN behavior by only combining with
8838 statements in the sequence(s) we are currently building. */
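/* Statements built but not yet inserted have no basic block, so the
   defs of their SSA names have a NULL gimple_bb; only those defs are
   exposed to the simplifier here. */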
8839
8840static tree
8841gimple_build_valueize (tree op)
8842{
8843 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8844 return op;
8845 return NULL_TREE;
8846}
8847
8848/* Helper for gimple_build to perform the final insertion of stmts on SEQ. */
8849
8850static inline void
8851gimple_build_insert_seq (gimple_stmt_iterator *gsi,
8852 bool before, gsi_iterator_update update,
8853 gimple_seq seq)
8854{
8855 if (before)
8856 {
8857 if (gsi->bb)
8858 gsi_insert_seq_before (gsi, seq, update);
8859 else
8860 gsi_insert_seq_before_without_update (gsi, seq, update);
8861 }
8862 else
8863 {
8864 if (gsi->bb)
8865 gsi_insert_seq_after (gsi, seq, update);
8866 else
8867 gsi_insert_seq_after_without_update (gsi, seq, update);
8868 }
8869}
8870
8871/* Build the expression CODE OP0 of type TYPE with location LOC,
8872 simplifying it first if possible. Returns the built
8873 expression value and inserts statements possibly defining it
8874 before GSI if BEFORE is true or after GSI if false, and advances
8875 the iterator accordingly.
8876 If GSI refers to a basic block, simplification is allowed to look
8877 at all SSA defs; when it does not, it is restricted to
8878 SSA defs that are not associated with a basic block yet,
8879 indicating that they belong to the sequence currently being built. */
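/* A usage sketch (the names involved are illustrative):

     tree res = gimple_build (&gsi, true, GSI_SAME_STMT, loc,
			      NEGATE_EXPR, type, op);

   computes -op, inserting any needed statement before GSI; RES may be
   an existing SSA name or a constant if the negation simplified.  */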
8880
8881tree
8882gimple_build (gimple_stmt_iterator *gsi,
8883 bool before, gsi_iterator_update update,
8884 location_t loc, enum tree_code code, tree type, tree op0)
8885{
8886 gimple_seq seq = NULL;
8887 tree res
8888 = gimple_simplify (code, type, op0, &seq,
8889 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8890 if (!res)
8891 {
8892 res = create_tmp_reg_or_ssa_name (type);
8893 gimple *stmt;
8894 if (code == REALPART_EXPR
8895 || code == IMAGPART_EXPR
8896 || code == VIEW_CONVERT_EXPR)
8897 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8898 else
8899 stmt = gimple_build_assign (res, code, op0);
8900 gimple_set_location (stmt, loc);
8901 gimple_seq_add_stmt_without_update (&seq, stmt);
8902 }
8903 gimple_build_insert_seq (gsi, before, update, seq);
8904 return res;
8905}
8906
8907/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8908 simplifying it first if possible. Returns the built
8909 expression value inserting any new statements at GSI honoring BEFORE
8910 and UPDATE. */
8911
8912tree
8913gimple_build (gimple_stmt_iterator *gsi,
8914 bool before, gsi_iterator_update update,
8915 location_t loc, enum tree_code code, tree type,
8916 tree op0, tree op1)
8917{
8918 gimple_seq seq = NULL;
8919 tree res
8920 = gimple_simplify (code, type, op0, op1, &seq,
8921 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8922 if (!res)
8923 {
8924 res = create_tmp_reg_or_ssa_name (type);
8925 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8926 gimple_set_location (stmt, loc);
8927 gimple_seq_add_stmt_without_update (&seq, stmt);
8928 }
8929 gimple_build_insert_seq (gsi, before, update, seq);
8930 return res;
8931}
8932
8933/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8934 simplifying it first if possible. Returns the built
8935 expression value inserting any new statements at GSI honoring BEFORE
8936 and UPDATE. */
8937
8938tree
8939gimple_build (gimple_stmt_iterator *gsi,
8940 bool before, gsi_iterator_update update,
8941 location_t loc, enum tree_code code, tree type,
8942 tree op0, tree op1, tree op2)
8943{
8945 gimple_seq seq = NULL;
8946 tree res
8947 = gimple_simplify (code, type, op0, op1, op2, &seq,
8948 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8949 if (!res)
8950 {
8951 res = create_tmp_reg_or_ssa_name (type);
8952 gimple *stmt;
8953 if (code == BIT_FIELD_REF)
8954 stmt = gimple_build_assign (res, code,
8955 build3 (code, type, op0, op1, op2));
8956 else
8957 stmt = gimple_build_assign (res, code, op0, op1, op2);
8958 gimple_set_location (stmt, loc);
8959 gimple_seq_add_stmt_without_update (&seq, stmt);
8960 }
8961 gimple_build_insert_seq (gsi, before, update, seq);
8962 return res;
8963}
8964
8965/* Build the call FN () with a result of type TYPE (or no result if TYPE is
8966 void) with a location LOC. Returns the built expression value (or NULL_TREE
8967 if TYPE is void) inserting any new statements at GSI honoring BEFORE
8968 and UPDATE. */
8969
8970tree
8971gimple_build (gimple_stmt_iterator *gsi,
8972 bool before, gsi_iterator_update update,
8973 location_t loc, combined_fn fn, tree type)
8974{
8975 tree res = NULL_TREE;
8976 gimple_seq seq = NULL;
8977 gcall *stmt;
8978 if (internal_fn_p (fn))
8979 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8980 else
8981 {
8982 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8983 stmt = gimple_build_call (decl, 0);
8984 }
8985 if (!VOID_TYPE_P (type))
8986 {
8987 res = create_tmp_reg_or_ssa_name (type);
8988 gimple_call_set_lhs (stmt, res);
8989 }
8990 gimple_set_location (stmt, loc);
8991 gimple_seq_add_stmt_without_update (&seq, stmt);
8992 gimple_build_insert_seq (gsi, before, update, seq);
8993 return res;
8994}
8995
8996/* Build the call FN (ARG0) with a result of type TYPE
8997 (or no result if TYPE is void) with location LOC,
8998 simplifying it first if possible. Returns the built
8999 expression value (or NULL_TREE if TYPE is void) inserting any new
9000 statements at GSI honoring BEFORE and UPDATE. */
9001
9002tree
9003gimple_build (gimple_stmt_iterator *gsi,
9004 bool before, gsi_iterator_update update,
9005 location_t loc, combined_fn fn,
9006 tree type, tree arg0)
9007{
9008 gimple_seq seq = NULL;
9009 tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
9010 if (!res)
9011 {
9012 gcall *stmt;
9013 if (internal_fn_p (fn))
9014 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
9015 else
9016 {
9017 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9018 stmt = gimple_build_call (decl, 1, arg0);
9019 }
9020 if (!VOID_TYPE_P (type))
9021 {
9022 res = create_tmp_reg_or_ssa_name (type);
9023 gimple_call_set_lhs (stmt, res);
9024 }
9025 gimple_set_location (stmt, loc);
9026 gimple_seq_add_stmt_without_update (&seq, stmt);
9027 }
9028 gimple_build_insert_seq (gsi, before, update, seq);
9029 return res;
9030}
9031
9032/* Build the call FN (ARG0, ARG1) with a result of type TYPE
9033 (or no result if TYPE is void) with location LOC,
9034 simplifying it first if possible. Returns the built
9035 expression value (or NULL_TREE if TYPE is void) inserting any new
9036 statements at GSI honoring BEFORE and UPDATE. */
9037
9038tree
9039gimple_build (gimple_stmt_iterator *gsi,
9040 bool before, gsi_iterator_update update,
9041 location_t loc, combined_fn fn,
9042 tree type, tree arg0, tree arg1)
9043{
9044 gimple_seq seq = NULL;
9045 tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
9046 gimple_build_valueize);
9047 if (!res)
9048 {
9049 gcall *stmt;
9050 if (internal_fn_p (fn))
9051 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
9052 else
9053 {
9054 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9055 stmt = gimple_build_call (decl, 2, arg0, arg1);
9056 }
9057 if (!VOID_TYPE_P (type))
9058 {
9059 res = create_tmp_reg_or_ssa_name (type);
9060 gimple_call_set_lhs (stmt, res);
9061 }
9062 gimple_set_location (stmt, loc);
9063 gimple_seq_add_stmt_without_update (&seq, stmt);
9064 }
9065 gimple_build_insert_seq (gsi, before, update, seq);
9066 return res;
9067}
9068
9069/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
9070 (or no result if TYPE is void) with location LOC,
9071 simplifying it first if possible. Returns the built
9072 expression value (or NULL_TREE if TYPE is void) inserting any new
9073 statements at GSI honoring BEFORE and UPDATE. */
9074
9075tree
9076gimple_build (gimple_stmt_iterator *gsi,
9077 bool before, gsi_iterator_update update,
9078 location_t loc, combined_fn fn,
9079 tree type, tree arg0, tree arg1, tree arg2)
9080{
9081 gimple_seq seq = NULL;
9082 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
9083 &seq, gimple_build_valueize);
9084 if (!res)
9085 {
9086 gcall *stmt;
9087 if (internal_fn_p (fn))
9088 stmt = gimple_build_call_internal (as_internal_fn (fn),
9089 3, arg0, arg1, arg2);
9090 else
9091 {
9092 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9093 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
9094 }
9095 if (!VOID_TYPE_P (type))
9096 {
9097 res = create_tmp_reg_or_ssa_name (type);
9098 gimple_call_set_lhs (stmt, res);
9099 }
9100 gimple_set_location (stmt, loc);
9101 gimple_seq_add_stmt_without_update (&seq, stmt);
9102 }
9103 gimple_build_insert_seq (gsi, before, update, seq);
9104 return res;
9105}
9106
9107/* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
9108 void) with location LOC, simplifying it first if possible. Returns the
9109 built expression value (or NULL_TREE if TYPE is void) inserting any new
9110 statements at GSI honoring BEFORE and UPDATE. */
9111
9112tree
9113gimple_build (gimple_stmt_iterator *gsi,
9114 bool before, gsi_iterator_update update,
9115 location_t loc, code_helper code, tree type, tree op0)
9116{
9117 if (code.is_tree_code ())
9118 return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
9119 return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
9120}
9121
9122/* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
9123 void) with location LOC, simplifying it first if possible. Returns the
9124 built expression value (or NULL_TREE if TYPE is void) inserting any new
9125 statements at GSI honoring BEFORE and UPDATE. */
9126
9127tree
9128gimple_build (gimple_stmt_iterator *gsi,
9129 bool before, gsi_iterator_update update,
9130 location_t loc, code_helper code, tree type, tree op0, tree op1)
9131{
9132 if (code.is_tree_code ())
9133 return gimple_build (gsi, before, update,
9134 loc, tree_code (code), type, op0, op1);
9135 return gimple_build (gsi, before, update,
9136 loc, combined_fn (code), type, op0, op1);
9137}
9138
9139/* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
9140 is void) with location LOC, simplifying it first if possible. Returns the
9141 built expression value (or NULL_TREE if TYPE is void) inserting any new
9142 statements at GSI honoring BEFORE and UPDATE. */
9143
9144tree
9145gimple_build (gimple_stmt_iterator *gsi,
9146 bool before, gsi_iterator_update update,
9147 location_t loc, code_helper code,
9148 tree type, tree op0, tree op1, tree op2)
9149{
9150 if (code.is_tree_code ())
9151 return gimple_build (gsi, before, update,
9152 loc, tree_code (code), type, op0, op1, op2);
9153 return gimple_build (gsi, before, update,
9154 loc, combined_fn (code), type, op0, op1, op2);
9155}
9156
9157/* Build the conversion (TYPE) OP with a result of type TYPE
9158 with location LOC if such a conversion is necessary in GIMPLE,
9159 simplifying it first.
9160 Returns the built expression inserting any new statements
9161 at GSI honoring BEFORE and UPDATE. */
9162
9163tree
9164gimple_convert (gimple_stmt_iterator *gsi,
9165 bool before, gsi_iterator_update update,
9166 location_t loc, tree type, tree op)
9167{
9168 if (useless_type_conversion_p (type, TREE_TYPE (op)))
9169 return op;
9170 return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
9171}
9172
9173/* Build the conversion (ptrofftype) OP with a result of a type
9174 compatible with ptrofftype with location LOC if such conversion
9175 is necessary in GIMPLE, simplifying it first.
9176 Returns the built expression value inserting any new statements
9177 at GSI honoring BEFORE and UPDATE. */
9178
9179tree
9180gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
9181 bool before, gsi_iterator_update update,
9182 location_t loc, tree op)
9183{
9184 if (ptrofftype_p (TREE_TYPE (op)))
9185 return op;
9186 return gimple_convert (gsi, before, update, loc, sizetype, op);
9187}
9188
9189/* Build a vector of type TYPE in which each element has the value OP.
9190 Return a gimple value for the result, inserting any new statements
9191 at GSI honoring BEFORE and UPDATE. */
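/* For a variable-length vector with a non-constant OP this emits a
   VEC_DUPLICATE_EXPR; otherwise it tries build_vector_from_val and
   falls back to assigning the built vector to a temporary. */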
9192
9193tree
9194gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
9195 bool before, gsi_iterator_update update,
9196 location_t loc, tree type, tree op)
9197{
9198 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
9199 && !CONSTANT_CLASS_P (op))
9200 return gimple_build (gsi, before, update,
9201 loc, VEC_DUPLICATE_EXPR, type, op);
9202
9203 tree res, vec = build_vector_from_val (type, op);
9204 if (is_gimple_val (vec))
9205 return vec;
9206 if (gimple_in_ssa_p (cfun))
9207 res = make_ssa_name (type);
9208 else
9209 res = create_tmp_reg (type);
9210 gimple_seq seq = NULL;
9211 gimple *stmt = gimple_build_assign (res, vec);
9212 gimple_set_location (stmt, loc);
9213 gimple_seq_add_stmt_without_update (&seq, stmt);
9214 gimple_build_insert_seq (gsi, before, update, seq);
9215 return res;
9216}
9217
9218/* Build a vector from BUILDER, handling the case in which some elements
9219 are non-constant. Return a gimple value for the result, inserting
9220 any new instructions to GSI honoring BEFORE and UPDATE.
9221
9222 BUILDER must not have a stepped encoding on entry. This is because
9223 the function is not geared up to handle the arithmetic that would
9224 be needed in the variable case, and any code building a vector that
9225 is known to be constant should use BUILDER->build () directly. */
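/* E.g. a builder with two interleaved one-element patterns { 0 } and
   { x } describes the vector { 0, x, 0, x, ... }; if x is not constant
   the result is built with an explicit CONSTRUCTOR. */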
9226
9227tree
9228gimple_build_vector (gimple_stmt_iterator *gsi,
9229 bool before, gsi_iterator_update update,
9230 location_t loc, tree_vector_builder *builder)
9231{
9232 gcc_assert (builder->nelts_per_pattern () <= 2);
9233 unsigned int encoded_nelts = builder->encoded_nelts ();
9234 for (unsigned int i = 0; i < encoded_nelts; ++i)
9235 if (!CONSTANT_CLASS_P ((*builder)[i]))
9236 {
9237 gimple_seq seq = NULL;
9238 tree type = builder->type ();
9239 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
9240 vec<constructor_elt, va_gc> *v;
9241 vec_alloc (v, nelts);
9242 for (i = 0; i < nelts; ++i)
9243 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
9244
9245 tree res;
9246 if (gimple_in_ssa_p (cfun))
9247 res = make_ssa_name (type);
9248 else
9249 res = create_tmp_reg (type);
9250 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
9251 gimple_set_location (stmt, loc);
9252 gimple_seq_add_stmt_without_update (&seq, stmt);
9253 gimple_build_insert_seq (gsi, before, update, seq);
9254 return res;
9255 }
9256 return builder->build ();
9257}
9258
9259/* Emit gimple statements that take the value given in OLD_SIZE
9260 and generate a value guaranteed to be rounded up to a multiple of ALIGN.
9261
9262 Return the tree node representing this size; it is of type TYPE. */
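/* For instance, with OLD_SIZE 13 and ALIGN 8 (ALIGN is assumed to be a
   power of two) the emitted statements compute (13 + 7) & -8 == 16. */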
9263
9264tree
9265gimple_build_round_up (gimple_stmt_iterator *gsi,
9266 bool before, gsi_iterator_update update,
9267 location_t loc, tree type,
9268 tree old_size, unsigned HOST_WIDE_INT align)
9269{
9270 unsigned HOST_WIDE_INT tg_mask = align - 1;
9271 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
9272 gcc_assert (INTEGRAL_TYPE_P (type));
9273 tree tree_mask = build_int_cst (type, tg_mask);
9274 tree oversize = gimple_build (gsi, before, update,
9275 loc, PLUS_EXPR, type, old_size, tree_mask);
9276
9277 tree mask = build_int_cst (type, -align);
9278 return gimple_build (gsi, before, update,
9279 loc, BIT_AND_EXPR, type, oversize, mask);
9280}
9281
9282/* Return true if the result of assignment STMT is known to be non-negative.
9283 If the return value is based on the assumption that signed overflow is
9284 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9285 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9286
9287static bool
9288gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9289 int depth)
9290{
9291 enum tree_code code = gimple_assign_rhs_code (stmt);
9292 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
9293 switch (get_gimple_rhs_class (code))
9294 {
9295 case GIMPLE_UNARY_RHS:
9296 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
9297 type,
9298 gimple_assign_rhs1 (stmt),
9299 strict_overflow_p, depth);
9300 case GIMPLE_BINARY_RHS:
9301 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
9302 type,
9303 gimple_assign_rhs1 (stmt),
9304 gimple_assign_rhs2 (stmt),
9305 strict_overflow_p, depth);
9306 case GIMPLE_TERNARY_RHS:
9307 return false;
9308 case GIMPLE_SINGLE_RHS:
9309 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
9310 strict_overflow_p, depth);
9311 case GIMPLE_INVALID_RHS:
9312 break;
9313 }
9314 gcc_unreachable ();
9315}
9316
9317/* Return true if the return value of call STMT is known to be non-negative.
9318 If the return value is based on the assumption that signed overflow is
9319 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9320 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9321
9322static bool
9323gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9324 int depth)
9325{
9326 tree arg0 = gimple_call_num_args (stmt) > 0 ?
9327 gimple_call_arg (stmt, 0) : NULL_TREE;
9328 tree arg1 = gimple_call_num_args (stmt) > 1 ?
9329 gimple_call_arg (stmt, 1) : NULL_TREE;
9330 tree lhs = gimple_call_lhs (stmt);
9331 return (lhs
9332 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
9333 gimple_call_combined_fn (stmt),
9334 arg0, arg1,
9335 strict_overflow_p, depth));
9336}
9337
9338/* Return true if the result of phi STMT is known to be non-negative.
9339 If the return value is based on the assumption that signed overflow is
9340 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9341 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9342
9343static bool
9344gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9345 int depth)
9346{
9347 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9348 {
9349 tree arg = gimple_phi_arg_def (stmt, i);
9350 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
9351 return false;
9352 }
9353 return true;
9354}
9355
9356/* Return true if STMT is known to compute a non-negative value.
9357 If the return value is based on the assumption that signed overflow is
9358 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9359 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9360
9361bool
9362gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9363 int depth)
9364{
9365 tree type = gimple_range_type (stmt);
9366 if (type && frange::supports_p (type))
9367 {
9368 frange r;
9369 bool sign;
9370 if (get_global_range_query ()->range_of_stmt (r, stmt)
9371 && r.signbit_p (sign))
9372 return !sign;
9373 }
9374 switch (gimple_code (stmt))
9375 {
9376 case GIMPLE_ASSIGN:
9377 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
9378 depth);
9379 case GIMPLE_CALL:
9380 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
9381 depth);
9382 case GIMPLE_PHI:
9383 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
9384 depth);
9385 default:
9386 return false;
9387 }
9388}
9389
9390/* Return true if the floating-point value computed by assignment STMT
9391 is known to have an integer value. We also allow +Inf, -Inf and NaN
9392 to be considered integer values. Return false for signaling NaN.
9393
9394 DEPTH is the current nesting depth of the query. */
9395
9396static bool
9397gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
9398{
9399 enum tree_code code = gimple_assign_rhs_code (stmt);
9400 switch (get_gimple_rhs_class (code))
9401 {
9402 case GIMPLE_UNARY_RHS:
9403 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
9404 gimple_assign_rhs1 (stmt), depth);
9405 case GIMPLE_BINARY_RHS:
9406 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
9407 gimple_assign_rhs1 (stmt),
9408 gimple_assign_rhs2 (stmt), depth);
9409 case GIMPLE_TERNARY_RHS:
9410 return false;
9411 case GIMPLE_SINGLE_RHS:
9412 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9413 case GIMPLE_INVALID_RHS:
9414 break;
9415 }
9416 gcc_unreachable ();
9417}
9418
9419/* Return true if the floating-point value computed by call STMT is known
9420 to have an integer value. We also allow +Inf, -Inf and NaN to be
9421 considered integer values. Return false for signaling NaN.
9422
9423 DEPTH is the current nesting depth of the query. */
9424
9425static bool
9426gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9427{
9428 tree arg0 = (gimple_call_num_args (stmt) > 0
9429 ? gimple_call_arg (stmt, 0)
9430 : NULL_TREE);
9431 tree arg1 = (gimple_call_num_args (stmt) > 1
9432 ? gimple_call_arg (stmt, 1)
9433 : NULL_TREE);
9434 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
9435 arg0, arg1, depth);
9436}
9437
9438/* Return true if the floating-point result of phi STMT is known to have
9439 an integer value. We also allow +Inf, -Inf and NaN to be considered
9440 integer values. Return false for signaling NaN.
9441
9442 DEPTH is the current nesting depth of the query. */
9443
9444static bool
9445gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9446{
9447 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9448 {
9449 tree arg = gimple_phi_arg_def (stmt, i);
9450 if (!integer_valued_real_single_p (arg, depth + 1))
9451 return false;
9452 }
9453 return true;
9454}
9455
9456/* Return true if the floating-point value computed by STMT is known
9457 to have an integer value. We also allow +Inf, -Inf and NaN to be
9458 considered integer values. Return false for signaling NaN.
9459
9460 DEPTH is the current nesting depth of the query. */
9461
9462bool
9463gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9464{
9465 switch (gimple_code (stmt))
9466 {
9467 case GIMPLE_ASSIGN:
9468 return gimple_assign_integer_valued_real_p (stmt, depth);
9469 case GIMPLE_CALL:
9470 return gimple_call_integer_valued_real_p (stmt, depth);
9471 case GIMPLE_PHI:
9472 return gimple_phi_integer_valued_real_p (stmt, depth);
9473 default:
9474 return false;
9475 }
9476}
9477
