1 | /* Helpers for the autogenerated gimple-match.cc file. |
2 | Copyright (C) 2023-2024 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "target.h" |
25 | #include "rtl.h" |
26 | #include "tree.h" |
27 | #include "gimple.h" |
28 | #include "ssa.h" |
29 | #include "cgraph.h" |
30 | #include "vec-perm-indices.h" |
31 | #include "fold-const.h" |
32 | #include "fold-const-call.h" |
33 | #include "stor-layout.h" |
34 | #include "gimple-iterator.h" |
35 | #include "gimple-fold.h" |
36 | #include "calls.h" |
37 | #include "tree-dfa.h" |
38 | #include "builtins.h" |
39 | #include "gimple-match.h" |
40 | #include "tree-pass.h" |
41 | #include "internal-fn.h" |
42 | #include "case-cfn-macros.h" |
43 | #include "gimplify.h" |
44 | #include "optabs-tree.h" |
45 | #include "tree-eh.h" |
46 | #include "dbgcnt.h" |
47 | #include "tm.h" |
48 | #include "gimple-range.h" |
49 | #include "langhooks.h" |
50 | |
51 | tree (*mprts_hook) (gimple_match_op *); |
52 | |
53 | extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
54 | code_helper, tree, tree); |
55 | extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
56 | code_helper, tree, tree, tree); |
57 | extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
58 | code_helper, tree, tree, tree, tree); |
59 | extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
60 | code_helper, tree, tree, tree, tree, tree); |
61 | extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
62 | code_helper, tree, tree, tree, tree, tree, tree); |
63 | extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
64 | code_helper, tree, tree, tree, tree, tree, tree, |
65 | tree); |
66 | extern bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree), |
67 | code_helper, tree, tree, tree, tree, tree, tree, |
68 | tree, tree); |
69 | |
70 | /* Functions that are needed by gimple-match but that are exported and used in |
71 | other places in the compiler. */ |
72 | |
73 | tree gimple_simplify (enum tree_code, tree, tree, gimple_seq *, |
74 | tree (*)(tree)); |
75 | tree gimple_simplify (enum tree_code, tree, tree, tree, gimple_seq *, |
76 | tree (*)(tree)); |
77 | tree gimple_simplify (enum tree_code, tree, tree, tree, tree, gimple_seq *, |
78 | tree (*)(tree)); |
79 | tree gimple_simplify (combined_fn, tree, tree, gimple_seq *, |
80 | tree (*)(tree)); |
81 | tree gimple_simplify (combined_fn, tree, tree, tree, gimple_seq *, |
82 | tree (*)(tree)); |
83 | tree gimple_simplify (combined_fn, tree, tree, tree, tree, gimple_seq *, |
84 | tree (*)(tree)); |
85 | |
86 | tree do_valueize (tree, tree (*)(tree), bool &); |
87 | tree do_valueize (tree (*)(tree), tree); |
88 | |
89 | /* Forward declarations of the private auto-generated matchers. |
90 | They expect valueized operands in canonical order and do not |
91 | perform simplification of all-constant operands. */ |
92 | |
93 | static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *, tree (*)(tree)); |
94 | static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *, tree (*)(tree)); |
95 | static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *, tree (*)(tree)); |
96 | static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *, tree (*)(tree)); |
97 | static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *, tree (*)(tree)); |
98 | static bool gimple_resimplify6 (gimple_seq *, gimple_match_op *, tree (*)(tree)); |
99 | static bool gimple_resimplify7 (gimple_seq *, gimple_match_op *, tree (*)(tree)); |
100 | |
101 | /* Match and simplify the toplevel valueized operation THIS. |
102 | Replaces THIS with a simplified and/or canonicalized result and |
103 | returns whether any change was made. */ |
104 | |
105 | bool |
106 | gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree)) |
107 | { |
108 | switch (num_ops) |
109 | { |
110 | case 1: |
111 | return gimple_resimplify1 (seq, this, valueize); |
112 | case 2: |
113 | return gimple_resimplify2 (seq, this, valueize); |
114 | case 3: |
115 | return gimple_resimplify3 (seq, this, valueize); |
116 | case 4: |
117 | return gimple_resimplify4 (seq, this, valueize); |
118 | case 5: |
119 | return gimple_resimplify5 (seq, this, valueize); |
120 | case 6: |
121 | return gimple_resimplify6 (seq, this, valueize); |
122 | case 7: |
123 | return gimple_resimplify7 (seq, this, valueize); |
124 | default: |
125 | gcc_unreachable (); |
126 | } |
127 | } |
128 | |
129 | /* Return whether T is a constant that we'll dispatch to fold to |
130 | evaluate fully constant expressions. */ |
131 | |
132 | static inline bool |
133 | constant_for_folding (tree t) |
134 | { |
135 | return (CONSTANT_CLASS_P (t) |
136 | /* The following is only interesting to string builtins. */ |
137 | || (TREE_CODE (t) == ADDR_EXPR |
138 | && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)); |
139 | } |
140 | |
/* Try to convert conditional operation ORIG_OP into an IFN_COND_*
   operation.  Return true on success, storing the new operation in NEW_OP.

   On success NEW_OP's operands are laid out as:

     cond, op0 ... opN-1, else_value [, len, bias]

   where LEN and BIAS are only present when ORIG_OP carries a length
   control (the IFN_COND_LEN_* form).  */

static bool
convert_conditional_op (gimple_match_op *orig_op,
			gimple_match_op *new_op)
{
  internal_fn ifn;
  if (orig_op->code.is_tree_code ())
    ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
  else
    {
      auto cfn = combined_fn (orig_op->code);
      /* Only internal functions can have conditional variants.  */
      if (!internal_fn_p (code: cfn))
	return false;
      ifn = get_conditional_internal_fn (as_internal_fn (code: cfn));
    }
  /* IFN_LAST means there is no conditional form of this operation.  */
  if (ifn == IFN_LAST)
    return false;
  unsigned int num_ops = orig_op->num_ops;
  unsigned int num_cond_ops = 2;
  if (orig_op->cond.len)
    {
      /* Add the length and bias parameters.  */
      ifn = get_len_internal_fn (ifn);
      num_cond_ops = 4;
    }
  new_op->set_op (code_in: as_combined_fn (fn: ifn), type_in: orig_op->type, num_ops_in: num_ops + num_cond_ops);
  new_op->ops[0] = orig_op->cond.cond;
  /* The original operands shift up by one to make room for the mask.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    new_op->ops[i + 1] = orig_op->ops[i];
  tree else_value = orig_op->cond.else_value;
  /* With no explicit else value, let the target pick its preferred one.  */
  if (!else_value)
    else_value = targetm.preferred_else_value (ifn, orig_op->type,
					       num_ops, orig_op->ops);
  new_op->ops[num_ops + 1] = else_value;
  if (orig_op->cond.len)
    {
      new_op->ops[num_ops + 2] = orig_op->cond.len;
      new_op->ops[num_ops + 3] = orig_op->cond.bias;
    }
  return true;
}
184 | /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting |
185 | VALUEIZED to true if valueization changed OP. */ |
186 | |
187 | inline tree |
188 | do_valueize (tree op, tree (*valueize)(tree), bool &valueized) |
189 | { |
190 | if (valueize && TREE_CODE (op) == SSA_NAME) |
191 | { |
192 | tree tem = valueize (op); |
193 | if (tem && tem != op) |
194 | { |
195 | op = tem; |
196 | valueized = true; |
197 | } |
198 | } |
199 | return op; |
200 | } |
201 | |
202 | /* If in GIMPLE the operation described by RES_OP should be single-rhs, |
203 | build a GENERIC tree for that expression and update RES_OP accordingly. */ |
204 | |
205 | void |
206 | maybe_build_generic_op (gimple_match_op *res_op) |
207 | { |
208 | tree_code code = (tree_code) res_op->code; |
209 | tree val; |
210 | switch (code) |
211 | { |
212 | case REALPART_EXPR: |
213 | case IMAGPART_EXPR: |
214 | case VIEW_CONVERT_EXPR: |
215 | val = build1 (code, res_op->type, res_op->ops[0]); |
216 | res_op->set_value (val); |
217 | break; |
218 | case BIT_FIELD_REF: |
219 | val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1], |
220 | res_op->ops[2]); |
221 | REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse; |
222 | res_op->set_value (val); |
223 | break; |
224 | default:; |
225 | } |
226 | } |
227 | |
/* Try to build RES_OP, which is known to be a call to FN.  Return null
   if the target doesn't support the function.  */

static gcall *
build_call_internal (internal_fn fn, gimple_match_op *res_op)
{
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, res_op->type,
						  res_op->ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
	{
	  /* Unsupported direct functions normally fail, with one
	     exception for large _BitInt bit-query builtins below.  */
	  switch (fn)
	    {
	    case IFN_CLZ:
	    case IFN_CTZ:
	    case IFN_CLRSB:
	    case IFN_FFS:
	    case IFN_POPCOUNT:
	    case IFN_PARITY:
	      /* For these 6 builtins large/huge _BitInt operand is ok
		 before bitint lowering pass.  */
	      if (res_op->num_ops >= 1
		  && TREE_CODE (TREE_TYPE (res_op->ops[0])) == BITINT_TYPE
		  && (TYPE_PRECISION (TREE_TYPE (res_op->ops[0]))
		      > MAX_FIXED_MODE_SIZE)
		  && cfun
		  && (cfun->curr_properties & PROP_gimple_lbitint) == 0)
		break;
	      return NULL;

	    default:
	      return NULL;
	    }
	}
    }
  /* All seven argument slots are passed; op_or_null presumably yields
     a null tree beyond num_ops — confirm against gimple-match.h.  */
  return gimple_build_call_internal (fn, res_op->num_ops,
				     res_op->op_or_null (i: 0),
				     res_op->op_or_null (i: 1),
				     res_op->op_or_null (i: 2),
				     res_op->op_or_null (i: 3),
				     res_op->op_or_null (i: 4),
				     res_op->op_or_null (i: 5),
				     res_op->op_or_null (i: 6));
}
273 | |
/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
				 tree (*valueize) (tree))
{
  /* Nothing to do for an unconditional operation.  */
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
	 gimple value, just use it unconditionally.  This isn't a
	 simplification in itself, since there was no operation to
	 build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (op: res_op))
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
			  && TYPE_OVERFLOW_TRAPS (res_op->type));
      tree_code op_code = (tree_code) res_op->code;
      bool op_could_trap;

      /* COND_EXPR will trap if, and only if, the condition
	 traps and hence we have to check this.  For all other operations, we
	 don't need to consider the operands.  */
      if (op_code == COND_EXPR)
	op_could_trap = generic_expr_could_trap_p (expr: res_op->ops[0]);
      else
	op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
						FLOAT_TYPE_P (res_op->type),
						honor_trapv,
						res_op->op_or_null (i: 1));

      /* A non-trapping operation can be performed unconditionally.  */
      if (!op_could_trap)
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (op: res_op))
    {
      /* With a length control active, IFN_VCOND_MASK_LEN is used so the
	 LEN and BIAS operands are preserved.  */
      tree len = res_op->cond.len;
      if (!len)
	new_op.set_op (code_in: VEC_COND_EXPR, type_in: res_op->type,
		       op0: res_op->cond.cond, op1: res_op->ops[0],
		       op2: res_op->cond.else_value);
      else
	new_op.set_op (code_in: IFN_VCOND_MASK_LEN, type_in: res_op->type,
		       op0: res_op->cond.cond, op1: res_op->ops[0],
		       op2: res_op->cond.else_value,
		       op3: res_op->cond.len, op4: res_op->cond.bias);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (orig_op: res_op, new_op: &new_op))
    *res_op = new_op;

  return false;
}
355 | |
/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
				gimple_seq *seq, tree (*valueize) (tree))
{
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  int len_index = internal_fn_len_index (ifn);
  /* The unconditional equivalent is either a tree code or, failing
     that, another (unconditional) internal function.  */
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
	return false;
      op = as_combined_fn (fn: ifn);
    }

  unsigned int num_ops = res_op->num_ops;
  /* num_cond_ops = 2 for COND_ADD (MASK and ELSE)
     whereas num_cond_ops = 4 for COND_LEN_ADD (MASK, ELSE, LEN and BIAS).  */
  unsigned int num_cond_ops = len_index < 0 ? 2 : 4;
  /* Pick the trailing control operands off the call: the else value is
     last for COND_*, third-from-last for COND_LEN_* (followed by LEN
     and BIAS).  */
  tree else_value
    = len_index < 0 ? res_op->ops[num_ops - 1] : res_op->ops[num_ops - 3];
  tree len = len_index < 0 ? NULL_TREE : res_op->ops[num_ops - 2];
  tree bias = len_index < 0 ? NULL_TREE : res_op->ops[num_ops - 1];
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
					      else_value, len, bias),
			   op, res_op->type, num_ops - num_cond_ops);

  /* Copy everything after the mask; trailing control operands beyond
     cond_op's operand count are simply ignored.  */
  memcpy (dest: cond_op.ops, src: res_op->ops + 1, n: (num_ops - 1) * sizeof *cond_op.ops);
  /* Dispatch on the arity of the unconditional operation.  */
  switch (num_ops - num_cond_ops)
    {
    case 1:
      if (!gimple_resimplify1 (seq, &cond_op, valueize))
	return false;
      break;
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
	return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
	return false;
      break;
    default:
      gcc_unreachable ();
    }
  /* Simplification succeeded: rebuild the conditional form around it.  */
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}
423 | |
424 | /* Helper for the autogenerated code, valueize OP. */ |
425 | |
426 | tree |
427 | do_valueize (tree (*valueize)(tree), tree op) |
428 | { |
429 | if (valueize && TREE_CODE (op) == SSA_NAME) |
430 | { |
431 | tree tem = valueize (op); |
432 | if (tem) |
433 | return tem; |
434 | } |
435 | return op; |
436 | } |
437 | |
438 | /* Push the exploded expression described by RES_OP as a statement to |
439 | SEQ if necessary and return a gimple value denoting the value of the |
440 | expression. If RES is not NULL then the result will be always RES |
441 | and even gimple values are pushed to SEQ. */ |
442 | |
443 | tree |
444 | maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res) |
445 | { |
446 | tree *ops = res_op->ops; |
447 | unsigned num_ops = res_op->num_ops; |
448 | |
449 | /* The caller should have converted conditional operations into an UNCOND |
450 | form and resimplified as appropriate. The conditional form only |
451 | survives this far if that conversion failed. */ |
452 | if (res_op->cond.cond) |
453 | return NULL_TREE; |
454 | |
455 | if (res_op->code.is_tree_code ()) |
456 | { |
457 | if (!res |
458 | && gimple_simplified_result_is_gimple_val (op: res_op)) |
459 | return ops[0]; |
460 | if (mprts_hook) |
461 | { |
462 | tree tem = mprts_hook (res_op); |
463 | if (tem) |
464 | return tem; |
465 | } |
466 | } |
467 | |
468 | if (!seq) |
469 | return NULL_TREE; |
470 | |
471 | /* Play safe and do not allow abnormals to be mentioned in |
472 | newly created statements. */ |
473 | for (unsigned int i = 0; i < num_ops; ++i) |
474 | if (TREE_CODE (ops[i]) == SSA_NAME |
475 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])) |
476 | return NULL_TREE; |
477 | |
478 | if (num_ops > 0 && COMPARISON_CLASS_P (ops[0])) |
479 | for (unsigned int i = 0; i < 2; ++i) |
480 | if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME |
481 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))) |
482 | return NULL_TREE; |
483 | |
484 | if (res_op->code.is_tree_code ()) |
485 | { |
486 | auto code = tree_code (res_op->code); |
487 | if (!res) |
488 | { |
489 | if (gimple_in_ssa_p (cfun)) |
490 | res = make_ssa_name (var: res_op->type); |
491 | else |
492 | res = create_tmp_reg (res_op->type); |
493 | } |
494 | maybe_build_generic_op (res_op); |
495 | gimple *new_stmt = gimple_build_assign (res, code, |
496 | res_op->op_or_null (i: 0), |
497 | res_op->op_or_null (i: 1), |
498 | res_op->op_or_null (i: 2)); |
499 | gimple_seq_add_stmt_without_update (seq, new_stmt); |
500 | return res; |
501 | } |
502 | else |
503 | { |
504 | gcc_assert (num_ops != 0); |
505 | auto fn = combined_fn (res_op->code); |
506 | gcall *new_stmt = NULL; |
507 | if (internal_fn_p (code: fn)) |
508 | { |
509 | /* Generate the given function if we can. */ |
510 | internal_fn ifn = as_internal_fn (code: fn); |
511 | new_stmt = build_call_internal (fn: ifn, res_op); |
512 | if (!new_stmt) |
513 | return NULL_TREE; |
514 | } |
515 | else |
516 | { |
517 | /* Find the function we want to call. */ |
518 | tree decl = builtin_decl_implicit (fncode: as_builtin_fn (code: fn)); |
519 | if (!decl) |
520 | return NULL; |
521 | |
522 | /* We can't and should not emit calls to non-const functions. */ |
523 | if (!(flags_from_decl_or_type (decl) & ECF_CONST)) |
524 | return NULL; |
525 | |
526 | new_stmt = gimple_build_call (decl, num_ops, |
527 | res_op->op_or_null (i: 0), |
528 | res_op->op_or_null (i: 1), |
529 | res_op->op_or_null (i: 2), |
530 | res_op->op_or_null (i: 3), |
531 | res_op->op_or_null (i: 4)); |
532 | } |
533 | if (!res) |
534 | { |
535 | if (gimple_in_ssa_p (cfun)) |
536 | res = make_ssa_name (var: res_op->type); |
537 | else |
538 | res = create_tmp_reg (res_op->type); |
539 | } |
540 | gimple_call_set_lhs (gs: new_stmt, lhs: res); |
541 | gimple_seq_add_stmt_without_update (seq, new_stmt); |
542 | return res; |
543 | } |
544 | } |
545 | |
546 | |
547 | /* Public API overloads follow for operation being tree_code or |
548 | built_in_function and for one to three operands or arguments. |
549 | They return NULL_TREE if nothing could be simplified or |
550 | the resulting simplified value with parts pushed to SEQ. |
551 | If SEQ is NULL then if the simplification needs to create |
552 | new stmts it will fail. If VALUEIZE is non-NULL then all |
553 | SSA names will be valueized using that hook prior to |
554 | applying simplifications. */ |
555 | |
556 | /* Unary ops. */ |
557 | |
558 | tree |
559 | gimple_simplify (enum tree_code code, tree type, |
560 | tree op0, |
561 | gimple_seq *seq, tree (*valueize)(tree)) |
562 | { |
563 | if (constant_for_folding (t: op0)) |
564 | { |
565 | tree res = const_unop (code, type, op0); |
566 | if (res != NULL_TREE |
567 | && CONSTANT_CLASS_P (res)) |
568 | return res; |
569 | } |
570 | |
571 | gimple_match_op res_op; |
572 | if (!gimple_simplify (&res_op, seq, valueize, code, type, op0)) |
573 | return NULL_TREE; |
574 | return maybe_push_res_to_seq (res_op: &res_op, seq); |
575 | } |
576 | |
577 | /* Binary ops. */ |
578 | |
579 | tree |
580 | gimple_simplify (enum tree_code code, tree type, |
581 | tree op0, tree op1, |
582 | gimple_seq *seq, tree (*valueize)(tree)) |
583 | { |
584 | if (constant_for_folding (t: op0) && constant_for_folding (t: op1)) |
585 | { |
586 | tree res = const_binop (code, type, op0, op1); |
587 | if (res != NULL_TREE |
588 | && CONSTANT_CLASS_P (res)) |
589 | return res; |
590 | } |
591 | |
592 | /* Canonicalize operand order both for matching and fallback stmt |
593 | generation. */ |
594 | if ((commutative_tree_code (code) |
595 | || TREE_CODE_CLASS (code) == tcc_comparison) |
596 | && tree_swap_operands_p (op0, op1)) |
597 | { |
598 | std::swap (a&: op0, b&: op1); |
599 | if (TREE_CODE_CLASS (code) == tcc_comparison) |
600 | code = swap_tree_comparison (code); |
601 | } |
602 | |
603 | gimple_match_op res_op; |
604 | if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1)) |
605 | return NULL_TREE; |
606 | return maybe_push_res_to_seq (res_op: &res_op, seq); |
607 | } |
608 | |
609 | /* Ternary ops. */ |
610 | |
611 | tree |
612 | gimple_simplify (enum tree_code code, tree type, |
613 | tree op0, tree op1, tree op2, |
614 | gimple_seq *seq, tree (*valueize)(tree)) |
615 | { |
616 | if (constant_for_folding (t: op0) && constant_for_folding (t: op1) |
617 | && constant_for_folding (t: op2)) |
618 | { |
619 | tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2); |
620 | if (res != NULL_TREE |
621 | && CONSTANT_CLASS_P (res)) |
622 | return res; |
623 | } |
624 | |
625 | /* Canonicalize operand order both for matching and fallback stmt |
626 | generation. */ |
627 | if (commutative_ternary_tree_code (code) |
628 | && tree_swap_operands_p (op0, op1)) |
629 | std::swap (a&: op0, b&: op1); |
630 | |
631 | gimple_match_op res_op; |
632 | if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2)) |
633 | return NULL_TREE; |
634 | return maybe_push_res_to_seq (res_op: &res_op, seq); |
635 | } |
636 | |
637 | /* Builtin or internal function with one argument. */ |
638 | |
639 | tree |
640 | gimple_simplify (combined_fn fn, tree type, |
641 | tree arg0, |
642 | gimple_seq *seq, tree (*valueize)(tree)) |
643 | { |
644 | if (constant_for_folding (t: arg0)) |
645 | { |
646 | tree res = fold_const_call (fn, type, arg0); |
647 | if (res && CONSTANT_CLASS_P (res)) |
648 | return res; |
649 | } |
650 | |
651 | gimple_match_op res_op; |
652 | if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0)) |
653 | return NULL_TREE; |
654 | return maybe_push_res_to_seq (res_op: &res_op, seq); |
655 | } |
656 | |
657 | /* Builtin or internal function with two arguments. */ |
658 | |
659 | tree |
660 | gimple_simplify (combined_fn fn, tree type, |
661 | tree arg0, tree arg1, |
662 | gimple_seq *seq, tree (*valueize)(tree)) |
663 | { |
664 | if (constant_for_folding (t: arg0) |
665 | && constant_for_folding (t: arg1)) |
666 | { |
667 | tree res = fold_const_call (fn, type, arg0, arg1); |
668 | if (res && CONSTANT_CLASS_P (res)) |
669 | return res; |
670 | } |
671 | |
672 | gimple_match_op res_op; |
673 | if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1)) |
674 | return NULL_TREE; |
675 | return maybe_push_res_to_seq (res_op: &res_op, seq); |
676 | } |
677 | |
678 | /* Builtin or internal function with three arguments. */ |
679 | |
680 | tree |
681 | gimple_simplify (combined_fn fn, tree type, |
682 | tree arg0, tree arg1, tree arg2, |
683 | gimple_seq *seq, tree (*valueize)(tree)) |
684 | { |
685 | if (constant_for_folding (t: arg0) |
686 | && constant_for_folding (t: arg1) |
687 | && constant_for_folding (t: arg2)) |
688 | { |
689 | tree res = fold_const_call (fn, type, arg0, arg1, arg2); |
690 | if (res && CONSTANT_CLASS_P (res)) |
691 | return res; |
692 | } |
693 | |
694 | gimple_match_op res_op; |
695 | if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2)) |
696 | return NULL_TREE; |
697 | return maybe_push_res_to_seq (res_op: &res_op, seq); |
698 | } |
699 | |
700 | /* Common subroutine of gimple_extract_op and gimple_simplify. Try to |
701 | describe STMT in RES_OP, returning true on success. Before recording |
702 | an operand, call: |
703 | |
704 | - VALUEIZE_CONDITION for a COND_EXPR condition |
705 | - VALUEIZE_OP for every other top-level operand |
706 | |
707 | Both routines take a tree argument and returns a tree. */ |
708 | |
709 | template<typename ValueizeOp, typename ValueizeCondition> |
710 | inline bool |
711 | (gimple *stmt, gimple_match_op *res_op, |
712 | ValueizeOp valueize_op, |
713 | ValueizeCondition valueize_condition) |
714 | { |
715 | switch (gimple_code (g: stmt)) |
716 | { |
717 | case GIMPLE_ASSIGN: |
718 | { |
719 | enum tree_code code = gimple_assign_rhs_code (gs: stmt); |
720 | tree type = TREE_TYPE (gimple_assign_lhs (stmt)); |
721 | switch (gimple_assign_rhs_class (gs: stmt)) |
722 | { |
723 | case GIMPLE_SINGLE_RHS: |
724 | if (code == REALPART_EXPR |
725 | || code == IMAGPART_EXPR |
726 | || code == VIEW_CONVERT_EXPR) |
727 | { |
728 | tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0); |
729 | res_op->set_op (code, type, valueize_op (op0)); |
730 | return true; |
731 | } |
732 | else if (code == BIT_FIELD_REF) |
733 | { |
734 | tree rhs1 = gimple_assign_rhs1 (gs: stmt); |
735 | tree op0 = valueize_op (TREE_OPERAND (rhs1, 0)); |
736 | res_op->set_op (code, type, op0, |
737 | TREE_OPERAND (rhs1, 1), |
738 | TREE_OPERAND (rhs1, 2), |
739 | REF_REVERSE_STORAGE_ORDER (rhs1)); |
740 | return true; |
741 | } |
742 | else if (code == SSA_NAME) |
743 | { |
744 | tree op0 = gimple_assign_rhs1 (gs: stmt); |
745 | res_op->set_op (TREE_CODE (op0), type, valueize_op (op0)); |
746 | return true; |
747 | } |
748 | break; |
749 | case GIMPLE_UNARY_RHS: |
750 | { |
751 | tree rhs1 = gimple_assign_rhs1 (gs: stmt); |
752 | res_op->set_op (code, type, valueize_op (rhs1)); |
753 | return true; |
754 | } |
755 | case GIMPLE_BINARY_RHS: |
756 | { |
757 | tree rhs1 = valueize_op (gimple_assign_rhs1 (gs: stmt)); |
758 | tree rhs2 = valueize_op (gimple_assign_rhs2 (gs: stmt)); |
759 | res_op->set_op (code_in: code, type_in: type, op0: rhs1, op1: rhs2); |
760 | return true; |
761 | } |
762 | case GIMPLE_TERNARY_RHS: |
763 | { |
764 | tree rhs1 = gimple_assign_rhs1 (gs: stmt); |
765 | if (code == COND_EXPR && COMPARISON_CLASS_P (rhs1)) |
766 | rhs1 = valueize_condition (rhs1); |
767 | else |
768 | rhs1 = valueize_op (rhs1); |
769 | tree rhs2 = valueize_op (gimple_assign_rhs2 (gs: stmt)); |
770 | tree rhs3 = valueize_op (gimple_assign_rhs3 (gs: stmt)); |
771 | res_op->set_op (code_in: code, type_in: type, op0: rhs1, op1: rhs2, op2: rhs3); |
772 | return true; |
773 | } |
774 | default: |
775 | gcc_unreachable (); |
776 | } |
777 | break; |
778 | } |
779 | |
780 | case GIMPLE_CALL: |
781 | /* ??? This way we can't simplify calls with side-effects. */ |
782 | if (gimple_call_lhs (gs: stmt) != NULL_TREE |
783 | && gimple_call_num_args (gs: stmt) >= 1 |
784 | && gimple_call_num_args (gs: stmt) <= 7) |
785 | { |
786 | combined_fn cfn; |
787 | if (gimple_call_internal_p (gs: stmt)) |
788 | cfn = as_combined_fn (fn: gimple_call_internal_fn (gs: stmt)); |
789 | else |
790 | { |
791 | tree fn = gimple_call_fn (gs: stmt); |
792 | if (!fn) |
793 | return false; |
794 | |
795 | fn = valueize_op (fn); |
796 | if (TREE_CODE (fn) != ADDR_EXPR |
797 | || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL) |
798 | return false; |
799 | |
800 | tree decl = TREE_OPERAND (fn, 0); |
801 | if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL |
802 | || !gimple_builtin_call_types_compatible_p (stmt, decl)) |
803 | return false; |
804 | |
805 | cfn = as_combined_fn (fn: DECL_FUNCTION_CODE (decl)); |
806 | } |
807 | |
808 | unsigned int num_args = gimple_call_num_args (gs: stmt); |
809 | res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args); |
810 | for (unsigned i = 0; i < num_args; ++i) |
811 | res_op->ops[i] = valueize_op (gimple_call_arg (gs: stmt, index: i)); |
812 | return true; |
813 | } |
814 | break; |
815 | |
816 | case GIMPLE_COND: |
817 | { |
818 | tree lhs = valueize_op (gimple_cond_lhs (gs: stmt)); |
819 | tree rhs = valueize_op (gimple_cond_rhs (gs: stmt)); |
820 | res_op->set_op (code_in: gimple_cond_code (gs: stmt), boolean_type_node, op0: lhs, op1: rhs); |
821 | return true; |
822 | } |
823 | |
824 | default: |
825 | break; |
826 | } |
827 | |
828 | return false; |
829 | } |
830 | |
831 | /* Try to describe STMT in RES_OP, returning true on success. |
832 | For GIMPLE_CONDs, describe the condition that is being tested. |
833 | For GIMPLE_ASSIGNs, describe the rhs of the assignment. |
834 | For GIMPLE_CALLs, describe the call. */ |
835 | |
836 | bool |
837 | (gimple *stmt, gimple_match_op *res_op) |
838 | { |
839 | auto nop = [](tree op) { return op; }; |
840 | return gimple_extract (stmt, res_op, valueize_op: nop, valueize_condition: nop); |
841 | } |
842 | |
/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.  Returns true if RES_OP was
   simplified or if valueization changed any operand.  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
		 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  /* Set when TOP_VALUEIZE changes any recorded operand; this is also
     the fallback return value when no simplification applies.  */
  bool valueized = false;
  auto valueize_op = [&](tree op)
    {
      return do_valueize (op, valueize: top_valueize, valueized);
    };
  /* Valueize and pre-simplify an embedded COND_EXPR condition.  A
     successful resimplification is rebuilt as a GENERIC comparison or
     reduced to a gimple value; otherwise fall back to valueizing the
     whole condition tree.  */
  auto valueize_condition = [&](tree op) -> tree
    {
      /* Track valueization of the comparison operands separately so an
	 unchanged, unsimplified condition doesn't mark VALUEIZED.  */
      bool cond_valueized = false;
      tree lhs = do_valueize (TREE_OPERAND (op, 0), valueize: top_valueize,
			      valueized&: cond_valueized);
      tree rhs = do_valueize (TREE_OPERAND (op, 1), valueize: top_valueize,
			      valueized&: cond_valueized);
      gimple_match_op res_op2 (res_op->cond, TREE_CODE (op),
			       TREE_TYPE (op), lhs, rhs);
      if ((gimple_resimplify2 (seq, &res_op2, valueize)
	   || cond_valueized)
	  && res_op2.code.is_tree_code ())
	{
	  auto code = tree_code (res_op2.code);
	  if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      valueized = true;
	      /* Rebuild the (possibly simplified) comparison tree.  */
	      return build2 (code, TREE_TYPE (op),
			     res_op2.ops[0], res_op2.ops[1]);
	    }
	  else if (code == SSA_NAME
		   || code == INTEGER_CST
		   || code == VECTOR_CST)
	    {
	      valueized = true;
	      /* The condition simplified to a plain gimple value.  */
	      return res_op2.ops[0];
	    }
	}
      return valueize_op (op);
    };

  if (!gimple_extract (stmt, res_op, valueize_op, valueize_condition))
    return false;

  /* Conditional internal functions first get a shot at simplifying
     their unconditional equivalent.  */
  if (res_op->code.is_internal_fn ())
    {
      internal_fn ifn = internal_fn (res_op->code);
      if (try_conditional_simplification (ifn, res_op, seq, valueize))
	return true;
    }

  /* Skip matching for reverse storage order accesses and for
     operations without operands.  */
  if (!res_op->reverse
      && res_op->num_ops
      && res_op->resimplify (seq, valueize))
    return true;

  return valueized;
}
903 | |
904 | /* Helper that matches and simplifies the toplevel result from |
905 | a gimple_simplify run (where we don't want to build |
906 | a stmt in case it's used in in-place folding). Replaces |
907 | RES_OP with a simplified and/or canonicalized result and |
908 | returns whether any change was made. */ |
909 | |
910 | static bool |
911 | gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op, |
912 | tree (*valueize)(tree)) |
913 | { |
914 | if (constant_for_folding (t: res_op->ops[0])) |
915 | { |
916 | tree tem = NULL_TREE; |
917 | if (res_op->code.is_tree_code ()) |
918 | { |
919 | auto code = tree_code (res_op->code); |
920 | if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)) |
921 | && TREE_CODE_LENGTH (code) == 1) |
922 | tem = const_unop (code, res_op->type, res_op->ops[0]); |
923 | } |
924 | else |
925 | tem = fold_const_call (combined_fn (res_op->code), res_op->type, |
926 | res_op->ops[0]); |
927 | if (tem != NULL_TREE |
928 | && CONSTANT_CLASS_P (tem)) |
929 | { |
930 | if (TREE_OVERFLOW_P (tem)) |
931 | tem = drop_tree_overflow (tem); |
932 | res_op->set_value (tem); |
933 | maybe_resimplify_conditional_op (seq, res_op, valueize); |
934 | return true; |
935 | } |
936 | } |
937 | |
938 | /* Limit recursion, there are cases like PR80887 and others, for |
939 | example when value-numbering presents us with unfolded expressions |
940 | that we are really not prepared to handle without eventual |
941 | oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50 |
942 | itself as available expression. */ |
943 | static unsigned depth; |
944 | if (depth > 10) |
945 | { |
946 | if (dump_file && (dump_flags & TDF_FOLDING)) |
947 | fprintf (stream: dump_file, format: "Aborting expression simplification due to " |
948 | "deep recursion\n" ); |
949 | return false; |
950 | } |
951 | |
952 | ++depth; |
953 | gimple_match_op res_op2 (*res_op); |
954 | if (gimple_simplify (&res_op2, seq, valueize, |
955 | res_op->code, res_op->type, res_op->ops[0])) |
956 | { |
957 | --depth; |
958 | *res_op = res_op2; |
959 | return true; |
960 | } |
961 | --depth; |
962 | |
963 | if (maybe_resimplify_conditional_op (seq, res_op, valueize)) |
964 | return true; |
965 | |
966 | return false; |
967 | } |
968 | |
969 | /* Helper that matches and simplifies the toplevel result from |
970 | a gimple_simplify run (where we don't want to build |
971 | a stmt in case it's used in in-place folding). Replaces |
972 | RES_OP with a simplified and/or canonicalized result and |
973 | returns whether any change was made. */ |
974 | |
975 | static bool |
976 | gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op, |
977 | tree (*valueize)(tree)) |
978 | { |
979 | if (constant_for_folding (t: res_op->ops[0]) |
980 | && constant_for_folding (t: res_op->ops[1])) |
981 | { |
982 | tree tem = NULL_TREE; |
983 | if (res_op->code.is_tree_code ()) |
984 | { |
985 | auto code = tree_code (res_op->code); |
986 | if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)) |
987 | && TREE_CODE_LENGTH (code) == 2) |
988 | tem = const_binop (code, res_op->type, |
989 | res_op->ops[0], res_op->ops[1]); |
990 | } |
991 | else |
992 | tem = fold_const_call (combined_fn (res_op->code), res_op->type, |
993 | res_op->ops[0], res_op->ops[1]); |
994 | if (tem != NULL_TREE |
995 | && CONSTANT_CLASS_P (tem)) |
996 | { |
997 | if (TREE_OVERFLOW_P (tem)) |
998 | tem = drop_tree_overflow (tem); |
999 | res_op->set_value (tem); |
1000 | maybe_resimplify_conditional_op (seq, res_op, valueize); |
1001 | return true; |
1002 | } |
1003 | } |
1004 | |
1005 | /* Canonicalize operand order. */ |
1006 | bool canonicalized = false; |
1007 | bool is_comparison |
1008 | = (res_op->code.is_tree_code () |
1009 | && TREE_CODE_CLASS (tree_code (res_op->code)) == tcc_comparison); |
1010 | if ((is_comparison || commutative_binary_op_p (res_op->code, res_op->type)) |
1011 | && tree_swap_operands_p (res_op->ops[0], res_op->ops[1])) |
1012 | { |
1013 | std::swap (a&: res_op->ops[0], b&: res_op->ops[1]); |
1014 | if (is_comparison) |
1015 | res_op->code = swap_tree_comparison (tree_code (res_op->code)); |
1016 | canonicalized = true; |
1017 | } |
1018 | |
1019 | /* Limit recursion, see gimple_resimplify1. */ |
1020 | static unsigned depth; |
1021 | if (depth > 10) |
1022 | { |
1023 | if (dump_file && (dump_flags & TDF_FOLDING)) |
1024 | fprintf (stream: dump_file, format: "Aborting expression simplification due to " |
1025 | "deep recursion\n" ); |
1026 | return false; |
1027 | } |
1028 | |
1029 | ++depth; |
1030 | gimple_match_op res_op2 (*res_op); |
1031 | if (gimple_simplify (&res_op2, seq, valueize, |
1032 | res_op->code, res_op->type, |
1033 | res_op->ops[0], res_op->ops[1])) |
1034 | { |
1035 | --depth; |
1036 | *res_op = res_op2; |
1037 | return true; |
1038 | } |
1039 | --depth; |
1040 | |
1041 | if (maybe_resimplify_conditional_op (seq, res_op, valueize)) |
1042 | return true; |
1043 | |
1044 | return canonicalized; |
1045 | } |
1046 | |
1047 | /* Helper that matches and simplifies the toplevel result from |
1048 | a gimple_simplify run (where we don't want to build |
1049 | a stmt in case it's used in in-place folding). Replaces |
1050 | RES_OP with a simplified and/or canonicalized result and |
1051 | returns whether any change was made. */ |
1052 | |
1053 | static bool |
1054 | gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op, |
1055 | tree (*valueize)(tree)) |
1056 | { |
1057 | if (constant_for_folding (t: res_op->ops[0]) |
1058 | && constant_for_folding (t: res_op->ops[1]) |
1059 | && constant_for_folding (t: res_op->ops[2])) |
1060 | { |
1061 | tree tem = NULL_TREE; |
1062 | if (res_op->code.is_tree_code ()) |
1063 | { |
1064 | auto code = tree_code (res_op->code); |
1065 | if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)) |
1066 | && TREE_CODE_LENGTH (code) == 3) |
1067 | tem = fold_ternary/*_to_constant*/ (code, res_op->type, |
1068 | res_op->ops[0], res_op->ops[1], |
1069 | res_op->ops[2]); |
1070 | } |
1071 | else |
1072 | tem = fold_const_call (combined_fn (res_op->code), res_op->type, |
1073 | res_op->ops[0], res_op->ops[1], res_op->ops[2]); |
1074 | if (tem != NULL_TREE |
1075 | && CONSTANT_CLASS_P (tem)) |
1076 | { |
1077 | if (TREE_OVERFLOW_P (tem)) |
1078 | tem = drop_tree_overflow (tem); |
1079 | res_op->set_value (tem); |
1080 | maybe_resimplify_conditional_op (seq, res_op, valueize); |
1081 | return true; |
1082 | } |
1083 | } |
1084 | |
1085 | /* Canonicalize operand order. */ |
1086 | bool canonicalized = false; |
1087 | int argno = first_commutative_argument (res_op->code, res_op->type); |
1088 | if (argno >= 0 |
1089 | && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1])) |
1090 | { |
1091 | std::swap (a&: res_op->ops[argno], b&: res_op->ops[argno + 1]); |
1092 | canonicalized = true; |
1093 | } |
1094 | |
1095 | /* Limit recursion, see gimple_resimplify1. */ |
1096 | static unsigned depth; |
1097 | if (depth > 10) |
1098 | { |
1099 | if (dump_file && (dump_flags & TDF_FOLDING)) |
1100 | fprintf (stream: dump_file, format: "Aborting expression simplification due to " |
1101 | "deep recursion\n" ); |
1102 | return false; |
1103 | } |
1104 | |
1105 | ++depth; |
1106 | gimple_match_op res_op2 (*res_op); |
1107 | if (gimple_simplify (&res_op2, seq, valueize, |
1108 | res_op->code, res_op->type, |
1109 | res_op->ops[0], res_op->ops[1], res_op->ops[2])) |
1110 | { |
1111 | --depth; |
1112 | *res_op = res_op2; |
1113 | return true; |
1114 | } |
1115 | --depth; |
1116 | |
1117 | if (maybe_resimplify_conditional_op (seq, res_op, valueize)) |
1118 | return true; |
1119 | |
1120 | return canonicalized; |
1121 | } |
1122 | |
1123 | /* Helper that matches and simplifies the toplevel result from |
1124 | a gimple_simplify run (where we don't want to build |
1125 | a stmt in case it's used in in-place folding). Replaces |
1126 | RES_OP with a simplified and/or canonicalized result and |
1127 | returns whether any change was made. */ |
1128 | |
1129 | static bool |
1130 | gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op, |
1131 | tree (*valueize)(tree)) |
1132 | { |
1133 | /* No constant folding is defined for four-operand functions. */ |
1134 | |
1135 | /* Canonicalize operand order. */ |
1136 | bool canonicalized = false; |
1137 | int argno = first_commutative_argument (res_op->code, res_op->type); |
1138 | if (argno >= 0 |
1139 | && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1])) |
1140 | { |
1141 | std::swap (a&: res_op->ops[argno], b&: res_op->ops[argno + 1]); |
1142 | canonicalized = true; |
1143 | } |
1144 | |
1145 | /* Limit recursion, see gimple_resimplify1. */ |
1146 | static unsigned depth; |
1147 | if (depth > 10) |
1148 | { |
1149 | if (dump_file && (dump_flags & TDF_FOLDING)) |
1150 | fprintf (stream: dump_file, format: "Aborting expression simplification due to " |
1151 | "deep recursion\n" ); |
1152 | return false; |
1153 | } |
1154 | |
1155 | ++depth; |
1156 | gimple_match_op res_op2 (*res_op); |
1157 | if (gimple_simplify (&res_op2, seq, valueize, |
1158 | res_op->code, res_op->type, |
1159 | res_op->ops[0], res_op->ops[1], res_op->ops[2], |
1160 | res_op->ops[3])) |
1161 | { |
1162 | --depth; |
1163 | *res_op = res_op2; |
1164 | return true; |
1165 | } |
1166 | --depth; |
1167 | |
1168 | if (maybe_resimplify_conditional_op (seq, res_op, valueize)) |
1169 | return true; |
1170 | |
1171 | return canonicalized; |
1172 | } |
1173 | |
1174 | /* Helper that matches and simplifies the toplevel result from |
1175 | a gimple_simplify run (where we don't want to build |
1176 | a stmt in case it's used in in-place folding). Replaces |
1177 | RES_OP with a simplified and/or canonicalized result and |
1178 | returns whether any change was made. */ |
1179 | |
1180 | static bool |
1181 | gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op, |
1182 | tree (*valueize)(tree)) |
1183 | { |
1184 | /* No constant folding is defined for five-operand functions. */ |
1185 | |
1186 | /* Canonicalize operand order. */ |
1187 | bool canonicalized = false; |
1188 | int argno = first_commutative_argument (res_op->code, res_op->type); |
1189 | if (argno >= 0 |
1190 | && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1])) |
1191 | { |
1192 | std::swap (a&: res_op->ops[argno], b&: res_op->ops[argno + 1]); |
1193 | canonicalized = true; |
1194 | } |
1195 | |
1196 | gimple_match_op res_op2 (*res_op); |
1197 | if (gimple_simplify (&res_op2, seq, valueize, |
1198 | res_op->code, res_op->type, |
1199 | res_op->ops[0], res_op->ops[1], res_op->ops[2], |
1200 | res_op->ops[3], res_op->ops[4])) |
1201 | { |
1202 | *res_op = res_op2; |
1203 | return true; |
1204 | } |
1205 | |
1206 | if (maybe_resimplify_conditional_op (seq, res_op, valueize)) |
1207 | return true; |
1208 | |
1209 | return canonicalized; |
1210 | } |
1211 | |
1212 | /* Helper that matches and simplifies the toplevel result from |
1213 | a gimple_simplify run (where we don't want to build |
1214 | a stmt in case it's used in in-place folding). Replaces |
1215 | RES_OP with a simplified and/or canonicalized result and |
1216 | returns whether any change was made. */ |
1217 | |
1218 | static bool |
1219 | gimple_resimplify6 (gimple_seq *seq, gimple_match_op *res_op, |
1220 | tree (*valueize)(tree)) |
1221 | { |
1222 | /* No constant folding is defined for six-operand functions. */ |
1223 | |
1224 | /* Canonicalize operand order. */ |
1225 | bool canonicalized = false; |
1226 | int argno = first_commutative_argument (res_op->code, res_op->type); |
1227 | if (argno >= 0 |
1228 | && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1])) |
1229 | { |
1230 | std::swap (a&: res_op->ops[argno], b&: res_op->ops[argno + 1]); |
1231 | canonicalized = true; |
1232 | } |
1233 | |
1234 | gimple_match_op res_op2 (*res_op); |
1235 | if (gimple_simplify (&res_op2, seq, valueize, |
1236 | res_op->code, res_op->type, |
1237 | res_op->ops[0], res_op->ops[1], res_op->ops[2], |
1238 | res_op->ops[3], res_op->ops[4], res_op->ops[5])) |
1239 | { |
1240 | *res_op = res_op2; |
1241 | return true; |
1242 | } |
1243 | |
1244 | if (maybe_resimplify_conditional_op (seq, res_op, valueize)) |
1245 | return true; |
1246 | |
1247 | return canonicalized; |
1248 | } |
1249 | |
1250 | /* Helper that matches and simplifies the toplevel result from |
1251 | a gimple_simplify run (where we don't want to build |
1252 | a stmt in case it's used in in-place folding). Replaces |
1253 | RES_OP with a simplified and/or canonicalized result and |
1254 | returns whether any change was made. */ |
1255 | |
1256 | static bool |
1257 | gimple_resimplify7 (gimple_seq *seq, gimple_match_op *res_op, |
1258 | tree (*valueize)(tree)) |
1259 | { |
1260 | /* No constant folding is defined for seven-operand functions. */ |
1261 | |
1262 | /* Canonicalize operand order. */ |
1263 | bool canonicalized = false; |
1264 | int argno = first_commutative_argument (res_op->code, res_op->type); |
1265 | if (argno >= 0 |
1266 | && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1])) |
1267 | { |
1268 | std::swap (a&: res_op->ops[argno], b&: res_op->ops[argno + 1]); |
1269 | canonicalized = true; |
1270 | } |
1271 | |
1272 | gimple_match_op res_op2 (*res_op); |
1273 | if (gimple_simplify (&res_op2, seq, valueize, |
1274 | res_op->code, res_op->type, |
1275 | res_op->ops[0], res_op->ops[1], res_op->ops[2], |
1276 | res_op->ops[3], res_op->ops[4], res_op->ops[5], |
1277 | res_op->ops[6])) |
1278 | { |
1279 | *res_op = res_op2; |
1280 | return true; |
1281 | } |
1282 | |
1283 | if (maybe_resimplify_conditional_op (seq, res_op, valueize)) |
1284 | return true; |
1285 | |
1286 | return canonicalized; |
1287 | } |
1288 | |
1289 | /* Return a canonical form for CODE when operating on TYPE. The idea |
1290 | is to remove redundant ways of representing the same operation so |
1291 | that code_helpers can be hashed and compared for equality. |
1292 | |
1293 | The only current canonicalization is to replace built-in functions |
1294 | with internal functions, in cases where internal-fn.def defines |
1295 | such an internal function. |
1296 | |
1297 | Note that the new code_helper cannot necessarily be used in place of |
1298 | the original code_helper. For example, the new code_helper might be |
1299 | an internal function that the target does not support. */ |
1300 | |
1301 | code_helper |
1302 | canonicalize_code (code_helper code, tree type) |
1303 | { |
1304 | if (code.is_fn_code ()) |
1305 | return associated_internal_fn (combined_fn (code), type); |
1306 | return code; |
1307 | } |
1308 | |
1309 | /* Return true if CODE is a binary operation and if CODE is commutative when |
1310 | operating on type TYPE. */ |
1311 | |
1312 | bool |
1313 | commutative_binary_op_p (code_helper code, tree type) |
1314 | { |
1315 | if (code.is_tree_code ()) |
1316 | return commutative_tree_code (tree_code (code)); |
1317 | auto cfn = combined_fn (code); |
1318 | return commutative_binary_fn_p (associated_internal_fn (cfn, type)); |
1319 | } |
1320 | |
1321 | /* Return true if CODE represents a ternary operation and if the first two |
1322 | operands are commutative when CODE is operating on TYPE. */ |
1323 | |
1324 | bool |
1325 | commutative_ternary_op_p (code_helper code, tree type) |
1326 | { |
1327 | if (code.is_tree_code ()) |
1328 | return commutative_ternary_tree_code (tree_code (code)); |
1329 | auto cfn = combined_fn (code); |
1330 | return commutative_ternary_fn_p (associated_internal_fn (cfn, type)); |
1331 | } |
1332 | |
1333 | /* If CODE is commutative in two consecutive operands, return the |
1334 | index of the first, otherwise return -1. */ |
1335 | |
1336 | int |
1337 | first_commutative_argument (code_helper code, tree type) |
1338 | { |
1339 | if (code.is_tree_code ()) |
1340 | { |
1341 | auto tcode = tree_code (code); |
1342 | if (commutative_tree_code (tcode) |
1343 | || commutative_ternary_tree_code (tcode)) |
1344 | return 0; |
1345 | return -1; |
1346 | } |
1347 | auto cfn = combined_fn (code); |
1348 | return first_commutative_argument (associated_internal_fn (cfn, type)); |
1349 | } |
1350 | |
1351 | /* Return true if CODE is a binary operation that is associative when |
1352 | operating on type TYPE. */ |
1353 | |
1354 | bool |
1355 | associative_binary_op_p (code_helper code, tree type) |
1356 | { |
1357 | if (code.is_tree_code ()) |
1358 | return associative_tree_code (tree_code (code)); |
1359 | auto cfn = combined_fn (code); |
1360 | return associative_binary_fn_p (associated_internal_fn (cfn, type)); |
1361 | } |
1362 | |
1363 | /* Return true if the target directly supports operation CODE on type TYPE. |
1364 | QUERY_TYPE acts as for optab_for_tree_code. */ |
1365 | |
1366 | bool |
1367 | directly_supported_p (code_helper code, tree type, optab_subtype query_type) |
1368 | { |
1369 | if (code.is_tree_code ()) |
1370 | { |
1371 | direct_optab optab = optab_for_tree_code (tree_code (code), type, |
1372 | query_type); |
1373 | return (optab != unknown_optab |
1374 | && optab_handler (op: optab, TYPE_MODE (type)) != CODE_FOR_nothing); |
1375 | } |
1376 | gcc_assert (query_type == optab_default |
1377 | || (query_type == optab_vector && VECTOR_TYPE_P (type)) |
1378 | || (query_type == optab_scalar && !VECTOR_TYPE_P (type))); |
1379 | internal_fn ifn = associated_internal_fn (combined_fn (code), type); |
1380 | return (direct_internal_fn_p (fn: ifn) |
1381 | && direct_internal_fn_supported_p (ifn, type, OPTIMIZE_FOR_SPEED)); |
1382 | } |
1383 | |
1384 | /* A wrapper around the internal-fn.cc versions of get_conditional_internal_fn |
1385 | for a code_helper CODE operating on type TYPE. */ |
1386 | |
1387 | internal_fn |
1388 | get_conditional_internal_fn (code_helper code, tree type) |
1389 | { |
1390 | if (code.is_tree_code ()) |
1391 | return get_conditional_internal_fn (tree_code (code)); |
1392 | auto cfn = combined_fn (code); |
1393 | return get_conditional_internal_fn (associated_internal_fn (cfn, type)); |
1394 | } |
1395 | |