1 | /* Lower complex number operations to scalar operations. |
2 | Copyright (C) 2004-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it |
7 | under the terms of the GNU General Public License as published by the |
8 | Free Software Foundation; either version 3, or (at your option) any |
9 | later version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT |
12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "rtl.h" |
25 | #include "tree.h" |
26 | #include "gimple.h" |
27 | #include "cfghooks.h" |
28 | #include "tree-pass.h" |
29 | #include "ssa.h" |
30 | #include "fold-const.h" |
31 | #include "stor-layout.h" |
32 | #include "tree-eh.h" |
33 | #include "gimplify.h" |
34 | #include "gimple-iterator.h" |
35 | #include "gimplify-me.h" |
36 | #include "tree-cfg.h" |
37 | #include "tree-dfa.h" |
38 | #include "tree-ssa.h" |
39 | #include "tree-ssa-propagate.h" |
40 | #include "tree-hasher.h" |
41 | #include "cfgloop.h" |
42 | #include "cfganal.h" |
43 | #include "gimple-fold.h" |
44 | #include "diagnostic-core.h" |
45 | |
46 | |
/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only imaginary parts.  */
50 | |
51 | enum |
52 | { |
53 | UNINITIALIZED = 0, |
54 | ONLY_REAL = 1, |
55 | ONLY_IMAG = 2, |
56 | VARYING = 3 |
57 | }; |
58 | |
59 | /* The type complex_lattice_t holds combinations of the above |
60 | constants. */ |
61 | typedef int complex_lattice_t; |
62 | |
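/* Combine two lattice values into a single value suitable for use in a
   switch, as in PAIR (ONLY_REAL, VARYING).  */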
63 | #define PAIR(a, b) ((a) << 2 | (b)) |
64 | |
65 | class complex_propagate : public ssa_propagation_engine |
66 | { |
67 | enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) final override; |
68 | enum ssa_prop_result visit_phi (gphi *) final override; |
69 | }; |
70 | |
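/* The lattice value of each complex SSA_NAME, indexed by SSA_NAME_VERSION.  */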
71 | static vec<complex_lattice_t> complex_lattice_values; |
72 | |
73 | /* For each complex variable, a pair of variables for the components exists in |
74 | the hashtable. */ |
75 | static int_tree_htab_type *complex_variable_components; |
76 | |
77 | /* For each complex SSA_NAME, a pair of ssa names for the components. */ |
78 | static vec<tree> complex_ssa_name_components; |
79 | |
/* Vector of PHI triplets (original complex PHI and corresponding real and
   imag PHIs) if real and/or imag PHIs contain temporarily
   non-SSA_NAME/non-invariant args that need to be replaced by SSA_NAMEs.  */
83 | static vec<gphi *> phis_to_revisit; |
84 | |
85 | /* BBs that need EH cleanup. */ |
86 | static bitmap need_eh_cleanup; |
87 | |
88 | /* Lookup UID in the complex_variable_components hashtable and return the |
89 | associated tree. */ |
90 | static tree |
91 | cvc_lookup (unsigned int uid) |
92 | { |
93 | struct int_tree_map in; |
94 | in.uid = uid; |
  return complex_variable_components->find_with_hash (in, uid).to;
96 | } |
97 | |
98 | /* Insert the pair UID, TO into the complex_variable_components hashtable. */ |
99 | |
100 | static void |
101 | cvc_insert (unsigned int uid, tree to) |
102 | { |
103 | int_tree_map h; |
104 | int_tree_map *loc; |
105 | |
106 | h.uid = uid; |
  loc = complex_variable_components->find_slot_with_hash (h, uid, INSERT);
108 | loc->uid = uid; |
109 | loc->to = to; |
110 | } |
111 | |
112 | /* Return true if T is not a zero constant. In the case of real values, |
113 | we're only interested in +0.0. */ |
114 | |
115 | static int |
116 | some_nonzerop (tree t) |
117 | { |
118 | int zerop = false; |
119 | |
120 | /* Operations with real or imaginary part of a complex number zero |
121 | cannot be treated the same as operations with a real or imaginary |
122 | operand if we care about the signs of zeros in the result. */ |
123 | if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros) |
124 | zerop = real_identical (&TREE_REAL_CST (t), &dconst0); |
125 | else if (TREE_CODE (t) == FIXED_CST) |
126 | zerop = fixed_zerop (t); |
127 | else if (TREE_CODE (t) == INTEGER_CST) |
128 | zerop = integer_zerop (t); |
129 | |
130 | return !zerop; |
131 | } |
132 | |
133 | |
134 | /* Compute a lattice value from the components of a complex type REAL |
135 | and IMAG. */ |
136 | |
137 | static complex_lattice_t |
138 | find_lattice_value_parts (tree real, tree imag) |
139 | { |
140 | int r, i; |
141 | complex_lattice_t ret; |
142 | |
  r = some_nonzerop (real);
  i = some_nonzerop (imag);
145 | ret = r * ONLY_REAL + i * ONLY_IMAG; |
146 | |
147 | /* ??? On occasion we could do better than mapping 0+0i to real, but we |
148 | certainly don't want to leave it UNINITIALIZED, which eventually gets |
149 | mapped to VARYING. */ |
150 | if (ret == UNINITIALIZED) |
151 | ret = ONLY_REAL; |
152 | |
153 | return ret; |
154 | } |
155 | |
156 | |
157 | /* Compute a lattice value from gimple_val T. */ |
158 | |
159 | static complex_lattice_t |
160 | find_lattice_value (tree t) |
161 | { |
162 | tree real, imag; |
163 | |
164 | switch (TREE_CODE (t)) |
165 | { |
166 | case SSA_NAME: |
167 | return complex_lattice_values[SSA_NAME_VERSION (t)]; |
168 | |
169 | case COMPLEX_CST: |
170 | real = TREE_REALPART (t); |
171 | imag = TREE_IMAGPART (t); |
172 | break; |
173 | |
174 | default: |
175 | gcc_unreachable (); |
176 | } |
177 | |
178 | return find_lattice_value_parts (real, imag); |
179 | } |
180 | |
181 | /* Determine if LHS is something for which we're interested in seeing |
182 | simulation results. */ |
183 | |
184 | static bool |
185 | is_complex_reg (tree lhs) |
186 | { |
187 | return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs); |
188 | } |
189 | |
190 | /* Mark the incoming parameters to the function as VARYING. */ |
191 | |
192 | static void |
193 | init_parameter_lattice_values (void) |
194 | { |
195 | tree parm, ssa_name; |
196 | |
197 | for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm)) |
    if (is_complex_reg (parm)
199 | && (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE) |
200 | complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING; |
201 | } |
202 | |
203 | /* Initialize simulation state for each statement. Return false if we |
204 | found no statements we want to simulate, and thus there's nothing |
205 | for the entire pass to do. */ |
206 | |
207 | static bool |
208 | init_dont_simulate_again (void) |
209 | { |
210 | basic_block bb; |
211 | bool saw_a_complex_op = false; |
212 | |
213 | FOR_EACH_BB_FN (bb, cfun) |
214 | { |
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  prop_set_simulate_again (phi,
				   is_complex_reg (gimple_phi_result (phi)));
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
225 | { |
226 | gimple *stmt; |
227 | tree op0, op1; |
228 | bool sim_again_p; |
229 | |
	  stmt = gsi_stmt (gsi);
231 | op0 = op1 = NULL_TREE; |
232 | |
233 | /* Most control-altering statements must be initially |
234 | simulated, else we won't cover the entire cfg. */ |
235 | sim_again_p = stmt_ends_bb_p (stmt); |
236 | |
	  switch (gimple_code (stmt))
238 | { |
239 | case GIMPLE_CALL: |
	      if (gimple_call_lhs (stmt))
		sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
242 | break; |
243 | |
244 | case GIMPLE_ASSIGN: |
	      sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
	      if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
		  || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
		op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
	      else
		op0 = gimple_assign_rhs1 (stmt);
	      if (gimple_num_ops (stmt) > 2)
		op1 = gimple_assign_rhs2 (stmt);
253 | break; |
254 | |
255 | case GIMPLE_COND: |
	      op0 = gimple_cond_lhs (stmt);
	      op1 = gimple_cond_rhs (stmt);
258 | break; |
259 | |
260 | default: |
261 | break; |
262 | } |
263 | |
264 | if (op0 || op1) |
265 | switch (gimple_expr_code (stmt)) |
266 | { |
267 | case EQ_EXPR: |
268 | case NE_EXPR: |
269 | case PLUS_EXPR: |
270 | case MINUS_EXPR: |
271 | case MULT_EXPR: |
272 | case TRUNC_DIV_EXPR: |
273 | case CEIL_DIV_EXPR: |
274 | case FLOOR_DIV_EXPR: |
275 | case ROUND_DIV_EXPR: |
276 | case RDIV_EXPR: |
277 | if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE |
278 | || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE) |
279 | saw_a_complex_op = true; |
280 | break; |
281 | |
282 | case NEGATE_EXPR: |
283 | case CONJ_EXPR: |
284 | if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE) |
285 | saw_a_complex_op = true; |
286 | break; |
287 | |
288 | case REALPART_EXPR: |
289 | case IMAGPART_EXPR: |
290 | /* The total store transformation performed during |
291 | gimplification creates such uninitialized loads |
292 | and we need to lower the statement to be able |
293 | to fix things up. */ |
294 | if (TREE_CODE (op0) == SSA_NAME |
295 | && ssa_undefined_value_p (op0)) |
296 | saw_a_complex_op = true; |
297 | break; |
298 | |
299 | default: |
300 | /* When expand_complex_move would trigger make sure we |
301 | perform lowering even when there is no actual complex |
302 | operation. This helps consistency and vectorization. */ |
303 | if (TREE_CODE (TREE_TYPE (gimple_op (stmt, 0))) == COMPLEX_TYPE) |
304 | saw_a_complex_op = true; |
305 | break; |
306 | } |
307 | |
	  prop_set_simulate_again (stmt, sim_again_p);
309 | } |
310 | } |
311 | |
312 | return saw_a_complex_op; |
313 | } |
314 | |
315 | |
316 | /* Evaluate statement STMT against the complex lattice defined above. */ |
317 | |
318 | enum ssa_prop_result |
319 | complex_propagate::visit_stmt (gimple *stmt, edge *taken_edge_p ATTRIBUTE_UNUSED, |
320 | tree *result_p) |
321 | { |
322 | complex_lattice_t new_l, old_l, op1_l, op2_l; |
323 | unsigned int ver; |
324 | tree lhs; |
325 | |
326 | lhs = gimple_get_lhs (stmt); |
327 | /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs. */ |
328 | if (!lhs || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) |
329 | return SSA_PROP_VARYING; |
330 | |
331 | /* These conditions should be satisfied due to the initial filter |
332 | set up in init_dont_simulate_again. */ |
333 | gcc_assert (TREE_CODE (lhs) == SSA_NAME); |
334 | gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE); |
335 | |
336 | *result_p = lhs; |
337 | ver = SSA_NAME_VERSION (lhs); |
338 | old_l = complex_lattice_values[ver]; |
339 | |
340 | switch (gimple_expr_code (stmt)) |
341 | { |
342 | case SSA_NAME: |
343 | case COMPLEX_CST: |
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
345 | break; |
346 | |
347 | case COMPLEX_EXPR: |
      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
					gimple_assign_rhs2 (stmt));
350 | break; |
351 | |
352 | case PLUS_EXPR: |
353 | case MINUS_EXPR: |
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));
356 | |
357 | /* We've set up the lattice values such that IOR neatly |
358 | models addition. */ |
359 | new_l = op1_l | op2_l; |
360 | break; |
361 | |
362 | case MULT_EXPR: |
363 | case RDIV_EXPR: |
364 | case TRUNC_DIV_EXPR: |
365 | case CEIL_DIV_EXPR: |
366 | case FLOOR_DIV_EXPR: |
367 | case ROUND_DIV_EXPR: |
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));
370 | |
371 | /* Obviously, if either varies, so does the result. */ |
372 | if (op1_l == VARYING || op2_l == VARYING) |
373 | new_l = VARYING; |
374 | /* Don't prematurely promote variables if we've not yet seen |
375 | their inputs. */ |
376 | else if (op1_l == UNINITIALIZED) |
377 | new_l = op2_l; |
378 | else if (op2_l == UNINITIALIZED) |
379 | new_l = op1_l; |
380 | else |
381 | { |
382 | /* At this point both numbers have only one component. If the |
383 | numbers are of opposite kind, the result is imaginary, |
384 | otherwise the result is real. The add/subtract translates |
385 | the real/imag from/to 0/1; the ^ performs the comparison. */ |
386 | new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL; |
387 | |
388 | /* Don't allow the lattice value to flip-flop indefinitely. */ |
389 | new_l |= old_l; |
390 | } |
391 | break; |
392 | |
393 | case NEGATE_EXPR: |
394 | case CONJ_EXPR: |
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
396 | break; |
397 | |
398 | default: |
399 | new_l = VARYING; |
400 | break; |
401 | } |
402 | |
403 | /* If nothing changed this round, let the propagator know. */ |
404 | if (new_l == old_l) |
405 | return SSA_PROP_NOT_INTERESTING; |
406 | |
407 | complex_lattice_values[ver] = new_l; |
408 | return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING; |
409 | } |
410 | |
411 | /* Evaluate a PHI node against the complex lattice defined above. */ |
412 | |
413 | enum ssa_prop_result |
414 | complex_propagate::visit_phi (gphi *phi) |
415 | { |
416 | complex_lattice_t new_l, old_l; |
417 | unsigned int ver; |
418 | tree lhs; |
419 | int i; |
420 | |
  lhs = gimple_phi_result (phi);
422 | |
423 | /* This condition should be satisfied due to the initial filter |
424 | set up in init_dont_simulate_again. */ |
425 | gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE); |
426 | |
427 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) |
428 | return SSA_PROP_VARYING; |
429 | |
430 | /* We've set up the lattice values such that IOR neatly models PHI meet. */ |
431 | new_l = UNINITIALIZED; |
  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));
434 | |
435 | ver = SSA_NAME_VERSION (lhs); |
436 | old_l = complex_lattice_values[ver]; |
437 | |
438 | if (new_l == old_l) |
439 | return SSA_PROP_NOT_INTERESTING; |
440 | |
441 | complex_lattice_values[ver] = new_l; |
442 | return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING; |
443 | } |
444 | |
445 | /* Create one backing variable for a complex component of ORIG. */ |
446 | |
447 | static tree |
448 | create_one_component_var (tree type, tree orig, const char *prefix, |
449 | const char *suffix, enum tree_code code) |
450 | { |
451 | tree r = create_tmp_var (type, prefix); |
452 | |
453 | DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig); |
454 | DECL_ARTIFICIAL (r) = 1; |
455 | |
456 | if (DECL_NAME (orig) && !DECL_IGNORED_P (orig)) |
457 | { |
458 | const char *name = IDENTIFIER_POINTER (DECL_NAME (orig)); |
459 | name = ACONCAT ((name, suffix, NULL)); |
460 | DECL_NAME (r) = get_identifier (name); |
461 | |
462 | SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig)); |
463 | DECL_HAS_DEBUG_EXPR_P (r) = 1; |
464 | DECL_IGNORED_P (r) = 0; |
465 | copy_warning (r, orig); |
466 | } |
467 | else |
468 | { |
469 | DECL_IGNORED_P (r) = 1; |
470 | suppress_warning (r); |
471 | } |
472 | |
473 | return r; |
474 | } |
475 | |
476 | /* Retrieve a value for a complex component of VAR. */ |
477 | |
478 | static tree |
479 | get_component_var (tree var, bool imag_p) |
480 | { |
481 | size_t decl_index = DECL_UID (var) * 2 + imag_p; |
  tree ret = cvc_lookup (decl_index);
483 | |
484 | if (ret == NULL) |
485 | { |
      ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
				      imag_p ? "CI" : "CR",
				      imag_p ? "$imag" : "$real",
				      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
      cvc_insert (decl_index, ret);
491 | } |
492 | |
493 | return ret; |
494 | } |
495 | |
496 | /* Retrieve a value for a complex component of SSA_NAME. */ |
497 | |
498 | static tree |
499 | get_component_ssa_name (tree ssa_name, bool imag_p) |
500 | { |
  complex_lattice_t lattice = find_lattice_value (ssa_name);
502 | size_t ssa_name_index; |
503 | tree ret; |
504 | |
505 | if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG)) |
506 | { |
507 | tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name)); |
508 | if (SCALAR_FLOAT_TYPE_P (inner_type)) |
509 | return build_real (inner_type, dconst0); |
510 | else |
511 | return build_int_cst (inner_type, 0); |
512 | } |
513 | |
514 | ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p; |
515 | ret = complex_ssa_name_components[ssa_name_index]; |
516 | if (ret == NULL) |
517 | { |
518 | if (SSA_NAME_VAR (ssa_name)) |
519 | ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p); |
520 | else |
521 | ret = TREE_TYPE (TREE_TYPE (ssa_name)); |
      ret = make_ssa_name (ret);
523 | |
524 | /* Copy some properties from the original. In particular, whether it |
525 | is used in an abnormal phi, and whether it's uninitialized. */ |
526 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret) |
527 | = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name); |
528 | if (SSA_NAME_IS_DEFAULT_DEF (ssa_name) |
529 | && VAR_P (SSA_NAME_VAR (ssa_name))) |
530 | { |
531 | SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name); |
532 | set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret); |
533 | } |
534 | |
535 | complex_ssa_name_components[ssa_name_index] = ret; |
536 | } |
537 | |
538 | return ret; |
539 | } |
540 | |
541 | /* Set a value for a complex component of SSA_NAME, return a |
542 | gimple_seq of stuff that needs doing. */ |
543 | |
544 | static gimple_seq |
545 | set_component_ssa_name (tree ssa_name, bool imag_p, tree value) |
546 | { |
  complex_lattice_t lattice = find_lattice_value (ssa_name);
548 | size_t ssa_name_index; |
549 | tree comp; |
550 | gimple *last; |
551 | gimple_seq list; |
552 | |
553 | /* We know the value must be zero, else there's a bug in our lattice |
554 | analysis. But the value may well be a variable known to contain |
555 | zero. We should be safe ignoring it. */ |
556 | if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG)) |
557 | return NULL; |
558 | |
559 | /* If we've already assigned an SSA_NAME to this component, then this |
560 | means that our walk of the basic blocks found a use before the set. |
561 | This is fine. Now we should create an initialization for the value |
562 | we created earlier. */ |
563 | ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p; |
564 | comp = complex_ssa_name_components[ssa_name_index]; |
565 | if (comp) |
566 | ; |
567 | |
568 | /* If we've nothing assigned, and the value we're given is already stable, |
569 | then install that as the value for this SSA_NAME. This preemptively |
570 | copy-propagates the value, which avoids unnecessary memory allocation. */ |
571 | else if (is_gimple_min_invariant (value) |
572 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name)) |
573 | { |
574 | complex_ssa_name_components[ssa_name_index] = value; |
575 | return NULL; |
576 | } |
577 | else if (TREE_CODE (value) == SSA_NAME |
578 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name)) |
579 | { |
580 | /* Replace an anonymous base value with the variable from cvc_lookup. |
581 | This should result in better debug info. */ |
582 | if (!SSA_NAME_IS_DEFAULT_DEF (value) |
583 | && SSA_NAME_VAR (ssa_name) |
584 | && (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value))) |
585 | && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name))) |
586 | { |
587 | comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p); |
588 | replace_ssa_name_symbol (value, comp); |
589 | } |
590 | |
591 | complex_ssa_name_components[ssa_name_index] = value; |
592 | return NULL; |
593 | } |
594 | |
595 | /* Finally, we need to stabilize the result by installing the value into |
596 | a new ssa name. */ |
597 | else |
598 | comp = get_component_ssa_name (ssa_name, imag_p); |
599 | |
600 | /* Do all the work to assign VALUE to COMP. */ |
601 | list = NULL; |
602 | value = force_gimple_operand (value, &list, false, NULL); |
603 | last = gimple_build_assign (comp, value); |
604 | gimple_seq_add_stmt (&list, last); |
605 | gcc_assert (SSA_NAME_DEF_STMT (comp) == last); |
606 | |
607 | return list; |
608 | } |
609 | |
610 | /* Extract the real or imaginary part of a complex variable or constant. |
611 | Make sure that it's a proper gimple_val and gimplify it if not. |
612 | Emit any new code before gsi. */ |
613 | |
static tree
extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
		   bool gimple_p, bool phiarg_p = false)
617 | { |
618 | switch (TREE_CODE (t)) |
619 | { |
620 | case COMPLEX_CST: |
621 | return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t); |
622 | |
623 | case COMPLEX_EXPR: |
624 | gcc_unreachable (); |
625 | |
626 | case BIT_FIELD_REF: |
627 | { |
628 | tree inner_type = TREE_TYPE (TREE_TYPE (t)); |
629 | t = unshare_expr (t); |
630 | TREE_TYPE (t) = inner_type; |
631 | TREE_OPERAND (t, 1) = TYPE_SIZE (inner_type); |
632 | if (imagpart_p) |
633 | TREE_OPERAND (t, 2) = size_binop (PLUS_EXPR, TREE_OPERAND (t, 2), |
634 | TYPE_SIZE (inner_type)); |
635 | if (gimple_p) |
636 | t = force_gimple_operand_gsi (gsi, t, true, NULL, true, |
637 | GSI_SAME_STMT); |
638 | return t; |
639 | } |
640 | |
641 | case VAR_DECL: |
642 | case RESULT_DECL: |
643 | case PARM_DECL: |
644 | case COMPONENT_REF: |
645 | case ARRAY_REF: |
646 | case VIEW_CONVERT_EXPR: |
647 | case MEM_REF: |
648 | { |
649 | tree inner_type = TREE_TYPE (TREE_TYPE (t)); |
650 | |
651 | t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR), |
652 | inner_type, unshare_expr (t)); |
653 | |
654 | if (gimple_p) |
655 | t = force_gimple_operand_gsi (gsi, t, true, NULL, true, |
656 | GSI_SAME_STMT); |
657 | |
658 | return t; |
659 | } |
660 | |
661 | case SSA_NAME: |
      t = get_component_ssa_name (t, imagpart_p);
663 | if (TREE_CODE (t) == SSA_NAME && SSA_NAME_DEF_STMT (t) == NULL) |
664 | gcc_assert (phiarg_p); |
665 | return t; |
666 | |
667 | default: |
668 | gcc_unreachable (); |
669 | } |
670 | } |
671 | |
672 | /* Update the complex components of the ssa name on the lhs of STMT. */ |
673 | |
674 | static void |
675 | update_complex_components (gimple_stmt_iterator *gsi, gimple *stmt, tree r, |
676 | tree i) |
677 | { |
678 | tree lhs; |
679 | gimple_seq list; |
680 | |
681 | lhs = gimple_get_lhs (stmt); |
682 | |
  list = set_component_ssa_name (lhs, false, r);
684 | if (list) |
685 | gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING); |
686 | |
  list = set_component_ssa_name (lhs, true, i);
688 | if (list) |
689 | gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING); |
690 | } |
691 | |
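/* Likewise, but insert the component assignments on edge E rather than
   before a statement iterator; used when the complex value only becomes
   available on that edge (incoming parameters, statements that may throw).  */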
692 | static void |
693 | update_complex_components_on_edge (edge e, tree lhs, tree r, tree i) |
694 | { |
695 | gimple_seq list; |
696 | |
  list = set_component_ssa_name (lhs, false, r);
698 | if (list) |
699 | gsi_insert_seq_on_edge (e, list); |
700 | |
  list = set_component_ssa_name (lhs, true, i);
702 | if (list) |
703 | gsi_insert_seq_on_edge (e, list); |
704 | } |
705 | |
706 | |
707 | /* Update an assignment to a complex variable in place. */ |
708 | |
709 | static void |
710 | update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i) |
711 | { |
  gimple *old_stmt = gsi_stmt (*gsi);
  gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
  gimple *stmt = gsi_stmt (*gsi);
  update_stmt (stmt);
  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);

  update_complex_components (gsi, gsi_stmt (*gsi), r, i);
720 | } |
721 | |
722 | |
723 | /* Generate code at the entry point of the function to initialize the |
724 | component variables for a complex parameter. */ |
725 | |
726 | static void |
727 | update_parameter_components (void) |
728 | { |
729 | edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
730 | tree parm; |
731 | |
732 | for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm)) |
733 | { |
734 | tree type = TREE_TYPE (parm); |
735 | tree ssa_name, r, i; |
736 | |
737 | if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm)) |
738 | continue; |
739 | |
740 | type = TREE_TYPE (type); |
741 | ssa_name = ssa_default_def (cfun, parm); |
742 | if (!ssa_name) |
743 | continue; |
744 | |
745 | r = build1 (REALPART_EXPR, type, ssa_name); |
746 | i = build1 (IMAGPART_EXPR, type, ssa_name); |
      update_complex_components_on_edge (entry_edge, ssa_name, r, i);
748 | } |
749 | } |
750 | |
751 | /* Generate code to set the component variables of a complex variable |
752 | to match the PHI statements in block BB. */ |
753 | |
754 | static void |
755 | update_phi_components (basic_block bb) |
756 | { |
757 | gphi_iterator gsi; |
758 | |
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
760 | { |
761 | gphi *phi = gsi.phi (); |
762 | |
      if (is_complex_reg (gimple_phi_result (phi)))
764 | { |
765 | gphi *p[2] = { NULL, NULL }; |
766 | unsigned int i, j, n; |
767 | bool revisit_phi = false; |
768 | |
769 | for (j = 0; j < 2; j++) |
770 | { |
	      tree l = get_component_ssa_name (gimple_phi_result (phi), j > 0);
772 | if (TREE_CODE (l) == SSA_NAME) |
773 | p[j] = create_phi_node (l, bb); |
774 | } |
775 | |
	  for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
	    {
	      tree comp, arg = gimple_phi_arg_def (phi, i);
	      for (j = 0; j < 2; j++)
		if (p[j])
		  {
		    comp = extract_component (NULL, arg, j > 0, false, true);
783 | if (TREE_CODE (comp) == SSA_NAME |
784 | && SSA_NAME_DEF_STMT (comp) == NULL) |
785 | { |
786 | /* For the benefit of any gimple simplification during |
787 | this pass that might walk SSA_NAME def stmts, |
788 | don't add SSA_NAMEs without definitions into the |
789 | PHI arguments, but put a decl in there instead |
790 | temporarily, and revisit this PHI later on. */ |
791 | if (SSA_NAME_VAR (comp)) |
792 | comp = SSA_NAME_VAR (comp); |
793 | else |
794 | comp = create_tmp_reg (TREE_TYPE (comp), |
795 | get_name (comp)); |
796 | revisit_phi = true; |
797 | } |
798 | SET_PHI_ARG_DEF (p[j], i, comp); |
799 | } |
800 | } |
801 | |
802 | if (revisit_phi) |
803 | { |
	      phis_to_revisit.safe_push (phi);
	      phis_to_revisit.safe_push (p[0]);
	      phis_to_revisit.safe_push (p[1]);
807 | } |
808 | } |
809 | } |
810 | } |
811 | |
812 | /* Expand a complex move to scalars. */ |
813 | |
814 | static void |
815 | expand_complex_move (gimple_stmt_iterator *gsi, tree type) |
816 | { |
817 | tree inner_type = TREE_TYPE (type); |
818 | tree r, i, lhs, rhs; |
  gimple *stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (gimple_num_ops (stmt) == 2)
	rhs = gimple_assign_rhs1 (stmt);
      else
	rhs = NULL_TREE;
    }
  else if (is_gimple_call (stmt))
    {
      lhs = gimple_call_lhs (stmt);
832 | rhs = NULL_TREE; |
833 | } |
834 | else |
835 | gcc_unreachable (); |
836 | |
837 | if (TREE_CODE (lhs) == SSA_NAME) |
838 | { |
839 | if (is_ctrl_altering_stmt (stmt)) |
840 | { |
841 | edge e; |
842 | |
843 | /* The value is not assigned on the exception edges, so we need not |
844 | concern ourselves there. We do need to update on the fallthru |
845 | edge. Find it. */ |
	  e = find_fallthru_edge (gsi_bb (*gsi)->succs);
847 | if (!e) |
848 | gcc_unreachable (); |
849 | |
850 | r = build1 (REALPART_EXPR, inner_type, lhs); |
851 | i = build1 (IMAGPART_EXPR, inner_type, lhs); |
852 | update_complex_components_on_edge (e, lhs, r, i); |
853 | } |
      else if (is_gimple_call (stmt)
	       || gimple_has_side_effects (stmt)
	       || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
857 | { |
858 | r = build1 (REALPART_EXPR, inner_type, lhs); |
859 | i = build1 (IMAGPART_EXPR, inner_type, lhs); |
860 | update_complex_components (gsi, stmt, r, i); |
861 | } |
862 | else |
863 | { |
	  if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
	    {
	      r = extract_component (gsi, rhs, 0, true);
	      i = extract_component (gsi, rhs, 1, true);
	    }
	  else
	    {
	      r = gimple_assign_rhs1 (stmt);
	      i = gimple_assign_rhs2 (stmt);
873 | } |
874 | update_complex_assignment (gsi, r, i); |
875 | } |
876 | } |
877 | else if (rhs |
878 | && (TREE_CODE (rhs) == SSA_NAME || TREE_CODE (rhs) == COMPLEX_CST) |
879 | && !TREE_SIDE_EFFECTS (lhs)) |
880 | { |
881 | tree x; |
882 | gimple *t; |
883 | location_t loc; |
884 | |
      loc = gimple_location (stmt);
      r = extract_component (gsi, rhs, 0, false);
      i = extract_component (gsi, rhs, 1, false);
888 | |
889 | x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs)); |
890 | t = gimple_build_assign (x, r); |
      gimple_set_location (t, loc);
892 | gsi_insert_before (gsi, t, GSI_SAME_STMT); |
893 | |
      if (stmt == gsi_stmt (*gsi))
895 | { |
896 | x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs)); |
	  gimple_assign_set_lhs (stmt, x);
	  gimple_assign_set_rhs1 (stmt, i);
899 | } |
900 | else |
901 | { |
902 | x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs)); |
903 | t = gimple_build_assign (x, i); |
	  gimple_set_location (t, loc);
905 | gsi_insert_before (gsi, t, GSI_SAME_STMT); |
906 | |
	  stmt = gsi_stmt (*gsi);
	  gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
	  gimple_return_set_retval (as_a <greturn *> (stmt), lhs);
910 | } |
911 | |
      update_stmt (stmt);
913 | } |
914 | } |
915 | |
/* Expand complex addition to scalars:
	a + b = (ar + br) + i(ai + bi)
	a - b = (ar - br) + i(ai - bi)
*/
920 | |
921 | static void |
922 | expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type, |
923 | tree ar, tree ai, tree br, tree bi, |
924 | enum tree_code code, |
925 | complex_lattice_t al, complex_lattice_t bl) |
926 | { |
927 | tree rr, ri; |
928 | gimple_seq stmts = NULL; |
  location_t loc = gimple_location (gsi_stmt (*gsi));
930 | |
931 | switch (PAIR (al, bl)) |
932 | { |
933 | case PAIR (ONLY_REAL, ONLY_REAL): |
      rr = gimple_build (&stmts, loc, code, inner_type, ar, br);
935 | ri = ai; |
936 | break; |
937 | |
938 | case PAIR (ONLY_REAL, ONLY_IMAG): |
939 | rr = ar; |
940 | if (code == MINUS_EXPR) |
	ri = gimple_build (&stmts, loc, MINUS_EXPR, inner_type, ai, bi);
942 | else |
943 | ri = bi; |
944 | break; |
945 | |
946 | case PAIR (ONLY_IMAG, ONLY_REAL): |
947 | if (code == MINUS_EXPR) |
	rr = gimple_build (&stmts, loc, MINUS_EXPR, inner_type, ar, br);
949 | else |
950 | rr = br; |
951 | ri = ai; |
952 | break; |
953 | |
954 | case PAIR (ONLY_IMAG, ONLY_IMAG): |
955 | rr = ar; |
      ri = gimple_build (&stmts, loc, code, inner_type, ai, bi);
957 | break; |
958 | |
959 | case PAIR (VARYING, ONLY_REAL): |
      rr = gimple_build (&stmts, loc, code, inner_type, ar, br);
961 | ri = ai; |
962 | break; |
963 | |
964 | case PAIR (VARYING, ONLY_IMAG): |
965 | rr = ar; |
      ri = gimple_build (&stmts, loc, code, inner_type, ai, bi);
967 | break; |
968 | |
969 | case PAIR (ONLY_REAL, VARYING): |
970 | if (code == MINUS_EXPR) |
971 | goto general; |
      rr = gimple_build (&stmts, loc, code, inner_type, ar, br);
973 | ri = bi; |
974 | break; |
975 | |
976 | case PAIR (ONLY_IMAG, VARYING): |
977 | if (code == MINUS_EXPR) |
978 | goto general; |
979 | rr = br; |
      ri = gimple_build (&stmts, loc, code, inner_type, ai, bi);
981 | break; |
982 | |
983 | case PAIR (VARYING, VARYING): |
984 | general: |
      rr = gimple_build (&stmts, loc, code, inner_type, ar, br);
      ri = gimple_build (&stmts, loc, code, inner_type, ai, bi);
987 | break; |
988 | |
989 | default: |
990 | gcc_unreachable (); |
991 | } |
992 | |
993 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
  update_complex_assignment (gsi, rr, ri);
995 | } |
996 | |
997 | /* Expand a complex multiplication or division to a libcall to the c99 |
998 | compliant routines. TYPE is the complex type of the operation. |
999 | If INPLACE_P replace the statement at GSI with |
1000 | the libcall and return NULL_TREE. Else insert the call, assign its |
1001 | result to an output variable and return that variable. If INPLACE_P |
1002 | is true then the statement being replaced should be an assignment |
1003 | statement. */ |
1004 | |
1005 | static tree |
1006 | expand_complex_libcall (gimple_stmt_iterator *gsi, tree type, tree ar, tree ai, |
1007 | tree br, tree bi, enum tree_code code, bool inplace_p) |
1008 | { |
1009 | machine_mode mode; |
1010 | enum built_in_function bcode; |
1011 | tree fn, lhs; |
1012 | gcall *stmt; |
1013 | |
1014 | mode = TYPE_MODE (type); |
1015 | gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT); |
1016 | |
1017 | if (code == MULT_EXPR) |
1018 | bcode = ((enum built_in_function) |
1019 | (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT)); |
1020 | else if (code == RDIV_EXPR) |
1021 | bcode = ((enum built_in_function) |
1022 | (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT)); |
1023 | else |
1024 | gcc_unreachable (); |
  fn = builtin_decl_explicit (bcode);
1026 | stmt = gimple_build_call (fn, 4, ar, ai, br, bi); |
1027 | |
1028 | if (inplace_p) |
1029 | { |
      gimple *old_stmt = gsi_stmt (*gsi);
      gimple_call_set_nothrow (stmt, !stmt_could_throw_p (cfun, old_stmt));
      lhs = gimple_assign_lhs (old_stmt);
      gimple_call_set_lhs (stmt, lhs);
1034 | gsi_replace (gsi, stmt, true); |
1035 | |
1036 | type = TREE_TYPE (type); |
1037 | if (stmt_can_throw_internal (cfun, stmt)) |
1038 | { |
1039 | edge_iterator ei; |
1040 | edge e; |
1041 | FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs) |
1042 | if (!(e->flags & EDGE_EH)) |
1043 | break; |
1044 | basic_block bb = split_edge (e); |
1045 | gimple_stmt_iterator gsi2 = gsi_start_bb (bb); |
	  update_complex_components (&gsi2, stmt,
				     build1 (REALPART_EXPR, type, lhs),
				     build1 (IMAGPART_EXPR, type, lhs));
1049 | return NULL_TREE; |
1050 | } |
1051 | else |
	update_complex_components (gsi, stmt,
				   build1 (REALPART_EXPR, type, lhs),
				   build1 (IMAGPART_EXPR, type, lhs));
1055 | SSA_NAME_DEF_STMT (lhs) = stmt; |
1056 | return NULL_TREE; |
1057 | } |
1058 | |
  gimple_call_set_nothrow (stmt, true);
  lhs = make_ssa_name (type);
  gimple_call_set_lhs (stmt, lhs);
1062 | gsi_insert_before (gsi, stmt, GSI_SAME_STMT); |
1063 | |
1064 | return lhs; |
1065 | } |
1066 | |
/* Perform a complex multiplication of two complex operands A, B represented
   by AR, AI, BR, BI of type TYPE.
   The operation we want is: a * b = (ar*br - ai*bi) + i(ar*bi + br*ai).
   Append the GIMPLE statements to the sequence STMTS.  Store the real and
   imaginary components of the result into RR and RI.  */
1072 | |
1073 | static void |
1074 | expand_complex_multiplication_components (gimple_seq *stmts, location_t loc, |
1075 | tree type, tree ar, tree ai, |
1076 | tree br, tree bi, |
1077 | tree *rr, tree *ri) |
1078 | { |
1079 | tree t1, t2, t3, t4; |
1080 | |
  t1 = gimple_build (stmts, loc, MULT_EXPR, type, ar, br);
  t2 = gimple_build (stmts, loc, MULT_EXPR, type, ai, bi);
  t3 = gimple_build (stmts, loc, MULT_EXPR, type, ar, bi);
1084 | |
1085 | /* Avoid expanding redundant multiplication for the common |
1086 | case of squaring a complex number. */ |
1087 | if (ar == br && ai == bi) |
1088 | t4 = t3; |
1089 | else |
    t4 = gimple_build (stmts, loc, MULT_EXPR, type, ai, br);
1091 | |
  *rr = gimple_build (stmts, loc, MINUS_EXPR, type, t1, t2);
  *ri = gimple_build (stmts, loc, PLUS_EXPR, type, t3, t4);
1094 | } |
1095 | |
1096 | /* Expand complex multiplication to scalars: |
1097 | a * b = (ar*br - ai*bi) + i(ar*bi + br*ai) |
1098 | */ |
1099 | |
1100 | static void |
1101 | expand_complex_multiplication (gimple_stmt_iterator *gsi, tree type, |
1102 | tree ar, tree ai, tree br, tree bi, |
1103 | complex_lattice_t al, complex_lattice_t bl) |
1104 | { |
1105 | tree rr, ri; |
1106 | tree inner_type = TREE_TYPE (type); |
  location_t loc = gimple_location (gsi_stmt (*gsi));
1108 | gimple_seq stmts = NULL; |
1109 | |
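  /* Canonicalize so that the lattice value of the first operand is at least
     that of the second; multiplication is commutative, and this halves the
     number of PAIR cases below.  */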
1110 | if (al < bl) |
1111 | { |
1112 | complex_lattice_t tl; |
1113 | rr = ar, ar = br, br = rr; |
1114 | ri = ai, ai = bi, bi = ri; |
1115 | tl = al, al = bl, bl = tl; |
1116 | } |
1117 | |
1118 | switch (PAIR (al, bl)) |
1119 | { |
1120 | case PAIR (ONLY_REAL, ONLY_REAL): |
      rr = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ar, br);
1122 | ri = ai; |
1123 | break; |
1124 | |
1125 | case PAIR (ONLY_IMAG, ONLY_REAL): |
1126 | rr = ar; |
1127 | if (TREE_CODE (ai) == REAL_CST |
1128 | && real_identical (&TREE_REAL_CST (ai), &dconst1)) |
1129 | ri = br; |
1130 | else |
	ri = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ai, br);
1132 | break; |
1133 | |
1134 | case PAIR (ONLY_IMAG, ONLY_IMAG): |
      rr = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ai, bi);
      rr = gimple_build (&stmts, loc, NEGATE_EXPR, inner_type, rr);
1137 | ri = ar; |
1138 | break; |
1139 | |
1140 | case PAIR (VARYING, ONLY_REAL): |
      rr = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ar, br);
      ri = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ai, br);
1143 | break; |
1144 | |
1145 | case PAIR (VARYING, ONLY_IMAG): |
      rr = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ai, bi);
      rr = gimple_build (&stmts, loc, NEGATE_EXPR, inner_type, rr);
      ri = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ar, bi);
1149 | break; |
1150 | |
1151 | case PAIR (VARYING, VARYING): |
1152 | if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type)) |
1153 | { |
1154 | /* If optimizing for size or not at all just do a libcall. |
1155 | Same if there are exception-handling edges or signaling NaNs. */ |
	  if (optimize == 0 || optimize_bb_for_size_p (gsi_bb (*gsi))
	      || stmt_can_throw_internal (cfun, gsi_stmt (*gsi))
1158 | || flag_signaling_nans) |
1159 | { |
	      expand_complex_libcall (gsi, type, ar, ai, br, bi,
				      MULT_EXPR, true);
1162 | return; |
1163 | } |
1164 | |
1165 | if (!HONOR_NANS (inner_type)) |
1166 | { |
1167 | /* If we are not worrying about NaNs expand to |
1168 | (ar*br - ai*bi) + i(ar*bi + br*ai) directly. */ |
	      expand_complex_multiplication_components (&stmts, loc, inner_type,
							 ar, ai, br, bi,
							 &rr, &ri);
1172 | break; |
1173 | } |
1174 | |
1175 | /* Else, expand x = a * b into |
1176 | x = (ar*br - ai*bi) + i(ar*bi + br*ai); |
1177 | if (isunordered (__real__ x, __imag__ x)) |
1178 | x = __muldc3 (a, b); */ |
1179 | |
1180 | tree tmpr, tmpi; |
	  expand_complex_multiplication_components (&stmts, loc,
						     inner_type, ar, ai,
						     br, bi, &tmpr, &tmpi);
1184 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
1185 | stmts = NULL; |
1186 | |
1187 | gimple *check |
1188 | = gimple_build_cond (UNORDERED_EXPR, tmpr, tmpi, |
1189 | NULL_TREE, NULL_TREE); |
1190 | |
	  basic_block orig_bb = gsi_bb (*gsi);
1192 | /* We want to keep track of the original complex multiplication |
1193 | statement as we're going to modify it later in |
1194 | update_complex_assignment. Make sure that insert_cond_bb leaves |
1195 | that statement in the join block. */ |
	  gsi_prev (gsi);
	  basic_block cond_bb
	    = insert_cond_bb (gsi_bb (*gsi), gsi_stmt (*gsi), check,
1199 | profile_probability::very_unlikely ()); |
1200 | |
	  gimple_stmt_iterator cond_bb_gsi = gsi_last_bb (cond_bb);
1202 | gsi_insert_after (&cond_bb_gsi, gimple_build_nop (), GSI_NEW_STMT); |
1203 | |
	  tree libcall_res
	    = expand_complex_libcall (&cond_bb_gsi, type, ar, ai, br,
				      bi, MULT_EXPR, false);
	  gimple_seq stmts2 = NULL;
	  tree cond_real = gimple_build (&stmts2, loc, REALPART_EXPR,
					 inner_type, libcall_res);
	  tree cond_imag = gimple_build (&stmts2, loc, IMAGPART_EXPR,
					 inner_type, libcall_res);
1212 | gsi_insert_seq_before (&cond_bb_gsi, stmts2, GSI_SAME_STMT); |
1213 | |
	  basic_block join_bb = single_succ_edge (cond_bb)->dest;
	  *gsi = gsi_start_nondebug_after_labels_bb (join_bb);
1216 | |
1217 | /* We have a conditional block with some assignments in cond_bb. |
1218 | Wire up the PHIs to wrap up. */ |
	  rr = make_ssa_name (inner_type);
	  ri = make_ssa_name (inner_type);
	  edge cond_to_join = single_succ_edge (cond_bb);
1222 | edge orig_to_join = find_edge (orig_bb, join_bb); |
1223 | |
	  gphi *real_phi = create_phi_node (rr, gsi_bb (*gsi));
1225 | add_phi_arg (real_phi, cond_real, cond_to_join, UNKNOWN_LOCATION); |
1226 | add_phi_arg (real_phi, tmpr, orig_to_join, UNKNOWN_LOCATION); |
1227 | |
	  gphi *imag_phi = create_phi_node (ri, gsi_bb (*gsi));
1229 | add_phi_arg (imag_phi, cond_imag, cond_to_join, UNKNOWN_LOCATION); |
1230 | add_phi_arg (imag_phi, tmpi, orig_to_join, UNKNOWN_LOCATION); |
1231 | } |
1232 | else |
1233 | /* If we are not worrying about NaNs expand to |
1234 | (ar*br - ai*bi) + i(ar*bi + br*ai) directly. */ |
	expand_complex_multiplication_components (&stmts, loc,
						   inner_type, ar, ai,
						   br, bi, &rr, &ri);
1238 | break; |
1239 | |
1240 | default: |
1241 | gcc_unreachable (); |
1242 | } |
1243 | |
1244 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
  update_complex_assignment (gsi, rr, ri);
1246 | } |
1247 | |
1248 | /* Keep this algorithm in sync with fold-const.cc:const_binop(). |
1249 | |
1250 | Expand complex division to scalars, straightforward algorithm. |
1251 | a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t) |
1252 | t = br*br + bi*bi |
1253 | */ |
1254 | |
1255 | static void |
1256 | expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type, |
1257 | tree ar, tree ai, tree br, tree bi, |
1258 | enum tree_code code) |
1259 | { |
1260 | gimple_seq stmts = NULL; |
  location_t loc = gimple_location (gsi_stmt (*gsi));
1262 | tree rr, ri, div, t1, t2, t3; |
1263 | |
  t1 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, br, br);
  t2 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, bi, bi);
  div = gimple_build (&stmts, loc, PLUS_EXPR, inner_type, t1, t2);

  t1 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ar, br);
  t2 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ai, bi);
  t3 = gimple_build (&stmts, loc, PLUS_EXPR, inner_type, t1, t2);
  rr = gimple_build (&stmts, loc, code, inner_type, t3, div);

  t1 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ai, br);
  t2 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ar, bi);
  t3 = gimple_build (&stmts, loc, MINUS_EXPR, inner_type, t1, t2);
  ri = gimple_build (&stmts, loc, code, inner_type, t3, div);
1277 | |
1278 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
  update_complex_assignment (gsi, rr, ri);
1280 | } |
1281 | |
1282 | /* Keep this algorithm in sync with fold-const.cc:const_binop(). |
1283 | |
1284 | Expand complex division to scalars, modified algorithm to minimize |
1285 | overflow with wide input ranges. */ |
1286 | |
1287 | static void |
1288 | expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type, |
1289 | tree ar, tree ai, tree br, tree bi, |
1290 | enum tree_code code) |
1291 | { |
1292 | tree rr, ri, ratio, div, t1, t2, tr, ti, compare; |
1293 | basic_block bb_cond, bb_true, bb_false, bb_join; |
1294 | gimple *stmt; |
1295 | gimple_seq stmts = NULL; |
  location_t loc = gimple_location (gsi_stmt (*gsi));
1297 | |
1298 | /* Examine |br| < |bi|, and branch. */ |
  t1 = gimple_build (&stmts, loc, ABS_EXPR, inner_type, br);
  t2 = gimple_build (&stmts, loc, ABS_EXPR, inner_type, bi);
  compare = gimple_build (&stmts, loc,
			  LT_EXPR, boolean_type_node, t1, t2);
1303 | |
1304 | bb_cond = bb_true = bb_false = bb_join = NULL; |
1305 | rr = ri = tr = ti = NULL; |
1306 | if (TREE_CODE (compare) != INTEGER_CST) |
1307 | { |
1308 | edge e; |
1309 | gimple *stmt; |
1310 | |
1311 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
1312 | stmts = NULL; |
1313 | stmt = gimple_build_cond (NE_EXPR, compare, boolean_false_node, |
1314 | NULL_TREE, NULL_TREE); |
1315 | gsi_insert_before (gsi, stmt, GSI_SAME_STMT); |
1316 | |
1317 | /* Split the original block, and create the TRUE and FALSE blocks. */ |
      e = split_block (gsi_bb (*gsi), stmt);
1319 | bb_cond = e->src; |
1320 | bb_join = e->dest; |
1321 | bb_true = create_empty_bb (bb_cond); |
1322 | bb_false = create_empty_bb (bb_true); |
1323 | bb_true->count = bb_false->count |
	= bb_cond->count.apply_probability (profile_probability::even ());
1325 | |
1326 | /* Wire the blocks together. */ |
1327 | e->flags = EDGE_TRUE_VALUE; |
      /* TODO: With value profiling we could add a histogram to determine the
	 real branch outcome.  */
1330 | e->probability = profile_probability::even (); |
1331 | redirect_edge_succ (e, bb_true); |
1332 | edge e2 = make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE); |
1333 | e2->probability = profile_probability::even (); |
1334 | make_single_succ_edge (bb_true, bb_join, EDGE_FALLTHRU); |
1335 | make_single_succ_edge (bb_false, bb_join, EDGE_FALLTHRU); |
1336 | add_bb_to_loop (bb_true, bb_cond->loop_father); |
1337 | add_bb_to_loop (bb_false, bb_cond->loop_father); |
1338 | |
1339 | /* Update dominance info. Note that bb_join's data was |
1340 | updated by split_block. */ |
1341 | if (dom_info_available_p (CDI_DOMINATORS)) |
1342 | { |
1343 | set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond); |
1344 | set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond); |
1345 | } |
1346 | |
1347 | rr = create_tmp_reg (inner_type); |
1348 | ri = create_tmp_reg (inner_type); |
1349 | } |
1350 | else |
1351 | { |
1352 | gimple_seq_discard (stmts); |
1353 | stmts = NULL; |
1354 | } |
1355 | |
1356 | /* In the TRUE branch, we compute |
1357 | ratio = br/bi; |
1358 | div = (br * ratio) + bi; |
1359 | tr = (ar * ratio) + ai; |
1360 | ti = (ai * ratio) - ar; |
1361 | tr = tr / div; |
1362 | ti = ti / div; */ |
1363 | if (bb_true || integer_nonzerop (compare)) |
1364 | { |
1365 | if (bb_true) |
1366 | { |
	  *gsi = gsi_last_bb (bb_true);
1368 | gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT); |
1369 | } |
1370 | |
      ratio = gimple_build (&stmts, loc, code, inner_type, br, bi);

      t1 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, br, ratio);
      div = gimple_build (&stmts, loc, PLUS_EXPR, inner_type, t1, bi);

      t1 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ar, ratio);
      tr = gimple_build (&stmts, loc, PLUS_EXPR, inner_type, t1, ai);

      t1 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ai, ratio);
      ti = gimple_build (&stmts, loc, MINUS_EXPR, inner_type, t1, ar);

      tr = gimple_build (&stmts, loc, code, inner_type, tr, div);
      ti = gimple_build (&stmts, loc, code, inner_type, ti, div);
1384 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
1385 | stmts = NULL; |
1386 | |
1387 | if (bb_true) |
1388 | { |
1389 | stmt = gimple_build_assign (rr, tr); |
1390 | gsi_insert_before (gsi, stmt, GSI_SAME_STMT); |
1391 | stmt = gimple_build_assign (ri, ti); |
1392 | gsi_insert_before (gsi, stmt, GSI_SAME_STMT); |
1393 | gsi_remove (gsi, true); |
1394 | } |
1395 | } |
1396 | |
  /* In the FALSE branch, we compute
      ratio = bi/br;
      div = (bi * ratio) + br;
      tr = (ai * ratio) + ar;
      ti = ai - (ar * ratio);
      tr = tr / div;
      ti = ti / div;  */
1404 | if (bb_false || integer_zerop (compare)) |
1405 | { |
1406 | if (bb_false) |
1407 | { |
	  *gsi = gsi_last_bb (bb_false);
1409 | gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT); |
1410 | } |
1411 | |
      ratio = gimple_build (&stmts, loc, code, inner_type, bi, br);

      t1 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, bi, ratio);
      div = gimple_build (&stmts, loc, PLUS_EXPR, inner_type, t1, br);

      t1 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ai, ratio);
      tr = gimple_build (&stmts, loc, PLUS_EXPR, inner_type, t1, ar);

      t1 = gimple_build (&stmts, loc, MULT_EXPR, inner_type, ar, ratio);
      ti = gimple_build (&stmts, loc, MINUS_EXPR, inner_type, ai, t1);

      tr = gimple_build (&stmts, loc, code, inner_type, tr, div);
      ti = gimple_build (&stmts, loc, code, inner_type, ti, div);
1425 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
1426 | stmts = NULL; |
1427 | |
1428 | if (bb_false) |
1429 | { |
1430 | stmt = gimple_build_assign (rr, tr); |
1431 | gsi_insert_before (gsi, stmt, GSI_SAME_STMT); |
1432 | stmt = gimple_build_assign (ri, ti); |
1433 | gsi_insert_before (gsi, stmt, GSI_SAME_STMT); |
1434 | gsi_remove (gsi, true); |
1435 | } |
1436 | } |
1437 | |
1438 | if (bb_join) |
    *gsi = gsi_start_bb (bb_join);
1440 | else |
1441 | rr = tr, ri = ti; |
1442 | |
  update_complex_assignment (gsi, rr, ri);
1444 | } |
1445 | |
1446 | /* Expand complex division to scalars. */ |
1447 | |
1448 | static void |
1449 | expand_complex_division (gimple_stmt_iterator *gsi, tree type, |
1450 | tree ar, tree ai, tree br, tree bi, |
1451 | enum tree_code code, |
1452 | complex_lattice_t al, complex_lattice_t bl) |
1453 | { |
1454 | tree rr, ri; |
1455 | gimple_seq stmts = NULL; |
  location_t loc = gimple_location (gsi_stmt (*gsi));
1457 | |
1458 | tree inner_type = TREE_TYPE (type); |
1459 | switch (PAIR (al, bl)) |
1460 | { |
1461 | case PAIR (ONLY_REAL, ONLY_REAL): |
      rr = gimple_build (&stmts, loc, code, inner_type, ar, br);
1463 | ri = ai; |
1464 | break; |
1465 | |
1466 | case PAIR (ONLY_REAL, ONLY_IMAG): |
1467 | rr = ai; |
      ri = gimple_build (&stmts, loc, code, inner_type, ar, bi);
      ri = gimple_build (&stmts, loc, NEGATE_EXPR, inner_type, ri);
1470 | break; |
1471 | |
1472 | case PAIR (ONLY_IMAG, ONLY_REAL): |
1473 | rr = ar; |
      ri = gimple_build (&stmts, loc, code, inner_type, ai, br);
1475 | break; |
1476 | |
1477 | case PAIR (ONLY_IMAG, ONLY_IMAG): |
      rr = gimple_build (&stmts, loc, code, inner_type, ai, bi);
1479 | ri = ar; |
1480 | break; |
1481 | |
1482 | case PAIR (VARYING, ONLY_REAL): |
      rr = gimple_build (&stmts, loc, code, inner_type, ar, br);
      ri = gimple_build (&stmts, loc, code, inner_type, ai, br);
1485 | break; |
1486 | |
1487 | case PAIR (VARYING, ONLY_IMAG): |
      rr = gimple_build (&stmts, loc, code, inner_type, ai, bi);
      ri = gimple_build (&stmts, loc, code, inner_type, ar, bi);
      ri = gimple_build (&stmts, loc, NEGATE_EXPR, inner_type, ri);
1491 | break; |
1492 | |
1493 | case PAIR (ONLY_REAL, VARYING): |
1494 | case PAIR (ONLY_IMAG, VARYING): |
1495 | case PAIR (VARYING, VARYING): |
1496 | switch (flag_complex_method) |
1497 | { |
1498 | case 0: |
1499 | /* straightforward implementation of complex divide acceptable. */ |
1500 | expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code); |
1501 | break; |
1502 | |
1503 | case 2: |
1504 | if (SCALAR_FLOAT_TYPE_P (inner_type)) |
1505 | { |
	      expand_complex_libcall (gsi, type, ar, ai, br, bi, code, true);
1507 | break; |
1508 | } |
1509 | /* FALLTHRU */ |
1510 | |
1511 | case 1: |
1512 | /* wide ranges of inputs must work for complex divide. */ |
1513 | expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code); |
1514 | break; |
1515 | |
1516 | default: |
1517 | gcc_unreachable (); |
1518 | } |
1519 | return; |
1520 | |
1521 | default: |
1522 | gcc_unreachable (); |
1523 | } |
1524 | |
1525 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
  update_complex_assignment (gsi, rr, ri);
1527 | } |
1528 | |
1529 | /* Expand complex negation to scalars: |
1530 | -a = (-ar) + i(-ai) |
1531 | */ |
1532 | |
1533 | static void |
1534 | expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type, |
1535 | tree ar, tree ai) |
1536 | { |
1537 | tree rr, ri; |
1538 | gimple_seq stmts = NULL; |
  location_t loc = gimple_location (gsi_stmt (*gsi));
1540 | |
  rr = gimple_build (&stmts, loc, NEGATE_EXPR, inner_type, ar);
  ri = gimple_build (&stmts, loc, NEGATE_EXPR, inner_type, ai);
1543 | |
1544 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
  update_complex_assignment (gsi, rr, ri);
1546 | } |
1547 | |
1548 | /* Expand complex conjugate to scalars: |
1549 | ~a = (ar) + i(-ai) |
1550 | */ |
1551 | |
1552 | static void |
1553 | expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type, |
1554 | tree ar, tree ai) |
1555 | { |
1556 | tree ri; |
1557 | gimple_seq stmts = NULL; |
  location_t loc = gimple_location (gsi_stmt (*gsi));
1559 | |
  ri = gimple_build (&stmts, loc, NEGATE_EXPR, inner_type, ai);
1561 | |
1562 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
  update_complex_assignment (gsi, ar, ri);
1564 | } |
1565 | |
1566 | /* Expand complex comparison (EQ or NE only). */ |
1567 | |
1568 | static void |
1569 | expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai, |
1570 | tree br, tree bi, enum tree_code code) |
1571 | { |
1572 | tree cr, ci, cc, type; |
  gimple *stmt = gsi_stmt (*gsi);
1574 | gimple_seq stmts = NULL; |
  location_t loc = gimple_location (stmt);
1576 | |
  cr = gimple_build (&stmts, loc, code, boolean_type_node, ar, br);
  ci = gimple_build (&stmts, loc, code, boolean_type_node, ai, bi);
  cc = gimple_build (&stmts, loc,
		     (code == EQ_EXPR ? BIT_AND_EXPR : BIT_IOR_EXPR),
		     boolean_type_node, cr, ci);
1582 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
1583 | |
switch (gimple_code (stmt))
1585 | { |
1586 | case GIMPLE_RETURN: |
1587 | { |
greturn *return_stmt = as_a <greturn *> (stmt);
type = TREE_TYPE (gimple_return_retval (return_stmt));
gimple_return_set_retval (return_stmt, fold_convert (type, cc));
1591 | } |
1592 | break; |
1593 | |
1594 | case GIMPLE_ASSIGN: |
1595 | type = TREE_TYPE (gimple_assign_lhs (stmt)); |
1596 | gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc)); |
stmt = gsi_stmt (*gsi);
1598 | break; |
1599 | |
1600 | case GIMPLE_COND: |
1601 | { |
gcond *cond_stmt = as_a <gcond *> (stmt);
gimple_cond_set_code (cond_stmt, EQ_EXPR);
gimple_cond_set_lhs (cond_stmt, cc);
gimple_cond_set_rhs (cond_stmt, boolean_true_node);
1606 | } |
1607 | break; |
1608 | |
1609 | default: |
1610 | gcc_unreachable (); |
1611 | } |
1612 | |
update_stmt (stmt);
if (maybe_clean_eh_stmt (stmt))
bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
1616 | } |
1617 | |
1618 | /* Expand inline asm that sets some complex SSA_NAMEs. */ |
1619 | |
1620 | static void |
1621 | expand_complex_asm (gimple_stmt_iterator *gsi) |
1622 | { |
gasm *stmt = as_a <gasm *> (gsi_stmt (*gsi));
1624 | unsigned int i; |
1625 | bool diagnosed_p = false; |
1626 | |
for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1628 | { |
tree link = gimple_asm_output_op (stmt, i);
1630 | tree op = TREE_VALUE (link); |
1631 | if (TREE_CODE (op) == SSA_NAME |
1632 | && TREE_CODE (TREE_TYPE (op)) == COMPLEX_TYPE) |
1633 | { |
if (gimple_asm_nlabels (stmt) > 0)
1635 | { |
1636 | if (!diagnosed_p) |
1637 | { |
sorry_at (gimple_location (stmt),
"%<asm goto%> with complex typed outputs");
1640 | diagnosed_p = true; |
1641 | } |
1642 | /* Make sure to not ICE later, see PR105165. */ |
1643 | tree zero = build_zero_cst (TREE_TYPE (TREE_TYPE (op))); |
set_component_ssa_name (op, false, zero);
set_component_ssa_name (op, true, zero);
1646 | continue; |
1647 | } |
1648 | tree type = TREE_TYPE (op); |
1649 | tree inner_type = TREE_TYPE (type); |
1650 | tree r = build1 (REALPART_EXPR, inner_type, op); |
1651 | tree i = build1 (IMAGPART_EXPR, inner_type, op); |
gimple_seq list = set_component_ssa_name (op, false, r);
1653 | |
1654 | if (list) |
1655 | gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING); |
1656 | |
list = set_component_ssa_name (op, true, i);
1658 | if (list) |
1659 | gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING); |
1660 | } |
1661 | } |
1662 | } |
1663 | |
1664 | /* Process one statement. If we identify a complex operation, expand it. */ |
1665 | |
1666 | static void |
1667 | expand_complex_operations_1 (gimple_stmt_iterator *gsi) |
1668 | { |
gimple *stmt = gsi_stmt (*gsi);
1670 | tree type, inner_type, lhs; |
1671 | tree ac, ar, ai, bc, br, bi; |
1672 | complex_lattice_t al, bl; |
1673 | enum tree_code code; |
1674 | |
if (gimple_code (stmt) == GIMPLE_ASM)
1676 | { |
1677 | expand_complex_asm (gsi); |
1678 | return; |
1679 | } |
1680 | |
1681 | lhs = gimple_get_lhs (stmt); |
if (!lhs && gimple_code (stmt) != GIMPLE_COND)
1683 | return; |
1684 | |
1685 | type = TREE_TYPE (gimple_op (stmt, 0)); |
1686 | code = gimple_expr_code (stmt); |
1687 | |
1688 | /* Initial filter for operations we handle. */ |
1689 | switch (code) |
1690 | { |
1691 | case PLUS_EXPR: |
1692 | case MINUS_EXPR: |
1693 | case MULT_EXPR: |
1694 | case TRUNC_DIV_EXPR: |
1695 | case CEIL_DIV_EXPR: |
1696 | case FLOOR_DIV_EXPR: |
1697 | case ROUND_DIV_EXPR: |
1698 | case RDIV_EXPR: |
1699 | case NEGATE_EXPR: |
1700 | case CONJ_EXPR: |
1701 | if (TREE_CODE (type) != COMPLEX_TYPE) |
1702 | return; |
1703 | inner_type = TREE_TYPE (type); |
1704 | break; |
1705 | |
1706 | case EQ_EXPR: |
1707 | case NE_EXPR: |
1708 | /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR |
1709 | subcode, so we need to access the operands using gimple_op. */ |
1710 | inner_type = TREE_TYPE (gimple_op (stmt, 1)); |
1711 | if (TREE_CODE (inner_type) != COMPLEX_TYPE) |
1712 | return; |
1713 | break; |
1714 | |
1715 | default: |
1716 | { |
1717 | tree rhs; |
1718 | |
1719 | /* GIMPLE_COND may also fallthru here, but we do not need to |
1720 | do anything with it. */ |
if (gimple_code (stmt) == GIMPLE_COND)
1722 | return; |
1723 | |
1724 | if (TREE_CODE (type) == COMPLEX_TYPE) |
1725 | expand_complex_move (gsi, type); |
else if (is_gimple_assign (stmt)
&& (gimple_assign_rhs_code (stmt) == REALPART_EXPR
|| gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
&& TREE_CODE (lhs) == SSA_NAME)
{
rhs = gimple_assign_rhs1 (stmt);
rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
gimple_assign_rhs_code (stmt) == IMAGPART_EXPR,
false);
gimple_assign_set_rhs_from_tree (gsi, rhs);
stmt = gsi_stmt (*gsi);
update_stmt (stmt);
1739 | } |
1740 | } |
1741 | return; |
1742 | } |
1743 | |
/* Extract the components of the two complex values. Make sure to
handle the common case of the same value used twice specially. */
if (is_gimple_assign (stmt))
1747 | { |
ac = gimple_assign_rhs1 (stmt);
bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
1750 | } |
1751 | /* GIMPLE_CALL cannot get here. */ |
1752 | else |
1753 | { |
ac = gimple_cond_lhs (stmt);
bc = gimple_cond_rhs (stmt);
1756 | } |
1757 | |
ar = extract_component (gsi, ac, false, true);
ai = extract_component (gsi, ac, true, true);
1760 | |
1761 | if (ac == bc) |
1762 | br = ar, bi = ai; |
1763 | else if (bc) |
1764 | { |
br = extract_component (gsi, bc, 0, true);
bi = extract_component (gsi, bc, 1, true);
1767 | } |
1768 | else |
1769 | br = bi = NULL_TREE; |
1770 | |
al = find_lattice_value (ac);
1772 | if (al == UNINITIALIZED) |
1773 | al = VARYING; |
1774 | |
1775 | if (TREE_CODE_CLASS (code) == tcc_unary) |
1776 | bl = UNINITIALIZED; |
1777 | else if (ac == bc) |
1778 | bl = al; |
1779 | else |
1780 | { |
bl = find_lattice_value (bc);
1782 | if (bl == UNINITIALIZED) |
1783 | bl = VARYING; |
1784 | } |
1785 | |
1786 | switch (code) |
1787 | { |
1788 | case PLUS_EXPR: |
1789 | case MINUS_EXPR: |
1790 | expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl); |
1791 | break; |
1792 | |
1793 | case MULT_EXPR: |
1794 | expand_complex_multiplication (gsi, type, ar, ai, br, bi, al, bl); |
1795 | break; |
1796 | |
1797 | case TRUNC_DIV_EXPR: |
1798 | case CEIL_DIV_EXPR: |
1799 | case FLOOR_DIV_EXPR: |
1800 | case ROUND_DIV_EXPR: |
1801 | case RDIV_EXPR: |
1802 | expand_complex_division (gsi, type, ar, ai, br, bi, code, al, bl); |
1803 | break; |
1804 | |
1805 | case NEGATE_EXPR: |
1806 | expand_complex_negation (gsi, inner_type, ar, ai); |
1807 | break; |
1808 | |
1809 | case CONJ_EXPR: |
1810 | expand_complex_conjugate (gsi, inner_type, ar, ai); |
1811 | break; |
1812 | |
1813 | case EQ_EXPR: |
1814 | case NE_EXPR: |
1815 | expand_complex_comparison (gsi, ar, ai, br, bi, code); |
1816 | break; |
1817 | |
1818 | default: |
1819 | gcc_unreachable (); |
1820 | } |
1821 | } |
1822 | |
1823 | |
1824 | /* Entry point for complex operation lowering during optimization. */ |
1825 | |
1826 | static unsigned int |
1827 | tree_lower_complex (void) |
1828 | { |
1829 | gimple_stmt_iterator gsi; |
1830 | basic_block bb; |
1831 | int n_bbs, i; |
1832 | int *rpo; |
1833 | |
1834 | if (!init_dont_simulate_again ()) |
1835 | return 0; |
1836 | |
1837 | complex_lattice_values.create (num_ssa_names); |
complex_lattice_values.safe_grow_cleared (num_ssa_names, true);
1839 | |
1840 | init_parameter_lattice_values (); |
1841 | class complex_propagate complex_propagate; |
1842 | complex_propagate.ssa_propagate (); |
1843 | |
1844 | need_eh_cleanup = BITMAP_ALLOC (NULL); |
1845 | |
1846 | complex_variable_components = new int_tree_htab_type (10); |
1847 | |
complex_ssa_name_components.create (2 * num_ssa_names);
complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names, true);
1850 | |
1851 | update_parameter_components (); |
1852 | |
1853 | rpo = XNEWVEC (int, last_basic_block_for_fn (cfun)); |
1854 | n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false); |
1855 | for (i = 0; i < n_bbs; i++) |
1856 | { |
1857 | bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]); |
1858 | if (!bb) |
1859 | continue; |
1860 | update_phi_components (bb); |
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
expand_complex_operations_1 (&gsi);
1863 | } |
1864 | |
free (rpo);
1866 | |
1867 | if (!phis_to_revisit.is_empty ()) |
1868 | { |
1869 | unsigned int n = phis_to_revisit.length (); |
1870 | for (unsigned int j = 0; j < n; j += 3) |
1871 | for (unsigned int k = 0; k < 2; k++) |
1872 | if (gphi *phi = phis_to_revisit[j + k + 1]) |
1873 | { |
unsigned int m = gimple_phi_num_args (phi);
1875 | for (unsigned int l = 0; l < m; ++l) |
1876 | { |
tree op = gimple_phi_arg_def (phi, l);
1878 | if (TREE_CODE (op) == SSA_NAME |
1879 | || is_gimple_min_invariant (op)) |
1880 | continue; |
tree arg = gimple_phi_arg_def (phis_to_revisit[j], l);
op = extract_component (NULL, arg, k > 0, false, false);
1883 | SET_PHI_ARG_DEF (phi, l, op); |
1884 | } |
1885 | } |
1886 | phis_to_revisit.release (); |
1887 | } |
1888 | |
1889 | gsi_commit_edge_inserts (); |
1890 | |
1891 | unsigned todo |
1892 | = gimple_purge_all_dead_eh_edges (need_eh_cleanup) ? TODO_cleanup_cfg : 0; |
1893 | BITMAP_FREE (need_eh_cleanup); |
1894 | |
1895 | delete complex_variable_components; |
1896 | complex_variable_components = NULL; |
1897 | complex_ssa_name_components.release (); |
1898 | complex_lattice_values.release (); |
1899 | return todo; |
1900 | } |
1901 | |
1902 | namespace { |
1903 | |
1904 | const pass_data pass_data_lower_complex = |
1905 | { |
GIMPLE_PASS, /* type */
"cplxlower", /* name */
OPTGROUP_NONE, /* optinfo_flags */
TV_NONE, /* tv_id */
PROP_ssa, /* properties_required */
PROP_gimple_lcx, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_update_ssa, /* todo_flags_finish */
1915 | }; |
1916 | |
1917 | class pass_lower_complex : public gimple_opt_pass |
1918 | { |
1919 | public: |
1920 | pass_lower_complex (gcc::context *ctxt) |
1921 | : gimple_opt_pass (pass_data_lower_complex, ctxt) |
1922 | {} |
1923 | |
1924 | /* opt_pass methods: */ |
1925 | opt_pass * clone () final override { return new pass_lower_complex (m_ctxt); } |
1926 | unsigned int execute (function *) final override |
1927 | { |
1928 | return tree_lower_complex (); |
1929 | } |
1930 | |
1931 | }; // class pass_lower_complex |
1932 | |
1933 | } // anon namespace |
1934 | |
1935 | gimple_opt_pass * |
1936 | make_pass_lower_complex (gcc::context *ctxt) |
1937 | { |
1938 | return new pass_lower_complex (ctxt); |
1939 | } |
1940 | |
1941 | |
1942 | namespace { |
1943 | |
1944 | const pass_data pass_data_lower_complex_O0 = |
1945 | { |
GIMPLE_PASS, /* type */
"cplxlower0", /* name */
OPTGROUP_NONE, /* optinfo_flags */
TV_NONE, /* tv_id */
PROP_cfg, /* properties_required */
PROP_gimple_lcx, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_update_ssa, /* todo_flags_finish */
1955 | }; |
1956 | |
1957 | class pass_lower_complex_O0 : public gimple_opt_pass |
1958 | { |
1959 | public: |
1960 | pass_lower_complex_O0 (gcc::context *ctxt) |
1961 | : gimple_opt_pass (pass_data_lower_complex_O0, ctxt) |
1962 | {} |
1963 | |
1964 | /* opt_pass methods: */ |
1965 | bool gate (function *fun) final override |
1966 | { |
1967 | /* With errors, normal optimization passes are not run. If we don't |
1968 | lower complex operations at all, rtl expansion will abort. */ |
1969 | return !(fun->curr_properties & PROP_gimple_lcx); |
1970 | } |
1971 | |
1972 | unsigned int execute (function *) final override |
1973 | { |
1974 | return tree_lower_complex (); |
1975 | } |
1976 | |
1977 | }; // class pass_lower_complex_O0 |
1978 | |
1979 | } // anon namespace |
1980 | |
1981 | gimple_opt_pass * |
1982 | make_pass_lower_complex_O0 (gcc::context *ctxt) |
1983 | { |
1984 | return new pass_lower_complex_O0 (ctxt); |
1985 | } |
1986 | |