/* Post-reload compare elimination.
   Copyright (C) 2010-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* There is a set of targets whose general-purpose move or addition
   instructions clobber the flags.  These targets cannot split their
   CBRANCH/CSTORE etc. patterns before reload is complete, lest reload
   itself insert these instructions in between the flags setter and user.
   Because these targets cannot split the compare from the use, they
   cannot make use of the comparison elimination offered by the combine pass.

   This is a small pass intended to provide comparison elimination similar to
   what was available via NOTICE_UPDATE_CC for cc0 targets.

   This pass assumes:

   (0) CBRANCH/CSTORE etc. have been split in pass_split_after_reload.

   (1) All comparison patterns are represented as

	[(set (reg:CC) (compare:CC (reg) (reg_or_immediate)))]

   (2) All insn patterns that modify the flags are represented as

	[(set (reg) (operation))
	 (clobber (reg:CC))]

   (3) If an insn of form (2) can usefully set the flags, there is
       another pattern of the form

	[(set (reg:CCM) (compare:CCM (operation) (immediate)))
	 (set (reg) (operation))]

       The mode CCM will be chosen as if by SELECT_CC_MODE.

   Note that unlike NOTICE_UPDATE_CC, we do not handle memory operands.
   This could be handled as a future enhancement.
*/
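
/* As an illustration (the registers and modes below are invented, not
   taken from any particular target's machine description), the pass aims
   to rewrite a sequence such as

	[(set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 1)))
	 (clobber (reg:CC))]
	[(set (reg:CC) (compare:CC (reg:SI 0) (const_int 0)))]
	[(set (pc) (if_then_else (eq (reg:CC) (const_int 0)) ...))]

   into

	[(set (reg:CCZ) (compare:CCZ (plus:SI (reg:SI 0) (reg:SI 1))
				     (const_int 0)))
	 (set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 1)))]
	[(set (pc) (if_then_else (eq (reg:CCZ) (const_int 0)) ...))]

   assuming the target provides the combined flag-setting addition of
   form (3) above and its SELECT_CC_MODE picks a mode such as CCZ for an
   equality test.  */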

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "tree-pass.h"
#include "domwalk.h"


/* These structures describe a comparison and how it is used.  */

/* The choice of maximum 3 uses comes from wanting to eliminate the two
   duplicate compares from a three-way branch on the sign of a value.
   This is also sufficient to eliminate the duplicate compare against the
   high-part of a double-word comparison.  */
#define MAX_CMP_USE 3
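
/* For illustration (register numbers and modes invented): before this
   pass, part of a three-way branch on the sign of a value may look like

	(set (reg:CC) (compare:CC (reg:SI 1) (const_int 0)))
	(set (pc) (if_then_else (lt (reg:CC) (const_int 0)) ...))
	(set (reg:CC) (compare:CC (reg:SI 1) (const_int 0)))
	(set (pc) (if_then_else (gt (reg:CC) (const_int 0)) ...))

   Deleting the second, redundant compare (and likewise the third one of a
   full three-way branch) leaves a single compare whose flags result feeds
   every branch, hence the need to track several uses per comparison.  */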

struct comparison_use
{
  /* The instruction in which the result of the compare is used.  */
  rtx_insn *insn;
  /* The location of the flags register within the use.  */
  rtx *loc;
  /* The comparison code applied against the flags register.  */
  enum rtx_code code;
};

struct comparison
{
  /* The comparison instruction.  */
  rtx_insn *insn;

  /* The insn prior to the comparison insn that clobbers the flags.  */
  rtx_insn *prev_clobber;

  /* The insn prior to the comparison insn that sets in_a REG.  */
  rtx_insn *in_a_setter;

  /* The two values being compared.  These will be either REGs or
     constants.  */
  rtx in_a, in_b;

  /* The REG_EH_REGION of the comparison.  */
  rtx eh_note;

  /* Information about how this comparison is used.  */
  struct comparison_use uses[MAX_CMP_USE];

  /* The original CC_MODE for this comparison.  */
  machine_mode orig_mode;

  /* The number of uses identified for this comparison.  */
  unsigned short n_uses;

  /* True if not all uses of this comparison have been identified.
     This can happen either for overflowing the array above, or if
     the flags register is used in some unusual context.  */
  bool missing_uses;

  /* True if its inputs are still valid at the end of the block.  */
  bool inputs_valid;

  /* Whether IN_A is wrapped in a NOT before being compared.  */
  bool not_in_a;
};

static vec<comparison *> all_compares;

/* Return whether X is a NOT unary expression.  */

static bool
is_not (rtx x)
{
  return GET_CODE (x) == NOT;
}

/* Strip a NOT unary expression around X, if any.  */

static rtx
strip_not (rtx x)
{
  if (is_not (x))
    return XEXP (x, 0);

  return x;
}

/* Look for a "conforming" comparison, as defined above.  If valid, return
   the rtx for the COMPARE itself.  */

static rtx
conforming_compare (rtx_insn *insn)
{
  rtx set, src, dest;

  set = single_set (insn);
  if (set == NULL)
    return NULL;

  src = SET_SRC (set);
  if (GET_CODE (src) != COMPARE)
    return NULL;

  dest = SET_DEST (set);
  if (!REG_P (dest) || REGNO (dest) != targetm.flags_regnum)
    return NULL;

  if (!REG_P (strip_not (XEXP (src, 0))))
    return NULL;

  if (CONSTANT_P (XEXP (src, 1)) || REG_P (XEXP (src, 1)))
    return src;

  if (GET_CODE (XEXP (src, 1)) == UNSPEC)
    {
      for (int i = 0; i < XVECLEN (XEXP (src, 1), 0); i++)
        if (!REG_P (XVECEXP (XEXP (src, 1), 0, i)))
          return NULL;
      return src;
    }

  return NULL;
}
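
/* For example (register numbers invented, and assuming (reg:CC) is the
   register named by targetm.flags_regnum), an insn whose pattern is

	(set (reg:CC) (compare:CC (reg:SI 1) (const_int 10)))

   conforms, as does one comparing a register against an UNSPEC whose
   operands are all registers; a compare whose first operand is a MEM
   does not, since memory operands are not handled by this pass.  */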

/* Look for a pattern of the "correct" form for an insn with a flags clobber
   for which we may be able to eliminate a compare later.  We're not looking
   to validate any inputs at this time, merely see that the basic shape is
   correct.  The term "arithmetic" may be somewhat misleading...  */

static bool
arithmetic_flags_clobber_p (rtx_insn *insn)
{
  rtx pat, x;

  if (!NONJUMP_INSN_P (insn))
    return false;
  pat = PATTERN (insn);
  if (asm_noperands (pat) >= 0)
    return false;

  if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) == 2)
    {
      x = XVECEXP (pat, 0, 0);
      if (GET_CODE (x) != SET)
        return false;
      x = SET_DEST (x);
      if (!REG_P (x))
        return false;

      x = XVECEXP (pat, 0, 1);
      if (GET_CODE (x) == CLOBBER)
        {
          x = XEXP (x, 0);
          if (REG_P (x) && REGNO (x) == targetm.flags_regnum)
            return true;
        }
    }

  return false;
}
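
/* For example, again assuming (reg:CC) is the target's flags register and
   with invented register numbers, a two-element PARALLEL of the shape

	[(set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 1)))
	 (clobber (reg:CC))]

   satisfies arithmetic_flags_clobber_p, whereas a PARALLEL whose second
   element sets (rather than clobbers) the flags does not.  */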

/* Look for uses of FLAGS in INSN.  If we find one we can analyze, record
   it in CMP; otherwise indicate that we've missed a use.  */

static void
find_flags_uses_in_insn (struct comparison *cmp, rtx_insn *insn)
{
  df_ref use;

  /* If we've already lost track of uses, don't bother collecting more.  */
  if (cmp->missing_uses)
    return;

  /* Find a USE of the flags register.  */
  FOR_EACH_INSN_USE (use, insn)
    if (DF_REF_REGNO (use) == targetm.flags_regnum)
      {
        rtx x, *loc;

        /* If this is an unusual use, quit.  */
        if (DF_REF_TYPE (use) != DF_REF_REG_USE)
          goto fail;

        /* If we've run out of slots to record uses, quit.  */
        if (cmp->n_uses == MAX_CMP_USE)
          goto fail;

        /* Unfortunately the location of the flags register, while present
           in the reference structure, doesn't help.  We need to find the
           comparison code that is outer to the actual flags use.  */
        loc = DF_REF_LOC (use);
        x = PATTERN (insn);
        if (GET_CODE (x) == PARALLEL)
          x = XVECEXP (x, 0, 0);
        if (GET_CODE (x) == SET)
          x = SET_SRC (x);
        if (GET_CODE (x) == IF_THEN_ELSE)
          x = XEXP (x, 0);
        if (COMPARISON_P (x)
            && loc == &XEXP (x, 0)
            && XEXP (x, 1) == const0_rtx)
          {
            /* We've found a use of the flags that we understand.  */
            struct comparison_use *cuse = &cmp->uses[cmp->n_uses++];
            cuse->insn = insn;
            cuse->loc = loc;
            cuse->code = GET_CODE (x);
          }
        else
          goto fail;
      }
  return;

 fail:
  /* We failed to recognize this use of the flags register.  */
  cmp->missing_uses = true;
}

class find_comparison_dom_walker : public dom_walker
{
public:
  find_comparison_dom_walker (cdi_direction direction)
    : dom_walker (direction) {}

  edge before_dom_children (basic_block) final override;
};

/* Return true if conforming COMPARE with EH_NOTE is redundant with comparison
   CMP and can thus be eliminated.  */

static bool
can_eliminate_compare (rtx compare, rtx eh_note, struct comparison *cmp)
{
  /* Take care that it's in the same EH region.  */
  if (cfun->can_throw_non_call_exceptions
      && !rtx_equal_p (eh_note, cmp->eh_note))
    return false;

  /* Make sure the compare is redundant with the previous.  */
  if (!rtx_equal_p (strip_not (XEXP (compare, 0)), cmp->in_a)
      || !rtx_equal_p (XEXP (compare, 1), cmp->in_b))
    return false;

  if (is_not (XEXP (compare, 0)) != cmp->not_in_a)
    return false;

  /* New mode must be compatible with the previous compare mode.  */
  machine_mode new_mode
    = targetm.cc_modes_compatible (GET_MODE (compare), cmp->orig_mode);

  if (new_mode == VOIDmode)
    return false;

  if (cmp->orig_mode != new_mode)
    {
      /* Generate new comparison for substitution.  */
      rtx flags = gen_rtx_REG (new_mode, targetm.flags_regnum);
      rtx x = gen_rtx_COMPARE (new_mode, cmp->in_a, cmp->in_b);
      x = gen_rtx_SET (flags, x);

      if (!validate_change (cmp->insn, &PATTERN (cmp->insn), x, false))
        return false;

      cmp->orig_mode = new_mode;
    }

  return true;
}

/* Identify comparison instructions within BB.  If the flags from the last
   compare in the BB are live at the end of the block, install the compare
   in BB->AUX.  Called via dom_walker.walk ().  */

edge
find_comparison_dom_walker::before_dom_children (basic_block bb)
{
  rtx_insn *insn, *next;
  bool need_purge = false;
  rtx_insn *last_setter[FIRST_PSEUDO_REGISTER];

  /* The last comparison that was made.  Will be reset to NULL
     once the flags are clobbered.  */
  struct comparison *last_cmp = NULL;

  /* True iff the last comparison has not been clobbered, nor
     have its inputs.  Used to eliminate duplicate compares.  */
  bool last_cmp_valid = false;

  /* The last insn that clobbered the flags, if that insn is of
     a form that may be valid for eliminating a following compare.
     To be reset to NULL once the flags are set otherwise.  */
  rtx_insn *last_clobber = NULL;

  /* Propagate the last live comparison throughout the extended basic block.  */
  if (single_pred_p (bb))
    {
      last_cmp = (struct comparison *) single_pred (bb)->aux;
      if (last_cmp)
        last_cmp_valid = last_cmp->inputs_valid;
    }

  memset (last_setter, 0, sizeof (last_setter));
  for (insn = BB_HEAD (bb); insn; insn = next)
    {
      rtx src;

      next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn));
      if (!NONDEBUG_INSN_P (insn))
        continue;

      src = conforming_compare (insn);
      if (src)
        {
          rtx eh_note = NULL;

          if (cfun->can_throw_non_call_exceptions)
            eh_note = find_reg_note (insn, REG_EH_REGION, NULL);

          if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp))
            {
              if (eh_note)
                need_purge = true;
              delete_insn (insn);
              continue;
            }

          last_cmp = XCNEW (struct comparison);
          last_cmp->insn = insn;
          last_cmp->prev_clobber = last_clobber;
          last_cmp->in_a = strip_not (XEXP (src, 0));
          last_cmp->in_b = XEXP (src, 1);
          last_cmp->not_in_a = is_not (XEXP (src, 0));
          last_cmp->eh_note = eh_note;
          last_cmp->orig_mode = GET_MODE (src);
          if (last_cmp->in_b == const0_rtx
              && last_setter[REGNO (last_cmp->in_a)])
            {
              rtx set = single_set (last_setter[REGNO (last_cmp->in_a)]);
              if (set && rtx_equal_p (SET_DEST (set), last_cmp->in_a))
                last_cmp->in_a_setter = last_setter[REGNO (last_cmp->in_a)];
            }
          all_compares.safe_push (last_cmp);

          /* It's unusual, but be prepared for comparison patterns that
             also clobber an input, or perhaps a scratch.  */
          last_clobber = NULL;
          last_cmp_valid = true;
        }

      else
        {
          /* Notice if this instruction uses the flags register.  */
          if (last_cmp)
            find_flags_uses_in_insn (last_cmp, insn);

          /* Notice if this instruction kills the flags register.  */
          df_ref def;
          FOR_EACH_INSN_DEF (def, insn)
            if (DF_REF_REGNO (def) == targetm.flags_regnum)
              {
                /* See if this insn could be the "clobber" that eliminates
                   a future comparison.  */
                last_clobber = (arithmetic_flags_clobber_p (insn)
                                ? insn : NULL);

                /* In either case, the previous compare is no longer valid.  */
                last_cmp = NULL;
                last_cmp_valid = false;
                break;
              }
        }

      /* Notice if any of the inputs to the comparison have changed
         and remember last insn that sets each register.  */
      df_ref def;
      FOR_EACH_INSN_DEF (def, insn)
        {
          if (last_cmp_valid
              && (DF_REF_REGNO (def) == REGNO (last_cmp->in_a)
                  || (REG_P (last_cmp->in_b)
                      && DF_REF_REGNO (def) == REGNO (last_cmp->in_b))))
            last_cmp_valid = false;
          last_setter[DF_REF_REGNO (def)] = insn;
        }
    }

  /* Remember the live comparison for subsequent members of
     the extended basic block.  */
  if (last_cmp)
    {
      bb->aux = last_cmp;
      last_cmp->inputs_valid = last_cmp_valid;

      /* Look to see if the flags register is live outgoing here, and
         incoming to any successor not part of the extended basic block.  */
      if (bitmap_bit_p (df_get_live_out (bb), targetm.flags_regnum))
        {
          edge e;
          edge_iterator ei;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              basic_block dest = e->dest;
              if (bitmap_bit_p (df_get_live_in (bb), targetm.flags_regnum)
                  && !single_pred_p (dest))
                {
                  last_cmp->missing_uses = true;
                  break;
                }
            }
        }
    }

  /* If we deleted a compare with a REG_EH_REGION note, we may need to
     remove EH edges.  */
  if (need_purge)
    purge_dead_edges (bb);

  return NULL;
}

/* Find all comparisons in the function.  */

static void
find_comparisons (void)
{
  calculate_dominance_info (CDI_DOMINATORS);

  find_comparison_dom_walker (CDI_DOMINATORS)
    .walk (cfun->cfg->x_entry_block_ptr);

  clear_aux_for_blocks ();
  free_dominance_info (CDI_DOMINATORS);
}

/* Select an alternate CC_MODE for a comparison insn comparing A and B.
   Note that inputs are almost certainly different from the IN_A and IN_B
   stored in CMP -- we're called while attempting to eliminate the compare
   after all.  Return the new FLAGS rtx if successful, else return NULL.
   Note that this function may start a change group.  */

static rtx
maybe_select_cc_mode (struct comparison *cmp, rtx a ATTRIBUTE_UNUSED,
		      rtx b ATTRIBUTE_UNUSED)
{
  machine_mode sel_mode;
  const int n = cmp->n_uses;
  rtx flags = NULL;

#ifndef SELECT_CC_MODE
  /* Minimize code differences when this target macro is undefined.  */
  return NULL;
#define SELECT_CC_MODE(A,B,C) (gcc_unreachable (), VOIDmode)
#endif

  /* If we don't have access to all of the uses, we can't validate.  */
  if (cmp->missing_uses || n == 0)
    return NULL;

  /* Find a new mode that works for all of the uses.  Special case the
     common case of exactly one use.  */
  if (n == 1)
    {
      sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
      if (sel_mode != cmp->orig_mode)
        {
          flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
          validate_change (cmp->uses[0].insn, cmp->uses[0].loc, flags, true);
        }
    }
  else
    {
      int i;

      sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
      for (i = 1; i < n; ++i)
        {
          machine_mode new_mode = SELECT_CC_MODE (cmp->uses[i].code, a, b);
          if (new_mode != sel_mode)
            {
              sel_mode = targetm.cc_modes_compatible (sel_mode, new_mode);
              if (sel_mode == VOIDmode)
                return NULL;
            }
        }

      if (sel_mode != cmp->orig_mode)
        {
          flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
          for (i = 0; i < n; ++i)
            validate_change (cmp->uses[i].insn, cmp->uses[i].loc, flags, true);
        }
    }

  return flags;
}
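
/* For instance, with two uses testing EQ and LT of the same compare,
   SELECT_CC_MODE might return, say, CCZmode and CCNmode; the result then
   hinges on targetm.cc_modes_compatible reconciling the two modes (many
   targets fall back to plain CCmode, or return VOIDmode to reject the
   combination).  The concrete mode names here are only illustrative; they
   depend entirely on the target.  */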

/* Return a register RTX holding the same value at START as REG at END, or
   NULL_RTX if there is none.  */

static rtx
equivalent_reg_at_start (rtx reg, rtx_insn *end, rtx_insn *start)
{
  machine_mode orig_mode = GET_MODE (reg);
  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (end));

  for (rtx_insn *insn = PREV_INSN (end);
       insn != start;
       insn = PREV_INSN (insn))
    {
      const int abnormal_flags
        = (DF_REF_CONDITIONAL | DF_REF_PARTIAL | DF_REF_MAY_CLOBBER
           | DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT
           | DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART
           | DF_REF_PRE_POST_MODIFY);
      df_ref def;

      /* Note that the BB_HEAD is always either a note or a label, but in
         any case it means that REG is defined outside the block.  */
      if (insn == bb_head)
        return NULL_RTX;
      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
        continue;

      /* Find a possible def of REG in INSN.  */
      FOR_EACH_INSN_DEF (def, insn)
        if (DF_REF_REGNO (def) == REGNO (reg))
          break;

      /* No definitions of REG; continue searching.  */
      if (def == NULL)
        continue;

      /* Bail if this is not a totally normal set of REG.  */
      if (DF_REF_IS_ARTIFICIAL (def))
        return NULL_RTX;
      if (DF_REF_FLAGS (def) & abnormal_flags)
        return NULL_RTX;

      /* We've found an insn between the compare and the clobber that sets
         REG.  Given that pass_cprop_hardreg has not yet run, we still find
         situations in which we can usefully look through a copy insn.  */
      rtx x = single_set (insn);
      if (x == NULL_RTX)
        return NULL_RTX;
      reg = SET_SRC (x);
      if (!REG_P (reg))
        return NULL_RTX;
    }

  if (GET_MODE (reg) != orig_mode)
    return NULL_RTX;

  return reg;
}
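
/* As an example of looking through a copy (insns and register numbers
   invented for illustration): given

	I1: [(set (reg:SI 1) (plus:SI (reg:SI 2) (reg:SI 3)))
	     (clobber (reg:CC))]
	I2: (set (reg:SI 0) (reg:SI 1))
	I3: (set (reg:CC) (compare:CC (reg:SI 0) (const_int 0)))

   walking backwards from I3 with REG = (reg:SI 0) reaches the copy I2 and
   continues with (reg:SI 1), which is what I1, the prev_clobber insn,
   actually sets.  */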

/* Return true if it is okay to merge the comparison CMP_INSN with
   the instruction ARITH_INSN.  Both instructions are assumed to be in the
   same basic block with ARITH_INSN appearing before CMP_INSN.  This checks
   that there are no uses or defs of the condition flags or control flow
   changes between the two instructions.  */

static bool
can_merge_compare_into_arith (rtx_insn *cmp_insn, rtx_insn *arith_insn)
{
  for (rtx_insn *insn = PREV_INSN (cmp_insn);
       insn && insn != arith_insn;
       insn = PREV_INSN (insn))
    {
      if (!NONDEBUG_INSN_P (insn))
        continue;
      /* Bail if there are jumps or calls in between.  */
      if (!NONJUMP_INSN_P (insn))
        return false;

      /* Bail on old-style asm statements because they lack
         data flow information.  */
      if (GET_CODE (PATTERN (insn)) == ASM_INPUT)
        return false;

      df_ref ref;
      /* Find a USE of the flags register.  */
      FOR_EACH_INSN_USE (ref, insn)
        if (DF_REF_REGNO (ref) == targetm.flags_regnum)
          return false;

      /* Find a DEF of the flags register.  */
      FOR_EACH_INSN_DEF (ref, insn)
        if (DF_REF_REGNO (ref) == targetm.flags_regnum)
          return false;
    }
  return true;
}

/* Given two SET expressions, SET_A and SET_B, determine whether they form
   a recognizable pattern when emitted in parallel.  Return that parallel
   if so.  Otherwise return NULL.  */

static rtx
try_validate_parallel (rtx set_a, rtx set_b)
{
  rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set_a, set_b));
  rtx_insn *insn = make_insn_raw (par);

  if (insn_invalid_p (insn, false))
    {
      crtl->emit.x_cur_insn_uid--;
      return NULL_RTX;
    }

  SET_PREV_INSN (insn) = NULL_RTX;
  SET_NEXT_INSN (insn) = NULL_RTX;
  INSN_LOCATION (insn) = 0;
  return insn;
}

/* For a comparison instruction described by CMP, check if it compares a
   register with zero, i.e. it is of the form CC := CMP R1, 0.
   If it is, find the instruction defining R1 (say I1) and try to create a
   PARALLEL consisting of I1 and the comparison, representing a flag-setting
   arithmetic instruction.  Example:
   I1: R1 := R2 + R3
   <instructions that don't read the condition register>
   I2: CC := CMP R1 0
   I2 can be merged with I1 into:
   I1: { CC := CMP (R2 + R3) 0 ; R1 := R2 + R3 }
   This catches cases where R1 is used between I1 and I2 and therefore
   combine and other RTL optimisations will not try to propagate it into
   I2.  Return true if we succeeded in merging CMP.  */

static bool
try_merge_compare (struct comparison *cmp)
{
  rtx_insn *cmp_insn = cmp->insn;

  if (cmp->in_b != const0_rtx || cmp->in_a_setter == NULL)
    return false;
  rtx in_a = cmp->in_a;
  df_ref use;

  FOR_EACH_INSN_USE (use, cmp_insn)
    if (DF_REF_REGNO (use) == REGNO (in_a))
      break;
  if (!use)
    return false;

  rtx_insn *def_insn = cmp->in_a_setter;
  rtx set = single_set (def_insn);
  if (!set)
    return false;

  if (!can_merge_compare_into_arith (cmp_insn, def_insn))
    return false;

  rtx src = SET_SRC (set);

  /* If the source uses addressing modes with side effects, we can't
     do the merge because we'd end up with a PARALLEL that has two
     instances of that side effect in it.  */
  if (side_effects_p (src))
    return false;

  rtx flags = maybe_select_cc_mode (cmp, src, CONST0_RTX (GET_MODE (src)));
  if (!flags)
    {
      /* We may already have a change group going through maybe_select_cc_mode.
         Discard it properly.  */
      cancel_changes (0);
      return false;
    }

  rtx flag_set
    = gen_rtx_SET (flags, gen_rtx_COMPARE (GET_MODE (flags),
                                           copy_rtx (src),
                                           CONST0_RTX (GET_MODE (src))));
  rtx arith_set = copy_rtx (PATTERN (def_insn));
  rtx par = try_validate_parallel (flag_set, arith_set);
  if (!par)
    {
      /* We may already have a change group going through maybe_select_cc_mode.
         Discard it properly.  */
      cancel_changes (0);
      return false;
    }
  if (!apply_change_group ())
    return false;
  emit_insn_after (par, def_insn);
  delete_insn (def_insn);
  delete_insn (cmp->insn);
  return true;
}

/* Attempt to replace a comparison with a prior arithmetic insn that can
   compute the same flags value as the comparison itself.  Return true if
   successful, having made all rtl modifications necessary.  */

static bool
try_eliminate_compare (struct comparison *cmp)
{
  rtx flags, in_a, in_b, cmp_a, cmp_b;

  if (try_merge_compare (cmp))
    return true;

  /* We must have found an interesting "clobber" preceding the compare.  */
  if (cmp->prev_clobber == NULL)
    return false;

  /* Verify that IN_A is not clobbered in between CMP and PREV_CLOBBER.
     Given that this target requires this pass, we can assume that most
     insns do clobber the flags, and so the distance between the compare
     and the clobber is likely to be small.  */
  /* ??? This is one point at which one could argue that DF_REF_CHAIN would
     be useful, but it is thought to be too heavy-weight a solution here.  */
  in_a = equivalent_reg_at_start (cmp->in_a, cmp->insn, cmp->prev_clobber);
  if (!in_a)
    return false;

  /* Likewise for IN_B if need be.  */
  if (CONSTANT_P (cmp->in_b))
    in_b = cmp->in_b;
  else if (REG_P (cmp->in_b))
    {
      in_b = equivalent_reg_at_start (cmp->in_b, cmp->insn, cmp->prev_clobber);
      if (!in_b)
        return false;
    }
  else if (GET_CODE (cmp->in_b) == UNSPEC)
    {
      const int len = XVECLEN (cmp->in_b, 0);
      rtvec v = rtvec_alloc (len);
      for (int i = 0; i < len; i++)
        {
          rtx r = equivalent_reg_at_start (XVECEXP (cmp->in_b, 0, i),
                                           cmp->insn, cmp->prev_clobber);
          if (!r)
            return false;
          RTVEC_ELT (v, i) = r;
        }
      in_b = gen_rtx_UNSPEC (GET_MODE (cmp->in_b), v, XINT (cmp->in_b, 1));
    }
  else
    gcc_unreachable ();

  /* We've reached PREV_CLOBBER without finding a modification of IN_A.
     Validate that PREV_CLOBBER itself does in fact refer to IN_A.  Do
     recall that we've already validated the shape of PREV_CLOBBER.  */
  rtx_insn *insn = cmp->prev_clobber;

  rtx x = XVECEXP (PATTERN (insn), 0, 0);
  if (rtx_equal_p (SET_DEST (x), in_a))
    cmp_a = SET_SRC (x);

  /* Also check operations with implicit extensions, e.g.:
     [(set (reg:DI)
	   (zero_extend:DI (plus:SI (reg:SI) (reg:SI))))
      (set (reg:CCZ flags)
	   (compare:CCZ (plus:SI (reg:SI) (reg:SI))
			(const_int 0)))]  */
  else if (REG_P (SET_DEST (x))
           && REG_P (in_a)
           && REGNO (SET_DEST (x)) == REGNO (in_a)
           && (GET_CODE (SET_SRC (x)) == ZERO_EXTEND
               || GET_CODE (SET_SRC (x)) == SIGN_EXTEND)
           && GET_MODE (XEXP (SET_SRC (x), 0)) == GET_MODE (in_a))
    cmp_a = XEXP (SET_SRC (x), 0);

  /* Also check fully redundant comparisons, e.g.:
     [(set (reg:SI)
	   (minus:SI (reg:SI) (reg:SI)))
      (set (reg:CC flags)
	   (compare:CC (reg:SI) (reg:SI)))]  */
  else if (REG_P (in_b)
           && GET_CODE (SET_SRC (x)) == MINUS
           && rtx_equal_p (XEXP (SET_SRC (x), 0), in_a)
           && rtx_equal_p (XEXP (SET_SRC (x), 1), in_b))
    cmp_a = in_a;

  else
    return false;

  /* If the source uses addressing modes with side effects, we can't
     do the merge because we'd end up with a PARALLEL that has two
     instances of that side effect in it.  */
  if (side_effects_p (cmp_a))
    return false;

  if (in_a == in_b)
    cmp_b = cmp_a;
  else if (rtx_equal_p (SET_DEST (x), in_b))
    cmp_b = SET_SRC (x);
  else
    cmp_b = in_b;
  if (side_effects_p (cmp_b))
    return false;

  /* Determine if we ought to use a different CC_MODE here.  */
  flags = maybe_select_cc_mode (cmp, cmp_a, cmp_b);
  if (flags == NULL)
    flags = gen_rtx_REG (cmp->orig_mode, targetm.flags_regnum);

  /* Generate a new comparison for installation in the setter.  */
  rtx y = cmp->not_in_a
          ? gen_rtx_NOT (GET_MODE (cmp_a), copy_rtx (cmp_a))
          : copy_rtx (cmp_a);
  y = gen_rtx_COMPARE (GET_MODE (flags), y, copy_rtx (cmp_b));
  y = gen_rtx_SET (flags, y);

  /* Canonicalize instruction to:
     [(set (reg:CCM) (compare:CCM (operation) (immediate)))
      (set (reg) (operation))]  */

  rtvec v = rtvec_alloc (2);
  RTVEC_ELT (v, 0) = y;
  RTVEC_ELT (v, 1) = x;

  rtx pat = gen_rtx_PARALLEL (VOIDmode, v);

  /* Succeed if the new instruction is valid.  Note that we may have started
     a change group within maybe_select_cc_mode, therefore we must continue.  */
  validate_change (insn, &PATTERN (insn), pat, true);

  if (!apply_change_group ())
    return false;

  /* Success.  Delete the compare insn...  */
  delete_insn (cmp->insn);

  /* ... and any notes that are now invalid due to multiple sets.  */
  x = find_regno_note (insn, REG_UNUSED, targetm.flags_regnum);
  if (x)
    remove_note (insn, x);
  x = find_reg_note (insn, REG_EQUAL, NULL);
  if (x)
    remove_note (insn, x);
  x = find_reg_note (insn, REG_EQUIV, NULL);
  if (x)
    remove_note (insn, x);

  return true;
}

/* Main entry point to the pass.  */

static unsigned int
execute_compare_elim_after_reload (void)
{
  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();

  gcc_checking_assert (!all_compares.exists ());

  /* Locate all comparisons and their uses, and eliminate duplicates.  */
  find_comparisons ();
  if (all_compares.exists ())
    {
      struct comparison *cmp;
      size_t i;

      /* Eliminate comparisons that are redundant with flags computation.  */
      FOR_EACH_VEC_ELT (all_compares, i, cmp)
        {
          try_eliminate_compare (cmp);
          XDELETE (cmp);
        }

      all_compares.release ();
    }

  return 0;
}

namespace {

const pass_data pass_data_compare_elim_after_reload =
{
  RTL_PASS, /* type */
  "cmpelim", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_finish | TODO_df_verify ), /* todo_flags_finish */
};

class pass_compare_elim_after_reload : public rtl_opt_pass
{
public:
  pass_compare_elim_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_compare_elim_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    /* Setting this target hook value is how a backend indicates the need.  */
    if (targetm.flags_regnum == INVALID_REGNUM)
      return false;
    return flag_compare_elim_after_reload;
  }

  unsigned int execute (function *) final override
  {
    return execute_compare_elim_after_reload ();
  }

}; // class pass_compare_elim_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_compare_elim_after_reload (gcc::context *ctxt)
{
  return new pass_compare_elim_after_reload (ctxt);
}
