/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "tree-pass.h"
#include "function-abi.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.cc and expmed.cc (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.cc and final.cc and reload.cc.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
                                      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.cc.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

bool
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return false;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return false;
  if (noperands == 0)
    return true;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return false;
    }

  return true;
}

/* Static data for the next two routines.  */

struct change_t
{
  rtx object;
  int old_code;
  int old_len;
  bool unshare;
  rtx *loc;
  rtx old;
};

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
static int temporarily_undone_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
   will also be changed to NEW_LEN, which is no greater than the current
   XVECLEN.  If OBJECT is zero, no validation is done, the change is
   simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return false.
   Otherwise, perform the change and return true.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
                   bool unshare, int new_len = -1)
{
  gcc_assert (temporarily_undone_changes == 0);
  rtx old = *loc;

  /* Single-element parallels aren't valid and won't match anything.
     Replace them with the single element.  */
  if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
    {
      new_rtx = XVECEXP (new_rtx, 0, 0);
      new_len = -1;
    }

  if ((old == new_rtx || rtx_equal_p (old, new_rtx))
      && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
    return true;

  gcc_assert ((in_group != 0 || num_changes == 0)
              && (new_len < 0 || new_rtx == *loc));

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
  changes[num_changes].unshare = unshare;

  if (new_len >= 0)
    XVECLEN (new_rtx, 0) = new_len;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return true;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}

/* Change XVECLEN (*LOC, 0) to NEW_LEN.  OBJECT, IN_GROUP and the return
   value are as for validate_change_1.  */

bool
validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
{
  return validate_change_1 (object, loc, *loc, in_group, false, new_len);
}
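
/* An illustrative sketch (not used in this file; INSN, SET, NEW_SRC and
   NEW_DEST are placeholder names) of how a pass typically drives the
   change-group machinery declared above: queue several tentative edits
   with IN_GROUP nonzero, then let apply_change_group either commit them
   all or leave the insn untouched.

     validate_change (insn, &SET_SRC (set), new_src, 1);
     validate_change (insn, &SET_DEST (set), new_dest, 1);
     if (!apply_change_group ())
       return false;   -- nothing was applied, so there is nothing to undo  */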

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}

/* Check if REG_INC argument in *data overlaps a stored REG.  */

static void
check_invalid_inc_dec (rtx reg, const_rtx, void *data)
{
  rtx *pinc = (rtx *) data;
  if (*pinc == NULL_RTX || MEM_P (reg))
    return;
  if (reg_overlap_mentioned_p (reg, *pinc))
    *pinc = NULL_RTX;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers which have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

bool
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  bool is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return true;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return true;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
        return true;
    }

  /* Punt if REG_INC argument overlaps some stored REG.  */
  for (rtx link = FIND_REG_INC_NOTE (insn, NULL_RTX);
       link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
        rtx reg = XEXP (link, 0);
        note_stores (insn, check_invalid_inc_dec, &reg);
        if (reg == NULL_RTX)
          return true;
      }

  INSN_CODE (insn) = icode;
  return false;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return true if all changes are valid, false otherwise.  */

bool
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && register_asm_p (changes[i].old))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  gcc_assert (temporarily_undone_changes == 0);
  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (as_a <rtx_insn *> (last_object));
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return true,
   otherwise, call cancel_changes and return false.  */

bool
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return true;
    }
  else
    {
      cancel_changes (0);
      return false;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  gcc_assert (temporarily_undone_changes == 0);
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      if (changes[i].old_len >= 0)
        XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
      else
        *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

/* Swap the status of change NUM from being applied to not being applied,
   or vice versa.  */

static void
swap_change (int num)
{
  if (changes[num].old_len >= 0)
    std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
  else
    std::swap (*changes[num].loc, changes[num].old);
  if (changes[num].object && !MEM_P (changes[num].object))
    std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
}

/* Temporarily undo all the changes numbered NUM and up, with a view
   to reapplying them later.  The next call to the changes machinery
   must be:

      redo_changes (NUM)

   otherwise things will end up in an invalid state.  */

void
temporarily_undo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == 0 && num <= num_changes);
  for (int i = num_changes - 1; i >= num; i--)
    swap_change (i);
  temporarily_undone_changes = num_changes - num;
}

/* Redo the changes that were temporarily undone by:

      temporarily_undo_changes (NUM).  */

void
redo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == num_changes - num);
  for (int i = num; i < num_changes; ++i)
    swap_change (i);
  temporarily_undone_changes = 0;
}
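
/* An illustrative sketch of how the pair above is meant to be used
   (placeholder code, not called from this file): view an insn as it was
   before the pending changes, then restore the in-progress state.

     temporarily_undo_changes (0);
     ... inspect the insn in its original, pre-change form ...
     redo_changes (0);  */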

/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the
   resulting rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
                          machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
        new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
                                            op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
                                             XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
                                                 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          int pos = INTVAL (XEXP (x, 2));
          machine_mode new_mode = is_mode;
          if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
            new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
          else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
            new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
          scalar_int_mode wanted_mode = (new_mode == VOIDmode
                                         ? word_mode
                                         : as_a <scalar_int_mode> (new_mode));

          /* If we have a narrower mode, we can do something.  */
          if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

bool
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

bool
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
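
/* An illustrative sketch of a typical validate_replace_rtx call (PSEUDO,
   CST and INSN are placeholder names): substitute a known-equal value and
   keep the result only if INSN still matches a pattern or satisfies its
   asm constraints.

     if (validate_replace_rtx (pseudo, cst, insn))
       ... INSN now uses CST everywhere it used PSEUDO ...  */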

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

bool
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
bool
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx_insn *insn;               /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

/* Try to process the address of memory expression MEM.  Return true on
   success; leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_mem_1 (rtx mem)
{
  auto old_num_changes = num_validated_changes ();
  mem_depth += 1;
  bool res = apply_to_rvalue_1 (&XEXP (mem, 0));
  mem_depth -= 1;
  if (!res)
    return false;

  if (old_num_changes != num_validated_changes ()
      && should_check_mems
      && !check_mem (old_num_changes, mem))
    return false;

  return true;
}

/* Try to process the rvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_rvalue_1 (rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);

  auto old_num_changes = num_validated_changes ();
  if (from && GET_CODE (x) == GET_CODE (from) && rtx_equal_p (x, from))
    {
      /* Don't replace register asms in asm statements; we mustn't
         change the user's register allocation.  */
      if (REG_P (x)
          && HARD_REGISTER_P (x)
          && register_asm_p (x)
          && asm_noperands (PATTERN (insn)) > 0)
        return false;

      if (should_unshare)
        validate_unshare_change (insn, loc, to, 1);
      else
        validate_change (insn, loc, to, 1);
      if (mem_depth && !REG_P (to) && !CONSTANT_P (to))
        {
          /* We're substituting into an address, but TO will have the
             form expected outside an address.  Canonicalize it if
             necessary.  */
          insn_propagation subprop (insn);
          subprop.mem_depth += 1;
          if (!subprop.apply_to_rvalue (loc))
            gcc_unreachable ();
          if (should_unshare
              && num_validated_changes () != old_num_changes + 1)
            {
              /* TO is owned by someone else, so create a copy and
                 return TO to its original form.  */
              rtx to = copy_rtx (*loc);
              cancel_changes (old_num_changes);
              validate_change (insn, loc, to, 1);
            }
        }
      num_replacements += 1;
      should_unshare = true;
      result_flags |= UNSIMPLIFIED;
      return true;
    }

  /* Recursively apply the substitution and see if we can simplify
     the result.  This specifically shouldn't use simplify_gen_* for
     speculative simplifications, since we want to avoid generating new
     expressions where possible.  */
  auto old_result_flags = result_flags;
  rtx newx = NULL_RTX;
  bool recurse_p = false;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
        machine_mode op0_mode = GET_MODE (XEXP (x, 0));
        if (!apply_to_rvalue_1 (&XEXP (x, 0)))
          return false;
        if (from && old_num_changes == num_validated_changes ())
          return true;

        newx = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
        break;
      }

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      {
        if (!apply_to_rvalue_1 (&XEXP (x, 0))
            || !apply_to_rvalue_1 (&XEXP (x, 1)))
          return false;
        if (from && old_num_changes == num_validated_changes ())
          return true;

        if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
            && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
          newx = simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
        else
          newx = simplify_binary_operation (code, mode,
                                            XEXP (x, 0), XEXP (x, 1));
        break;
      }

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
        machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode
                                ? GET_MODE (XEXP (x, 0))
                                : GET_MODE (XEXP (x, 1)));
        if (!apply_to_rvalue_1 (&XEXP (x, 0))
            || !apply_to_rvalue_1 (&XEXP (x, 1)))
          return false;
        if (from && old_num_changes == num_validated_changes ())
          return true;

        newx = simplify_relational_operation (code, mode, op_mode,
                                              XEXP (x, 0), XEXP (x, 1));
        break;
      }

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      {
        machine_mode op0_mode = GET_MODE (XEXP (x, 0));
        if (!apply_to_rvalue_1 (&XEXP (x, 0))
            || !apply_to_rvalue_1 (&XEXP (x, 1))
            || !apply_to_rvalue_1 (&XEXP (x, 2)))
          return false;
        if (from && old_num_changes == num_validated_changes ())
          return true;

        newx = simplify_ternary_operation (code, mode, op0_mode,
                                           XEXP (x, 0), XEXP (x, 1),
                                           XEXP (x, 2));
        break;
      }

    case RTX_EXTRA:
      if (code == SUBREG)
        {
          machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
          if (!apply_to_rvalue_1 (&SUBREG_REG (x)))
            return false;
          if (from && old_num_changes == num_validated_changes ())
            return true;

          rtx inner = SUBREG_REG (x);
          newx = simplify_subreg (mode, inner, inner_mode, SUBREG_BYTE (x));
          /* Reject the same cases that simplify_gen_subreg would.  */
          if (!newx
              && (GET_CODE (inner) == SUBREG
                  || GET_CODE (inner) == CONCAT
                  || GET_MODE (inner) == VOIDmode
                  || !validate_subreg (mode, inner_mode,
                                       inner, SUBREG_BYTE (x))))
            {
              failure_reason = "would create an invalid subreg";
              return false;
            }
          break;
        }
      else
        recurse_p = true;
      break;

    case RTX_OBJ:
      if (code == LO_SUM)
        {
          if (!apply_to_rvalue_1 (&XEXP (x, 0))
              || !apply_to_rvalue_1 (&XEXP (x, 1)))
            return false;
          if (from && old_num_changes == num_validated_changes ())
            return true;

          /* (lo_sum (high x) y) -> y where x and y have the same base.  */
          rtx op0 = XEXP (x, 0);
          rtx op1 = XEXP (x, 1);
          if (GET_CODE (op0) == HIGH)
            {
              rtx base0, base1, offset0, offset1;
              split_const (XEXP (op0, 0), &base0, &offset0);
              split_const (op1, &base1, &offset1);
              if (rtx_equal_p (base0, base1))
                newx = op1;
            }
        }
      else if (code == REG)
        {
          if (from && REG_P (from) && reg_overlap_mentioned_p (x, from))
            {
              failure_reason = "inexact register overlap";
              return false;
            }
        }
      else if (code == MEM)
        return apply_to_mem_1 (x);
      else
        recurse_p = true;
      break;

    case RTX_CONST_OBJ:
      break;

    case RTX_AUTOINC:
      if (from && reg_overlap_mentioned_p (XEXP (x, 0), from))
        {
          failure_reason = "is subject to autoinc";
          return false;
        }
      recurse_p = true;
      break;

    case RTX_MATCH:
    case RTX_INSN:
      gcc_unreachable ();
    }

  if (recurse_p)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (int i = 0; fmt[i]; i++)
        switch (fmt[i])
          {
          case 'E':
            for (int j = 0; j < XVECLEN (x, i); j++)
              if (!apply_to_rvalue_1 (&XVECEXP (x, i, j)))
                return false;
            break;

          case 'e':
            if (XEXP (x, i) && !apply_to_rvalue_1 (&XEXP (x, i)))
              return false;
            break;
          }
    }
  else if (newx && !rtx_equal_p (x, newx))
    {
      /* All substitutions made by OLD_NUM_CHANGES onwards have been
         simplified.  */
      result_flags = ((result_flags & ~UNSIMPLIFIED)
                      | (old_result_flags & UNSIMPLIFIED));

      if (should_note_simplifications)
        note_simplification (old_num_changes, old_result_flags, x, newx);

      /* There's no longer any point unsharing the substitutions made
         for subexpressions, since we'll just copy this one instead.  */
      bool unshare = false;
      for (int i = old_num_changes; i < num_changes; ++i)
        {
          unshare |= changes[i].unshare;
          changes[i].unshare = false;
        }
      if (unshare)
        validate_unshare_change (insn, loc, newx, 1);
      else
        validate_change (insn, loc, newx, 1);
    }

  return true;
}

/* Try to process the lvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_lvalue_1 (rtx dest)
{
  rtx old_dest = dest;
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    {
      if (GET_CODE (dest) == ZERO_EXTRACT
          && (!apply_to_rvalue_1 (&XEXP (dest, 1))
              || !apply_to_rvalue_1 (&XEXP (dest, 2))))
        return false;
      dest = XEXP (dest, 0);
    }

  if (MEM_P (dest))
    return apply_to_mem_1 (dest);

  /* Check whether the substitution is safe in the presence of this lvalue.  */
  if (!from
      || dest == old_dest
      || !REG_P (dest)
      || !reg_overlap_mentioned_p (dest, from))
    return true;

  if (SUBREG_P (old_dest)
      && SUBREG_REG (old_dest) == dest
      && !read_modify_subreg_p (old_dest))
    return true;

  failure_reason = "is part of a read-write destination";
  return false;
}

/* Try to process the instruction pattern at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_pattern_1 (rtx *loc)
{
  rtx body = *loc;
  switch (GET_CODE (body))
    {
    case COND_EXEC:
      return (apply_to_rvalue_1 (&COND_EXEC_TEST (body))
              && apply_to_pattern_1 (&COND_EXEC_CODE (body)));

    case PARALLEL:
      for (int i = 0; i < XVECLEN (body, 0); ++i)
        {
          rtx *subloc = &XVECEXP (body, 0, i);
          if (GET_CODE (*subloc) == SET)
            {
              if (!apply_to_lvalue_1 (SET_DEST (*subloc)))
                return false;
              /* ASM_OPERANDS are shared between SETs in the same PARALLEL.
                 Only process them on the first iteration.  */
              if ((i == 0 || GET_CODE (SET_SRC (*subloc)) != ASM_OPERANDS)
                  && !apply_to_rvalue_1 (&SET_SRC (*subloc)))
                return false;
            }
          else
            {
              if (!apply_to_pattern_1 (subloc))
                return false;
            }
        }
      return true;

    case ASM_OPERANDS:
      for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (body); i < len; ++i)
        if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body, i)))
          return false;
      return true;

    case CLOBBER:
      return apply_to_lvalue_1 (XEXP (body, 0));

    case SET:
      return (apply_to_lvalue_1 (SET_DEST (body))
              && apply_to_rvalue_1 (&SET_SRC (body)));

    default:
      /* All the other possibilities never store and can use a normal
         rtx walk.  This includes:

         - USE
         - TRAP_IF
         - PREFETCH
         - UNSPEC
         - UNSPEC_VOLATILE.  */
      return apply_to_rvalue_1 (loc);
    }
}

/* Apply this insn_propagation object's simplification or substitution
   to the instruction pattern at LOC.  */

bool
insn_propagation::apply_to_pattern (rtx *loc)
{
  unsigned int num_changes = num_validated_changes ();
  bool res = apply_to_pattern_1 (loc);
  if (!res)
    cancel_changes (num_changes);
  return res;
}

/* Apply this insn_propagation object's simplification or substitution
   to the rvalue expression at LOC.  */

bool
insn_propagation::apply_to_rvalue (rtx *loc)
{
  unsigned int num_changes = num_validated_changes ();
  bool res = apply_to_rvalue_1 (loc);
  if (!res)
    cancel_changes (num_changes);
  return res;
}
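
/* An illustrative sketch of driving insn_propagation (USE_INSN, FROM and TO
   are placeholder names): substitute the known equivalence FROM == TO into
   USE_INSN's pattern, then commit the queued changes only if the insn is
   still recognizable.

     insn_propagation prop (use_insn, from, to);
     if (prop.apply_to_pattern (&PATTERN (use_insn))
         && apply_change_group ())
       ... the substitution and any simplifications have been committed ...  */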

/* Check whether INSN matches a specific alternative of an .md pattern.  */

bool
valid_insn_p (rtx_insn *insn)
{
  recog_memoized (insn);
  if (INSN_CODE (insn) < 0)
    return false;
  extract_insn (insn);
  /* We don't know whether the insn will be in code that is optimized
     for size or speed, so consider all enabled alternatives.  */
  if (!constrain_operands (1, get_enabled_alternatives (insn)))
    return false;
  return true;
}

/* Return true if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return false;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && paradoxical_subreg_p (op))
        return false;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed
          && maybe_ne (SUBREG_BYTE (op), 0)
          && MEM_P (sub))
        return false;

      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return false;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && paradoxical_subreg_p (op))
        return false;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return false;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
         generate move insns with invalid addresses, which are made valid
         and efficiently calculated by LRA through further numerous
         transformations.  */
      if (lra_in_progress
          || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return true;
    }

  return false;
}
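
/* For illustration only (not exercised here): with MODE == SImode,
   general_operand accepts forms such as (reg:SI 100), (mem:SI (reg:SI 101))
   when the address is legitimate for the target, and (const_int 42) on
   targets where that constant is legitimate, while rejecting e.g. a
   paradoxical (subreg:DI (mem:SI ...)) before reload on scheduling
   targets.  */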

/* Return true if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
address_operand (rtx op, machine_mode mode)
{
  /* Wrong mode for an address expr.  */
  if (GET_MODE (op) != VOIDmode
      && ! SCALAR_INT_MODE_P (GET_MODE (op)))
    return false;

  return memory_address_p (mode, op);
}

/* Return true if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
        return false;
    }
  else if (!REG_P (op))
    return false;
  return general_operand (op, mode);
}

/* Return true for a register in Pmode; ignore the tested mode.  */

bool
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return true if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

bool
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return false;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress
                  || (REGNO (op) < FIRST_PSEUDO_REGISTER
                      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return true if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Return true if OP is an operand that is a CONST_INT of mode MODE.  */

bool
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return false;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  return true;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Return true if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
bool
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return false;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
      int prec = GET_MODE_PRECISION (int_mode);
      int bitsize = GET_MODE_BITSIZE (int_mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
        return false;

      if (prec == bitsize)
        return true;
      else
        {
          /* Multiword partial int.  */
          HOST_WIDE_INT x
            = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
          return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
        }
    }
  return true;
}

/* Return true if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

bool
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
          && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Return true if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

bool
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
#endif

/* Return true if OP is a general operand that is not an immediate
   operand of mode MODE.  */

bool
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return true if OP is a register reference or
   immediate value of mode MODE.  */

bool
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return true if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return false;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return false;
    }
  else
    {
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
          || (STACK_GROWS_DOWNWARD
              ? maybe_ne (offset, -rounded_size)
              : maybe_ne (offset, rounded_size)))
        return false;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
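
/* For illustration (assuming a target where the stack grows downward and
   STACK_PUSH_CODE is PRE_DEC): a SImode push operand has the form
   (mem:SI (pre_dec:P (reg sp))), while a push whose size must be rounded
   by PUSH_ROUNDING instead uses
   (mem:M (pre_modify (reg sp) (plus (reg sp) (const_int -ROUNDED_SIZE)))).  */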
1790
1791/* Return true if OP is a valid operand that stands for popping a
1792 value of mode MODE off the stack.
1793
1794 The main use of this function is as a predicate in match_operand
1795 expressions in the machine description. */
1796
1797bool
1798pop_operand (rtx op, machine_mode mode)
1799{
1800 if (!MEM_P (op))
1801 return false;
1802
1803 if (mode != VOIDmode && GET_MODE (op) != mode)
1804 return false;
1805
1806 op = XEXP (op, 0);
1807
1808 if (GET_CODE (op) != STACK_POP_CODE)
1809 return false;
1810
1811 return XEXP (op, 0) == stack_pointer_rtx;
1812}
1813
1814/* Return true if ADDR is a valid memory address
1815 for mode MODE in address space AS. */
1816
1817bool
1818memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED, rtx addr,
1819 addr_space_t as, code_helper ch ATTRIBUTE_UNUSED)
1820{
1821#ifdef GO_IF_LEGITIMATE_ADDRESS
1822 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1823 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1824 return false;
1825
1826 win:
1827 return true;
1828#else
1829 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as, ch);
1830#endif
1831}
1832
1833/* Return true if OP is a valid memory reference with mode MODE,
1834 including a valid address.
1835
1836 The main use of this function is as a predicate in match_operand
1837 expressions in the machine description. */
1838
1839bool
1840memory_operand (rtx op, machine_mode mode)
1841{
1842 rtx inner;
1843
1844 if (! reload_completed)
1845 /* Note that no SUBREG is a memory operand before end of reload pass,
1846 because (SUBREG (MEM...)) forces reloading into a register. */
1847 return MEM_P (op) && general_operand (op, mode);
1848
1849 if (mode != VOIDmode && GET_MODE (op) != mode)
1850 return false;
1851
1852 inner = op;
1853 if (GET_CODE (inner) == SUBREG)
1854 inner = SUBREG_REG (inner);
1855
1856 return (MEM_P (inner) && general_operand (op, mode));
1857}
1858
1859/* Return true if OP is a valid indirect memory reference with mode MODE;
1860 that is, a memory reference whose address is a general_operand. */
1861
1862bool
1863indirect_operand (rtx op, machine_mode mode)
1864{
1865 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1866 if (! reload_completed
1867 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1868 {
1869 if (mode != VOIDmode && GET_MODE (op) != mode)
1870 return false;
1871
1872 /* The only way that we can have a general_operand as the resulting
1873 address is if OFFSET is zero and the address already is an operand
1874 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1875 operand. */
1876 poly_int64 offset;
1877 rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
1878 return (known_eq (offset + SUBREG_BYTE (op), 0)
	      && general_operand (addr, Pmode));
1880 }
1881
1882 return (MEM_P (op)
1883 && memory_operand (op, mode)
1884 && general_operand (XEXP (op, 0), Pmode));
1885}
1886
1887/* Return true if this is an ordered comparison operator (not including
1888 ORDERED and UNORDERED). */
1889
1890bool
1891ordered_comparison_operator (rtx op, machine_mode mode)
1892{
1893 if (mode != VOIDmode && GET_MODE (op) != mode)
1894 return false;
1895 switch (GET_CODE (op))
1896 {
1897 case EQ:
1898 case NE:
1899 case LT:
1900 case LTU:
1901 case LE:
1902 case LEU:
1903 case GT:
1904 case GTU:
1905 case GE:
1906 case GEU:
1907 return true;
1908 default:
1909 return false;
1910 }
1911}
1912
1913/* Return true if this is a comparison operator. This allows the use of
1914 MATCH_OPERATOR to recognize all the branch insns. */
1915
1916bool
1917comparison_operator (rtx op, machine_mode mode)
1918{
1919 return ((mode == VOIDmode || GET_MODE (op) == mode)
1920 && COMPARISON_P (op));
1921}
1922
1923/* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1924
1925rtx
1926extract_asm_operands (rtx body)
1927{
1928 rtx tmp;
1929 switch (GET_CODE (body))
1930 {
1931 case ASM_OPERANDS:
1932 return body;
1933
1934 case SET:
1935 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1936 tmp = SET_SRC (body);
1937 if (GET_CODE (tmp) == ASM_OPERANDS)
1938 return tmp;
1939 break;
1940
1941 case PARALLEL:
1942 tmp = XVECEXP (body, 0, 0);
1943 if (GET_CODE (tmp) == ASM_OPERANDS)
1944 return tmp;
1945 if (GET_CODE (tmp) == SET)
1946 {
1947 tmp = SET_SRC (tmp);
1948 if (GET_CODE (tmp) == ASM_OPERANDS)
1949 return tmp;
1950 }
1951 break;
1952
1953 default:
1954 break;
1955 }
1956 return NULL;
1957}
1958
1959/* If BODY is an insn body that uses ASM_OPERANDS,
1960 return the number of operands (both input and output) in the insn.
1961 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1962 return 0.
1963 Otherwise return -1. */
1964
1965int
1966asm_noperands (const_rtx body)
1967{
1968 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1969 int i, n_sets = 0;
1970
1971 if (asm_op == NULL)
1972 {
1973 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1974 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1975 {
1976 /* body is [(asm_input ...) (clobber (reg ...))...]. */
1977 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1978 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1979 return -1;
1980 return 0;
1981 }
1982 return -1;
1983 }
1984
1985 if (GET_CODE (body) == SET)
1986 n_sets = 1;
1987 else if (GET_CODE (body) == PARALLEL)
1988 {
1989 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1990 {
1991 /* Multiple output operands, or 1 output plus some clobbers:
1992 body is
1993 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1994 /* Count backwards through CLOBBERs to determine number of SETs. */
1995 for (i = XVECLEN (body, 0); i > 0; i--)
1996 {
1997 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1998 break;
1999 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
2000 return -1;
2001 }
2002
2003 /* N_SETS is now number of output operands. */
2004 n_sets = i;
2005
2006 /* Verify that all the SETs we have
2007 came from a single original asm_operands insn
2008 (so that invalid combinations are blocked). */
2009 for (i = 0; i < n_sets; i++)
2010 {
2011 rtx elt = XVECEXP (body, 0, i);
2012 if (GET_CODE (elt) != SET)
2013 return -1;
2014 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
2015 return -1;
2016 /* If these ASM_OPERANDS rtx's came from different original insns
2017 then they aren't allowed together. */
2018 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
2019 != ASM_OPERANDS_INPUT_VEC (asm_op))
2020 return -1;
2021 }
2022 }
2023 else
2024 {
2025 /* 0 outputs, but some clobbers:
2026 body is [(asm_operands ...) (clobber (reg ...))...]. */
2027 /* Make sure all the other parallel things really are clobbers. */
2028 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
2029 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
2030 return -1;
2031 }
2032 }
2033
2034 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
2035 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
2036}
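
/* Worked example (a sketch for clarity, not taken from the sources):
   for

       asm ("add %0,%1,%2" : "=r" (x) : "r" (y), "r" (z));

   the body is a SET of the output from an ASM_OPERANDS (possibly inside
   a PARALLEL with CLOBBERs), so n_sets is 1, there are two inputs and no
   labels, and asm_noperands returns 3.  */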
2037
2038/* Assuming BODY is an insn body that uses ASM_OPERANDS,
2039 copy its operands (both input and output) into the vector OPERANDS,
2040 the locations of the operands within the insn into the vector OPERAND_LOCS,
2041 and the constraints for the operands into CONSTRAINTS.
2042 Write the modes of the operands into MODES.
2043 Write the location info into LOC.
2044 Return the assembler-template.
2045 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
2046 return the basic assembly string.
2047
2048 If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
2049 we don't store that info. */
2050
2051const char *
2052decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
2053 const char **constraints, machine_mode *modes,
2054 location_t *loc)
2055{
2056 int nbase = 0, n, i;
2057 rtx asmop;
2058
2059 switch (GET_CODE (body))
2060 {
2061 case ASM_OPERANDS:
2062 /* Zero output asm: BODY is (asm_operands ...). */
2063 asmop = body;
2064 break;
2065
2066 case SET:
2067 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
2068 asmop = SET_SRC (body);
2069
2070 /* The output is in the SET.
2071 Its constraint is in the ASM_OPERANDS itself. */
2072 if (operands)
2073 operands[0] = SET_DEST (body);
2074 if (operand_locs)
2075 operand_locs[0] = &SET_DEST (body);
2076 if (constraints)
2077 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
2078 if (modes)
2079 modes[0] = GET_MODE (SET_DEST (body));
2080 nbase = 1;
2081 break;
2082
2083 case PARALLEL:
2084 {
2085 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
2086
2087 asmop = XVECEXP (body, 0, 0);
2088 if (GET_CODE (asmop) == SET)
2089 {
2090 asmop = SET_SRC (asmop);
2091
2092 /* At least one output, plus some CLOBBERs. The outputs are in
2093 the SETs. Their constraints are in the ASM_OPERANDS itself. */
2094 for (i = 0; i < nparallel; i++)
2095 {
2096 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
2097 break; /* Past last SET */
2098 gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
2099 if (operands)
2100 operands[i] = SET_DEST (XVECEXP (body, 0, i));
2101 if (operand_locs)
2102 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
2103 if (constraints)
2104 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
2105 if (modes)
2106 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
2107 }
2108 nbase = i;
2109 }
2110 else if (GET_CODE (asmop) == ASM_INPUT)
2111 {
2112 if (loc)
2113 *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
2114 return XSTR (asmop, 0);
2115 }
2116 break;
2117 }
2118
2119 default:
2120 gcc_unreachable ();
2121 }
2122
2123 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
2124 for (i = 0; i < n; i++)
2125 {
2126 if (operand_locs)
2127 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
2128 if (operands)
2129 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
2130 if (constraints)
2131 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
2132 if (modes)
2133 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
2134 }
2135 nbase += n;
2136
2137 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
2138 for (i = 0; i < n; i++)
2139 {
2140 if (operand_locs)
2141 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
2142 if (operands)
2143 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
2144 if (constraints)
2145 constraints[nbase + i] = "";
2146 if (modes)
2147 modes[nbase + i] = Pmode;
2148 }
2149
2150 if (loc)
2151 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
2152
2153 return ASM_OPERANDS_TEMPLATE (asmop);
2154}
2155
2156/* Parse inline assembly string STRING and determine which operands are
2157 referenced by % markers. For the first NOPERANDS operands, set USED[I]
2158 to true if operand I is referenced.
2159
2160 This is intended to distinguish barrier-like asms such as:
2161
2162 asm ("" : "=m" (...));
2163
2164 from real references such as:
2165
2166 asm ("sw\t$0, %0" : "=m" (...)); */
2167
2168void
2169get_referenced_operands (const char *string, bool *used,
2170 unsigned int noperands)
2171{
  memset (used, 0, sizeof (bool) * noperands);
2173 const char *p = string;
2174 while (*p)
2175 switch (*p)
2176 {
2177 case '%':
2178 p += 1;
2179 /* A letter followed by a digit indicates an operand number. */
2180 if (ISALPHA (p[0]) && ISDIGIT (p[1]))
2181 p += 1;
2182 if (ISDIGIT (*p))
2183 {
2184 char *endptr;
	  unsigned long opnum = strtoul (p, &endptr, 10);
2186 if (endptr != p && opnum < noperands)
2187 used[opnum] = true;
2188 p = endptr;
2189 }
2190 else
2191 p += 1;
2192 break;
2193
2194 default:
2195 p++;
2196 break;
2197 }
2198}
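
/* Usage sketch (hypothetical caller; the template and operand count are
   made up for illustration):  */
#if 0
  bool used[2];
  get_referenced_operands ("sw\t$0, %0", used, 2);
  /* Now used[0] is true and used[1] is false: operand 1 is never
     mentioned in the template, so the asm is barrier-like for it.  */
#endif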
2199
2200/* Check if an asm_operand matches its constraints.
2201 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
2202
2203int
2204asm_operand_ok (rtx op, const char *constraint, const char **constraints)
2205{
2206 int result = 0;
2207 bool incdec_ok = false;
2208
2209 /* Use constrain_operands after reload. */
2210 gcc_assert (!reload_completed);
2211
2212 /* Empty constraint string is the same as "X,...,X", i.e. X for as
2213 many alternatives as required to match the other operands. */
2214 if (*constraint == '\0')
2215 result = 1;
2216
2217 while (*constraint)
2218 {
2219 enum constraint_num cn;
2220 char c = *constraint;
2221 int len;
2222 switch (c)
2223 {
2224 case ',':
2225 constraint++;
2226 continue;
2227
2228 case '0': case '1': case '2': case '3': case '4':
2229 case '5': case '6': case '7': case '8': case '9':
2230 /* If caller provided constraints pointer, look up
2231 the matching constraint. Otherwise, our caller should have
2232 given us the proper matching constraint, but we can't
2233 actually fail the check if they didn't. Indicate that
2234 results are inconclusive. */
2235 if (constraints)
2236 {
2237 char *end;
2238 unsigned long match;
2239
	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
2243 constraint = (const char *) end;
2244 }
2245 else
2246 {
2247 do
2248 constraint++;
2249 while (ISDIGIT (*constraint));
2250 if (! result)
2251 result = -1;
2252 }
2253 continue;
2254
2255 /* The rest of the compiler assumes that reloading the address
2256 of a MEM into a register will make it fit an 'o' constraint.
2257 That is, if it sees a MEM operand for an 'o' constraint,
2258 it assumes that (mem (base-reg)) will fit.
2259
2260 That assumption fails on targets that don't have offsettable
2261 addresses at all. We therefore need to treat 'o' asm
2262 constraints as a special case and only accept operands that
2263 are already offsettable, thus proving that at least one
2264 offsettable address exists. */
2265 case 'o': /* offsettable */
2266 if (offsettable_nonstrict_memref_p (op))
2267 result = 1;
2268 break;
2269
2270 case 'g':
2271 if (general_operand (op, VOIDmode))
2272 result = 1;
2273 break;
2274
2275 case '<':
2276 case '>':
2277 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
2278 to exist, excepting those that expand_call created. Further,
2279 on some machines which do not have generalized auto inc/dec,
2280 an inc/dec is not a memory_operand.
2281
2282 Match any memory and hope things are resolved after reload. */
2283 incdec_ok = true;
2284 /* FALLTHRU */
2285 default:
	  cn = lookup_constraint (constraint);
2287 rtx mem = NULL;
	  switch (get_constraint_type (cn))
2289 {
2290 case CT_REGISTER:
2291 if (!result
		  && reg_class_for_constraint (cn) != NO_REGS
2293 && GET_MODE (op) != BLKmode
2294 && register_operand (op, VOIDmode))
2295 result = 1;
2296 break;
2297
2298 case CT_CONST_INT:
2299 if (!result
2300 && CONST_INT_P (op)
2301 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2302 result = 1;
2303 break;
2304
2305 case CT_MEMORY:
2306 case CT_RELAXED_MEMORY:
2307 mem = op;
2308 /* Fall through. */
2309 case CT_SPECIAL_MEMORY:
2310 /* Every memory operand can be reloaded to fit. */
2311 if (!mem)
2312 mem = extract_mem_from_operand (op);
	      result = result || memory_operand (mem, VOIDmode);
2314 break;
2315
2316 case CT_ADDRESS:
2317 /* Every address operand can be reloaded to fit. */
2318 result = result || address_operand (op, VOIDmode);
2319 break;
2320
2321 case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
2323 break;
2324 }
2325 break;
2326 }
2327 len = CONSTRAINT_LEN (c, constraint);
2328 do
2329 constraint++;
2330 while (--len && *constraint && *constraint != ',');
2331 if (len)
2332 return 0;
2333 }
2334
2335 /* For operands without < or > constraints reject side-effects. */
2336 if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
2337 switch (GET_CODE (XEXP (op, 0)))
2338 {
2339 case PRE_INC:
2340 case POST_INC:
2341 case PRE_DEC:
2342 case POST_DEC:
2343 case PRE_MODIFY:
2344 case POST_MODIFY:
2345 return 0;
2346 default:
2347 break;
2348 }
2349
2350 return result;
2351}
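
/* Usage sketch (hypothetical): checking one operand of an asm against
   its constraint string before reload.  */
#if 0
  int ok = asm_operand_ok (op, "rm", NULL);
  /* ok > 0: OP already satisfies "r" or "m"; ok == 0: it cannot;
     ok < 0: inconclusive, e.g. a matching-constraint digit was seen and
     no CONSTRAINTS array was supplied.  */
#endif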
2352
2353/* Given an rtx *P, if it is a sum containing an integer constant term,
2354 return the location (type rtx *) of the pointer to that constant term.
2355 Otherwise, return a null pointer. */
2356
2357rtx *
2358find_constant_term_loc (rtx *p)
2359{
2360 rtx *tem;
2361 enum rtx_code code = GET_CODE (*p);
2362
2363 /* If *P IS such a constant term, P is its location. */
2364
2365 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2366 || code == CONST)
2367 return p;
2368
2369 /* Otherwise, if not a sum, it has no constant term. */
2370
2371 if (GET_CODE (*p) != PLUS)
2372 return 0;
2373
2374 /* If one of the summands is constant, return its location. */
2375
2376 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2377 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2378 return p;
2379
2380 /* Otherwise, check each summand for containing a constant term. */
2381
2382 if (XEXP (*p, 0) != 0)
2383 {
      tem = find_constant_term_loc (&XEXP (*p, 0));
2385 if (tem != 0)
2386 return tem;
2387 }
2388
2389 if (XEXP (*p, 1) != 0)
2390 {
      tem = find_constant_term_loc (&XEXP (*p, 1));
2392 if (tem != 0)
2393 return tem;
2394 }
2395
2396 return 0;
2397}
2398
2399/* Return true if OP is a memory reference whose address contains
2400 no side effects and remains valid after the addition of a positive
2401 integer less than the size of the object being referenced.
2402
2403 We assume that the original address is valid and do not check it.
2404
2405 This uses strict_memory_address_p as a subroutine, so
2406 don't use it before reload. */
2407
2408bool
2409offsettable_memref_p (rtx op)
2410{
2411 return ((MEM_P (op))
2412 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
2413 MEM_ADDR_SPACE (op)));
2414}
2415
2416/* Similar, but don't require a strictly valid mem ref:
2417 consider pseudo-regs valid as index or base regs. */
2418
2419bool
2420offsettable_nonstrict_memref_p (rtx op)
2421{
2422 return ((MEM_P (op))
2423 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
2424 MEM_ADDR_SPACE (op)));
2425}
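
/* Illustration (sketch only; whether a given displacement is legitimate
   is of course target-specific):
   (mem:SI (plus:SI (reg:SI r1) (const_int 8))) is normally offsettable,
   since adding up to GET_MODE_SIZE (SImode) - 1 merely changes the
   displacement, whereas (mem:SI (post_inc:SI (reg:SI sp))) is not,
   because its address has a side effect.  */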
2426
2427/* Return true if Y is a memory address which contains no side effects
2428 and would remain valid for address space AS after the addition of
2429 a positive integer less than the size of that mode.
2430
2431 We assume that the original address is valid and do not check it.
2432 We do check that it is valid for narrower modes.
2433
2434 If STRICTP is nonzero, we require a strictly valid address,
2435 for the sake of use in reload.cc. */
2436
2437bool
2438offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
2439 addr_space_t as)
2440{
2441 enum rtx_code ycode = GET_CODE (y);
2442 rtx z;
2443 rtx y1 = y;
2444 rtx *y2;
2445 bool (*addressp) (machine_mode, rtx, addr_space_t, code_helper) =
2446 (strictp ? strict_memory_address_addr_space_p
2447 : memory_address_addr_space_p);
2448 poly_int64 mode_sz = GET_MODE_SIZE (mode);
2449
2450 if (CONSTANT_ADDRESS_P (y))
2451 return true;
2452
2453 /* Adjusting an offsettable address involves changing to a narrower mode.
2454 Make sure that's OK. */
2455
2456 if (mode_dependent_address_p (y, as))
2457 return false;
2458
2459 machine_mode address_mode = GET_MODE (y);
2460 if (address_mode == VOIDmode)
2461 address_mode = targetm.addr_space.address_mode (as);
2462#ifdef POINTERS_EXTEND_UNSIGNED
2463 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2464#endif
2465
2466 /* ??? How much offset does an offsettable BLKmode reference need?
2467 Clearly that depends on the situation in which it's being used.
2468 However, the current situation in which we test 0xffffffff is
2469 less than ideal. Caveat user. */
2470 if (known_eq (mode_sz, 0))
2471 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2472
2473 /* If the expression contains a constant term,
2474 see if it remains valid when max possible offset is added. */
2475
  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2477 {
2478 bool good;
2479
2480 y1 = *y2;
2481 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2482 /* Use QImode because an odd displacement may be automatically invalid
2483 for any wider mode. But it should be valid for a single byte. */
2484 good = (*addressp) (QImode, y, as, ERROR_MARK);
2485
2486 /* In any case, restore old contents of memory. */
2487 *y2 = y1;
2488 return good;
2489 }
2490
2491 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2492 return false;
2493
2494 /* The offset added here is chosen as the maximum offset that
2495 any instruction could need to add when operating on something
2496 of the specified mode. We assume that if Y and Y+c are
2497 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2498 go inside a LO_SUM here, so we do so as well. */
2499 if (GET_CODE (y) == LO_SUM
2500 && mode != BLKmode
2501 && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
2502 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2503 plus_constant (address_mode, XEXP (y, 1),
2504 mode_sz - 1));
2505#ifdef POINTERS_EXTEND_UNSIGNED
2506 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2507 else if (POINTERS_EXTEND_UNSIGNED > 0
2508 && GET_CODE (y) == ZERO_EXTEND
2509 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2510 z = gen_rtx_ZERO_EXTEND (address_mode,
2511 plus_constant (pointer_mode, XEXP (y, 0),
2512 mode_sz - 1));
2513#endif
2514 else
2515 z = plus_constant (address_mode, y, mode_sz - 1);
2516
2517 /* Use QImode because an odd displacement may be automatically invalid
2518 for any wider mode. But it should be valid for a single byte. */
2519 return (*addressp) (QImode, z, as, ERROR_MARK);
2520}
2521
2522/* Return true if ADDR is an address-expression whose effect depends
2523 on the mode of the memory reference it is used in.
2524
2525 ADDRSPACE is the address space associated with the address.
2526
2527 Autoincrement addressing is a typical example of mode-dependence
2528 because the amount of the increment depends on the mode. */
2529
2530bool
2531mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2532{
2533 /* Auto-increment addressing with anything other than post_modify
2534 or pre_modify always introduces a mode dependency. Catch such
2535 cases now instead of deferring to the target. */
2536 if (GET_CODE (addr) == PRE_INC
2537 || GET_CODE (addr) == POST_INC
2538 || GET_CODE (addr) == PRE_DEC
2539 || GET_CODE (addr) == POST_DEC)
2540 return true;
2541
2542 return targetm.mode_dependent_address_p (addr, addrspace);
2543}
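
/* Example: (post_inc:SI (reg:SI sp)) is mode-dependent because the
   increment equals the size of the mode of the enclosing MEM (4 bytes
   for an SImode reference, 8 for DImode), so the same address rtx means
   different things in different modes.  */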
2544
2545/* Return true if boolean attribute ATTR is supported. */
2546
2547static bool
2548have_bool_attr (bool_attr attr)
2549{
2550 switch (attr)
2551 {
2552 case BA_ENABLED:
2553 return HAVE_ATTR_enabled;
2554 case BA_PREFERRED_FOR_SIZE:
2555 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2556 case BA_PREFERRED_FOR_SPEED:
2557 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2558 }
2559 gcc_unreachable ();
2560}
2561
2562/* Return the value of ATTR for instruction INSN. */
2563
2564static bool
2565get_bool_attr (rtx_insn *insn, bool_attr attr)
2566{
2567 switch (attr)
2568 {
2569 case BA_ENABLED:
2570 return get_attr_enabled (insn);
2571 case BA_PREFERRED_FOR_SIZE:
2572 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2573 case BA_PREFERRED_FOR_SPEED:
2574 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2575 }
2576 gcc_unreachable ();
2577}
2578
2579/* Like get_bool_attr_mask, but don't use the cache. */
2580
2581static alternative_mask
2582get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2583{
2584 /* Temporarily install enough information for get_attr_<foo> to assume
2585 that the insn operands are already cached. As above, the attribute
2586 mustn't depend on the values of operands, so we don't provide their
2587 real values here. */
2588 rtx_insn *old_insn = recog_data.insn;
2589 int old_alternative = which_alternative;
2590
2591 recog_data.insn = insn;
2592 alternative_mask mask = ALL_ALTERNATIVES;
2593 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2594 for (int i = 0; i < n_alternatives; i++)
2595 {
2596 which_alternative = i;
2597 if (!get_bool_attr (insn, attr))
2598 mask &= ~ALTERNATIVE_BIT (i);
2599 }
2600
2601 recog_data.insn = old_insn;
2602 which_alternative = old_alternative;
2603 return mask;
2604}
2605
2606/* Return the mask of operand alternatives that are allowed for INSN
2607 by boolean attribute ATTR. This mask depends only on INSN and on
2608 the current target; it does not depend on things like the values of
2609 operands. */
2610
2611static alternative_mask
2612get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2613{
2614 /* Quick exit for asms and for targets that don't use these attributes. */
2615 int code = INSN_CODE (insn);
2616 if (code < 0 || !have_bool_attr (attr))
2617 return ALL_ALTERNATIVES;
2618
2619 /* Calling get_attr_<foo> can be expensive, so cache the mask
2620 for speed. */
2621 if (!this_target_recog->x_bool_attr_masks[code][attr])
2622 this_target_recog->x_bool_attr_masks[code][attr]
2623 = get_bool_attr_mask_uncached (insn, attr);
2624 return this_target_recog->x_bool_attr_masks[code][attr];
2625}
2626
2627/* Return the set of alternatives of INSN that are allowed by the current
2628 target. */
2629
2630alternative_mask
2631get_enabled_alternatives (rtx_insn *insn)
2632{
  return get_bool_attr_mask (insn, BA_ENABLED);
2634}
2635
2636/* Return the set of alternatives of INSN that are allowed by the current
2637 target and are preferred for the current size/speed optimization
2638 choice. */
2639
2640alternative_mask
2641get_preferred_alternatives (rtx_insn *insn)
2642{
2643 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2647}
2648
2649/* Return the set of alternatives of INSN that are allowed by the current
2650 target and are preferred for the size/speed optimization choice
2651 associated with BB. Passing a separate BB is useful if INSN has not
2652 been emitted yet or if we are considering moving it to a different
2653 block. */
2654
2655alternative_mask
2656get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2657{
2658 if (optimize_bb_for_speed_p (bb))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2662}
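
/* Backend sketch (hypothetical; the exact idiom varies by target): these
   masks are driven by md attributes such as

       (define_attr "enabled" "" (const_int 1))

   which a port refines so that the value depends on target flags and,
   via the "alternative" attribute, on the individual alternative.
   get_enabled_alternatives then clears the bit of every alternative
   whose "enabled" value is 0.  */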
2663
2664/* Assert that the cached boolean attributes for INSN are still accurate.
2665 The backend is required to define these attributes in a way that only
2666 depends on the current target (rather than operands, compiler phase,
2667 etc.). */
2668
2669bool
2670check_bool_attrs (rtx_insn *insn)
2671{
2672 int code = INSN_CODE (insn);
2673 if (code >= 0)
2674 for (int i = 0; i <= BA_LAST; ++i)
2675 {
2676 enum bool_attr attr = (enum bool_attr) i;
2677 if (this_target_recog->x_bool_attr_masks[code][attr])
2678 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2679 == get_bool_attr_mask_uncached (insn, attr));
2680 }
2681 return true;
2682}
2683
/* Like extract_insn, but save the insn extracted and don't extract again
   when called again for the same insn, on the assumption that recog_data
   still contains valid information.  This is used primarily by the
   gen_attr infrastructure, which often extracts the same insn repeatedly.  */
2688void
2689extract_insn_cached (rtx_insn *insn)
2690{
2691 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2692 return;
2693 extract_insn (insn);
2694 recog_data.insn = insn;
2695}
2696
2697/* Do uncached extract_insn, constrain_operands and complain about failures.
2698 This should be used when extracting a pre-existing constrained instruction
2699 if the caller wants to know which alternative was chosen. */
2700void
2701extract_constrain_insn (rtx_insn *insn)
2702{
2703 extract_insn (insn);
2704 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2705 fatal_insn_not_found (insn);
2706}
2707
2708/* Do cached extract_insn, constrain_operands and complain about failures.
2709 Used by insn_attrtab. */
2710void
2711extract_constrain_insn_cached (rtx_insn *insn)
2712{
2713 extract_insn_cached (insn);
2714 if (which_alternative == -1
2715 && !constrain_operands (reload_completed,
2716 get_enabled_alternatives (insn)))
2717 fatal_insn_not_found (insn);
2718}
2719
2720/* Do cached constrain_operands on INSN and complain about failures. */
2721bool
2722constrain_operands_cached (rtx_insn *insn, int strict)
2723{
2724 if (which_alternative == -1)
2725 return constrain_operands (strict, get_enabled_alternatives (insn));
2726 else
2727 return true;
2728}
2729
2730/* Analyze INSN and fill in recog_data. */
2731
2732void
2733extract_insn (rtx_insn *insn)
2734{
2735 int i;
2736 int icode;
2737 int noperands;
2738 rtx body = PATTERN (insn);
2739
2740 recog_data.n_operands = 0;
2741 recog_data.n_alternatives = 0;
2742 recog_data.n_dups = 0;
2743 recog_data.is_asm = false;
2744
2745 switch (GET_CODE (body))
2746 {
2747 case USE:
2748 case CLOBBER:
2749 case ASM_INPUT:
2750 case ADDR_VEC:
2751 case ADDR_DIFF_VEC:
2752 case VAR_LOCATION:
2753 case DEBUG_MARKER:
2754 return;
2755
2756 case SET:
2757 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2758 goto asm_insn;
2759 else
2760 goto normal_insn;
2761 case PARALLEL:
2762 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2763 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2764 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2765 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2766 goto asm_insn;
2767 else
2768 goto normal_insn;
2769 case ASM_OPERANDS:
2770 asm_insn:
2771 recog_data.n_operands = noperands = asm_noperands (body);
2772 if (noperands >= 0)
2773 {
2774 /* This insn is an `asm' with operands. */
2775
2776 /* expand_asm_operands makes sure there aren't too many operands. */
2777 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2778
2779 /* Now get the operand values and constraints out of the insn. */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2785 if (noperands > 0)
2786 {
2787 const char *p = recog_data.constraints[0];
2788 recog_data.n_alternatives = 1;
2789 while (*p)
2790 recog_data.n_alternatives += (*p++ == ',');
2791 }
2792 recog_data.is_asm = true;
2793 break;
2794 }
2795 fatal_insn_not_found (insn);
2796
2797 default:
2798 normal_insn:
2799 /* Ordinary insn: recognize it, get the operands via insn_extract
2800 and get the constraints. */
2801
2802 icode = recog_memoized (insn);
2803 if (icode < 0)
2804 fatal_insn_not_found (insn);
2805
2806 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2807 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2808 recog_data.n_dups = insn_data[icode].n_dups;
2809
2810 insn_extract (insn);
2811
2812 for (i = 0; i < noperands; i++)
2813 {
2814 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2815 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2816 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* A VOIDmode match_operand gets its mode from the real operand.  */
2818 if (recog_data.operand_mode[i] == VOIDmode)
2819 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2820 }
2821 }
2822 for (i = 0; i < noperands; i++)
2823 recog_data.operand_type[i]
2824 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2825 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2826 : OP_IN);
2827
2828 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2829
2830 recog_data.insn = NULL;
2831 which_alternative = -1;
2832}
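
/* Usage sketch (hypothetical pass code): after extract_insn the operands
   are available through recog_data.  */
#if 0
  extract_insn (insn);
  for (int i = 0; i < recog_data.n_operands; i++)
    {
      rtx op = recog_data.operand[i];
      const char *constraint = recog_data.constraints[i];
      /* ... inspect OP and CONSTRAINT ... */
    }
#endif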
2833
2834/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2835 operands, N_ALTERNATIVES alternatives and constraint strings
2836 CONSTRAINTS. OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2837 and CONSTRAINTS has N_OPERANDS entries. OPLOC should be passed in
2838 if the insn is an asm statement and preprocessing should take the
2839 asm operands into account, e.g. to determine whether they could be
2840 addresses in constraints that require addresses; it should then
2841 point to an array of pointers to each operand. */
2842
2843void
2844preprocess_constraints (int n_operands, int n_alternatives,
2845 const char **constraints,
2846 operand_alternative *op_alt_base,
2847 rtx **oploc)
2848{
2849 for (int i = 0; i < n_operands; i++)
2850 {
2851 int j;
2852 struct operand_alternative *op_alt;
2853 const char *p = constraints[i];
2854
2855 op_alt = op_alt_base;
2856
2857 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2858 {
2859 op_alt[i].cl = NO_REGS;
2860 op_alt[i].constraint = p;
2861 op_alt[i].matches = -1;
2862 op_alt[i].matched = -1;
2863
2864 if (*p == '\0' || *p == ',')
2865 {
2866 op_alt[i].anything_ok = 1;
2867 continue;
2868 }
2869
2870 for (;;)
2871 {
2872 char c = *p;
2873 if (c == '#')
2874 do
2875 c = *++p;
2876 while (c != ',' && c != '\0');
2877 if (c == ',' || c == '\0')
2878 {
2879 p++;
2880 break;
2881 }
2882
2883 switch (c)
2884 {
2885 case '?':
2886 op_alt[i].reject += 6;
2887 break;
2888 case '!':
2889 op_alt[i].reject += 600;
2890 break;
2891 case '&':
2892 op_alt[i].earlyclobber = 1;
2893 break;
2894
2895 case '0': case '1': case '2': case '3': case '4':
2896 case '5': case '6': case '7': case '8': case '9':
2897 {
2898 char *end;
		op_alt[i].matches = strtoul (p, &end, 10);
2900 op_alt[op_alt[i].matches].matched = i;
2901 p = end;
2902 }
2903 continue;
2904
2905 case 'X':
2906 op_alt[i].anything_ok = 1;
2907 break;
2908
2909 case 'g':
2910 op_alt[i].cl =
2911 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2912 break;
2913
2914 default:
2915 enum constraint_num cn = lookup_constraint (p);
2916 enum reg_class cl;
	      switch (get_constraint_type (cn))
2918 {
2919 case CT_REGISTER:
		  cl = reg_class_for_constraint (cn);
2921 if (cl != NO_REGS)
2922 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2923 break;
2924
2925 case CT_CONST_INT:
2926 break;
2927
2928 case CT_MEMORY:
2929 case CT_SPECIAL_MEMORY:
2930 case CT_RELAXED_MEMORY:
2931 op_alt[i].memory_ok = 1;
2932 break;
2933
2934 case CT_ADDRESS:
		  if (oploc && !address_operand (*oploc[i], VOIDmode))
2936 break;
2937
2938 op_alt[i].is_address = 1;
2939 op_alt[i].cl
2940 = (reg_class_subunion
2941 [(int) op_alt[i].cl]
		       [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					      ADDRESS, SCRATCH)]);
2944 break;
2945
2946 case CT_FIXED_FORM:
2947 break;
2948 }
2949 break;
2950 }
2951 p += CONSTRAINT_LEN (c, p);
2952 }
2953 }
2954 }
2955}
2956
/* Return an array of operand_alternative structures for
   instruction ICODE.  */
2959
2960const operand_alternative *
2961preprocess_insn_constraints (unsigned int icode)
2962{
2963 gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2964 if (this_target_recog->x_op_alt[icode])
2965 return this_target_recog->x_op_alt[icode];
2966
2967 int n_operands = insn_data[icode].n_operands;
2968 if (n_operands == 0)
2969 return 0;
2970 /* Always provide at least one alternative so that which_op_alt ()
2971 works correctly. If the instruction has 0 alternatives (i.e. all
2972 constraint strings are empty) then each operand in this alternative
2973 will have anything_ok set. */
2974 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2975 int n_entries = n_operands * n_alternatives;
2976
2977 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2978 const char **constraints = XALLOCAVEC (const char *, n_operands);
2979
2980 for (int i = 0; i < n_operands; ++i)
2981 constraints[i] = insn_data[icode].operand[i].constraint;
  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2983 NULL);
2984
2985 this_target_recog->x_op_alt[icode] = op_alt;
2986 return op_alt;
2987}
2988
2989/* After calling extract_insn, you can use this function to extract some
2990 information from the constraint strings into a more usable form.
2991 The collected data is stored in recog_op_alt. */
2992
2993void
2994preprocess_constraints (rtx_insn *insn)
2995{
2996 int icode = INSN_CODE (insn);
2997 if (icode >= 0)
2998 recog_op_alt = preprocess_insn_constraints (icode);
2999 else
3000 {
3001 int n_operands = recog_data.n_operands;
3002 int n_alternatives = recog_data.n_alternatives;
3003 int n_entries = n_operands * n_alternatives;
      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
3005 preprocess_constraints (n_operands, n_alternatives,
			      recog_data.constraints, asm_op_alt,
3007 NULL);
3008 recog_op_alt = asm_op_alt;
3009 }
3010}
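
/* Indexing sketch: once recog_op_alt is set up, the entry for operand OP
   in alternative ALT is recog_op_alt[ALT * recog_data.n_operands + OP];
   e.g. after constrain_operands has set which_alternative,
   recog_op_alt[which_alternative * recog_data.n_operands + 1].cl is the
   register class allowed for operand 1 in the matched alternative.  */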
3011
3012/* Check the operands of an insn against the insn's operand constraints
3013 and return 1 if they match any of the alternatives in ALTERNATIVES.
3014
3015 The information about the insn's operands, constraints, operand modes
3016 etc. is obtained from the global variables set up by extract_insn.
3017
3018 WHICH_ALTERNATIVE is set to a number which indicates which
3019 alternative of constraints was matched: 0 for the first alternative,
3020 1 for the next, etc.
3021
3022 In addition, when two operands are required to match
3023 and it happens that the output operand is (reg) while the
3024 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
3025 make the output operand look like the input.
3026 This is because the output operand is the one the template will print.
3027
3028 This is used in final, just before printing the assembler code and by
3029 the routines that determine an insn's attribute.
3030
3031 If STRICT is a positive nonzero value, it means that we have been
3032 called after reload has been completed. In that case, we must
3033 do all checks strictly. If it is zero, it means that we have been called
3034 before reload has completed. In that case, we first try to see if we can
3035 find an alternative that matches strictly. If not, we try again, this
3036 time assuming that reload will fix up the insn. This provides a "best
3037 guess" for the alternative and is used to compute attributes of insns prior
3038 to reload. A negative value of STRICT is used for this internal call. */
3039
3040struct funny_match
3041{
3042 int this_op, other;
3043};
3044
3045bool
3046constrain_operands (int strict, alternative_mask alternatives)
3047{
3048 const char *constraints[MAX_RECOG_OPERANDS];
3049 int matching_operands[MAX_RECOG_OPERANDS];
3050 int earlyclobber[MAX_RECOG_OPERANDS];
3051 int c;
3052
3053 struct funny_match funny_match[MAX_RECOG_OPERANDS];
3054 int funny_match_index;
3055
3056 which_alternative = 0;
3057 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
3058 return true;
3059
3060 for (c = 0; c < recog_data.n_operands; c++)
3061 constraints[c] = recog_data.constraints[c];
3062
3063 do
3064 {
3065 int seen_earlyclobber_at = -1;
3066 int opno;
3067 bool lose = false;
3068 funny_match_index = 0;
3069
3070 if (!TEST_BIT (alternatives, which_alternative))
3071 {
3072 int i;
3073
3074 for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);
3076
3077 which_alternative++;
3078 continue;
3079 }
3080
3081 for (opno = 0; opno < recog_data.n_operands; opno++)
3082 matching_operands[opno] = -1;
3083
3084 for (opno = 0; opno < recog_data.n_operands; opno++)
3085 {
3086 rtx op = recog_data.operand[opno];
3087 machine_mode mode = GET_MODE (op);
3088 const char *p = constraints[opno];
3089 int offset = 0;
3090 bool win = false;
3091 int val;
3092 int len;
3093
3094 earlyclobber[opno] = 0;
3095
3096 if (GET_CODE (op) == SUBREG)
3097 {
3098 if (REG_P (SUBREG_REG (op))
3099 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
3100 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
3101 GET_MODE (SUBREG_REG (op)),
3102 SUBREG_BYTE (op),
3103 GET_MODE (op));
3104 op = SUBREG_REG (op);
3105 }
3106
3107 /* An empty constraint or empty alternative
3108 allows anything which matched the pattern. */
3109 if (*p == 0 || *p == ',')
3110 win = true;
3111
3112 do
3113 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
3114 {
3115 case '\0':
3116 len = 0;
3117 break;
3118 case ',':
3119 c = '\0';
3120 break;
3121
3122 case '#':
3123 /* Ignore rest of this alternative as far as
3124 constraint checking is concerned. */
3125 do
3126 p++;
3127 while (*p && *p != ',');
3128 len = 0;
3129 break;
3130
3131 case '&':
3132 earlyclobber[opno] = 1;
3133 if (seen_earlyclobber_at < 0)
3134 seen_earlyclobber_at = opno;
3135 break;
3136
3137 case '0': case '1': case '2': case '3': case '4':
3138 case '5': case '6': case '7': case '8': case '9':
3139 {
3140 /* This operand must be the same as a previous one.
3141 This kind of constraint is used for instructions such
3142 as add when they take only two operands.
3143
3144 Note that the lower-numbered operand is passed first.
3145
3146 If we are not testing strictly, assume that this
3147 constraint will be satisfied. */
3148
3149 char *end;
3150 int match;
3151
		match = strtoul (p, &end, 10);
3153 p = end;
3154
3155 if (strict < 0)
3156 val = 1;
3157 else
3158 {
3159 rtx op1 = recog_data.operand[match];
3160 rtx op2 = recog_data.operand[opno];
3161 val = operands_match_p (op1, op2);
3162 }
3163
3164 matching_operands[opno] = match;
3165 matching_operands[match] = opno;
3166
3167 if (val != 0)
3168 win = true;
3169
3170 /* If output is *x and input is *--x, arrange later
3171 to change the output to *--x as well, since the
3172 output op is the one that will be printed. */
3173 if (val == 2 && strict > 0)
3174 {
3175 funny_match[funny_match_index].this_op = opno;
3176 funny_match[funny_match_index++].other = match;
3177 }
3178 }
3179 len = 0;
3180 break;
3181
3182 case 'p':
3183 /* p is used for address_operands. When we are called by
3184 gen_reload, no one will have checked that the address is
3185 strictly valid, i.e., that all pseudos requiring hard regs
3186 have gotten them. We also want to make sure we have a
3187 valid mode. */
3188 if ((GET_MODE (op) == VOIDmode
3189 || SCALAR_INT_MODE_P (GET_MODE (op)))
3190 && (strict <= 0
3191 || (strict_memory_address_p
3192 (recog_data.operand_mode[opno], op))))
3193 win = true;
3194 break;
3195
3196 /* No need to check general_operand again;
3197 it was done in insn-recog.cc. Well, except that reload
3198 doesn't check the validity of its replacements, but
3199 that should only matter when there's a bug. */
3200 case 'g':
3201 /* Anything goes unless it is a REG and really has a hard reg
3202 but the hard reg is not in the class GENERAL_REGS. */
3203 if (REG_P (op))
3204 {
3205 if (strict < 0
3206 || GENERAL_REGS == ALL_REGS
3207 || (reload_in_progress
3208 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3209 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
3210 win = true;
3211 }
3212 else if (strict < 0 || general_operand (op, mode))
3213 win = true;
3214 break;
3215
3216 default:
3217 {
3218 enum constraint_num cn = lookup_constraint (p);
		enum reg_class cl = reg_class_for_constraint (cn);
3220 if (cl != NO_REGS)
3221 {
3222 if (strict < 0
3223 || (strict == 0
3224 && REG_P (op)
3225 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3226 || (strict == 0 && GET_CODE (op) == SCRATCH)
3227 || (REG_P (op)
3228 && reg_fits_class_p (op, cl, offset, mode)))
3229 win = true;
3230 }
3231
		else if (constraint_satisfied_p (op, cn))
3233 win = true;
3234
		else if ((insn_extra_memory_constraint (cn)
3236 || insn_extra_relaxed_memory_constraint (cn))
3237 /* Every memory operand can be reloaded to fit. */
3238 && ((strict < 0 && MEM_P (op))
3239 /* Before reload, accept what reload can turn
3240 into a mem. */
3241 || (strict < 0 && CONSTANT_P (op))
3242 /* Before reload, accept a pseudo or hard register,
3243 since LRA can turn it into a mem. */
3244 || (strict < 0 && targetm.lra_p () && REG_P (op))
3245 /* During reload, accept a pseudo */
3246 || (reload_in_progress && REG_P (op)
3247 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
3248 win = true;
		else if (insn_extra_address_constraint (cn)
3250 /* Every address operand can be reloaded to fit. */
3251 && strict < 0)
3252 win = true;
3253 /* Cater to architectures like IA-64 that define extra memory
3254 constraints without using define_memory_constraint. */
3255 else if (reload_in_progress
3256 && REG_P (op)
3257 && REGNO (op) >= FIRST_PSEUDO_REGISTER
3258 && reg_renumber[REGNO (op)] < 0
3259 && reg_equiv_mem (REGNO (op)) != 0
3260 && constraint_satisfied_p
			     (reg_equiv_mem (REGNO (op)), cn))
3262 win = true;
3263 break;
3264 }
3265 }
3266 while (p += len, c);
3267
3268 constraints[opno] = p;
3269 /* If this operand did not win somehow,
3270 this alternative loses. */
3271 if (! win)
3272 lose = true;
3273 }
3274 /* This alternative won; the operands are ok.
3275 Change whichever operands this alternative says to change. */
3276 if (! lose)
3277 {
3278 int opno, eopno;
3279
3280 /* See if any earlyclobber operand conflicts with some other
3281 operand. */
3282
3283 if (strict > 0 && seen_earlyclobber_at >= 0)
3284 for (eopno = seen_earlyclobber_at;
3285 eopno < recog_data.n_operands;
3286 eopno++)
3287 /* Ignore earlyclobber operands now in memory,
3288 because we would often report failure when we have
3289 two memory operands, one of which was formerly a REG. */
3290 if (earlyclobber[eopno]
3291 && REG_P (recog_data.operand[eopno]))
3292 for (opno = 0; opno < recog_data.n_operands; opno++)
3293 if ((MEM_P (recog_data.operand[opno])
3294 || recog_data.operand_type[opno] != OP_OUT)
3295 && opno != eopno
3296 /* Ignore things like match_operator operands. */
3297 && *recog_data.constraints[opno] != 0
3298 && ! (matching_operands[opno] == eopno
3299 && operands_match_p (recog_data.operand[opno],
3300 recog_data.operand[eopno]))
3301 && ! safe_from_earlyclobber (recog_data.operand[opno],
3302 recog_data.operand[eopno]))
3303 lose = true;
3304
3305 if (! lose)
3306 {
3307 while (--funny_match_index >= 0)
3308 {
3309 recog_data.operand[funny_match[funny_match_index].other]
3310 = recog_data.operand[funny_match[funny_match_index].this_op];
3311 }
3312
3313 /* For operands without < or > constraints reject side-effects. */
3314 if (AUTO_INC_DEC && recog_data.is_asm)
3315 {
3316 for (opno = 0; opno < recog_data.n_operands; opno++)
3317 if (MEM_P (recog_data.operand[opno]))
3318 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
3319 {
3320 case PRE_INC:
3321 case POST_INC:
3322 case PRE_DEC:
3323 case POST_DEC:
3324 case PRE_MODIFY:
3325 case POST_MODIFY:
		      if (strchr (recog_data.constraints[opno], '<') == NULL
			  && strchr (recog_data.constraints[opno], '>')
3328 == NULL)
3329 return false;
3330 break;
3331 default:
3332 break;
3333 }
3334 }
3335
3336 return true;
3337 }
3338 }
3339
3340 which_alternative++;
3341 }
3342 while (which_alternative < recog_data.n_alternatives);
3343
3344 which_alternative = -1;
3345 /* If we are about to reject this, but we are not to test strictly,
3346 try a very loose test. Only return failure if it fails also. */
3347 if (strict == 0)
    return constrain_operands (-1, alternatives);
3349 else
3350 return false;
3351}
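
/* Usage sketch (hypothetical; this mirrors extract_constrain_insn
   above):  */
#if 0
  extract_insn (insn);
  if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
  /* which_alternative now records the alternative that matched.  */
#endif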
3352
3353/* Return true iff OPERAND (assumed to be a REG rtx)
3354 is a hard reg in class CLASS when its regno is offset by OFFSET
3355 and changed to mode MODE.
3356 If REG occupies multiple hard regs, all of them must be in CLASS. */
3357
3358bool
3359reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
3360 machine_mode mode)
3361{
3362 unsigned int regno = REGNO (operand);
3363
3364 if (cl == NO_REGS)
3365 return false;
3366
3367 /* Regno must not be a pseudo register. Offset may be negative. */
3368 return (HARD_REGISTER_NUM_P (regno)
3369 && HARD_REGISTER_NUM_P (regno + offset)
3370 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
			      regno + offset));
3372}
3373
3374/* Split single instruction. Helper function for split_all_insns and
3375 split_all_insns_noflow. Return last insn in the sequence if successful,
3376 or NULL if unsuccessful. */
3377
3378static rtx_insn *
3379split_insn (rtx_insn *insn)
3380{
3381 /* Split insns here to get max fine-grain parallelism. */
3382 rtx_insn *first = PREV_INSN (insn);
3383 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
3384 rtx insn_set, last_set, note;
3385
3386 if (last == insn)
3387 return NULL;
3388
3389 /* If the original instruction was a single set that was known to be
3390 equivalent to a constant, see if we can say the same about the last
3391 instruction in the split sequence. The two instructions must set
3392 the same destination. */
3393 insn_set = single_set (insn);
3394 if (insn_set)
3395 {
      last_set = single_set (last);
3397 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
3398 {
3399 note = find_reg_equal_equiv_note (insn);
3400 if (note && CONSTANT_P (XEXP (note, 0)))
3401 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
3402 else if (CONSTANT_P (SET_SRC (insn_set)))
3403 set_unique_reg_note (last, REG_EQUAL,
3404 copy_rtx (SET_SRC (insn_set)));
3405 }
3406 }
3407
3408 /* try_split returns the NOTE that INSN became. */
3409 SET_INSN_DELETED (insn);
3410
3411 /* ??? Coddle to md files that generate subregs in post-reload
3412 splitters instead of computing the proper hard register. */
3413 if (reload_completed && first != last)
3414 {
      first = NEXT_INSN (first);
3416 for (;;)
3417 {
3418 if (INSN_P (first))
3419 cleanup_subreg_operands (first);
3420 if (first == last)
3421 break;
	  first = NEXT_INSN (first);
3423 }
3424 }
3425
3426 return last;
3427}
3428
/* Split all insns in the function.  */
3430
3431void
3432split_all_insns (void)
3433{
3434 bool changed;
3435 bool need_cfg_cleanup = false;
3436 basic_block bb;
3437
3438 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
3439 bitmap_clear (blocks);
3440 changed = false;
3441
3442 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3443 {
3444 rtx_insn *insn, *next;
3445 bool finish = false;
3446
3447 rtl_profile_for_bb (bb);
3448 for (insn = BB_HEAD (bb); !finish ; insn = next)
3449 {
3450 /* Can't use `next_real_insn' because that might go across
3451 CODE_LABELS and short-out basic blocks. */
3452 next = NEXT_INSN (insn);
3453 finish = (insn == BB_END (bb));
3454
3455 /* If INSN has a REG_EH_REGION note and we split INSN, the
3456 resulting split may not have/need REG_EH_REGION notes.
3457
3458 If that happens and INSN was the last reference to the
3459 given EH region, then the EH region will become unreachable.
3460 We cannot leave the unreachable blocks in the CFG as that
3461 will trigger a checking failure.
3462
3463 So track if INSN has a REG_EH_REGION note. If so and we
3464 split INSN, then trigger a CFG cleanup. */
3465 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3466 if (INSN_P (insn))
3467 {
3468 rtx set = single_set (insn);
3469
3470 /* Don't split no-op move insns. These should silently
3471 disappear later in final. Splitting such insns would
3472 break the code that handles LIBCALL blocks. */
3473 if (set && set_noop_p (set))
3474 {
3475 /* Nops get in the way while scheduling, so delete them
3476 now if register allocation has already been done. It
3477 is too risky to try to do this before register
3478 allocation, and there are unlikely to be very many
3479 nops then anyways. */
3480 if (reload_completed)
3481 delete_insn_and_edges (insn);
3482 if (note)
3483 need_cfg_cleanup = true;
3484 }
3485 else
3486 {
3487 if (split_insn (insn))
3488 {
		    bitmap_set_bit (blocks, bb->index);
3490 changed = true;
3491 if (note)
3492 need_cfg_cleanup = true;
3493 }
3494 }
3495 }
3496 }
3497 }
3498
3499 default_rtl_profile ();
3500 if (changed)
3501 {
3502 find_many_sub_basic_blocks (blocks);
3503
3504 /* Splitting could drop an REG_EH_REGION if it potentially
3505 trapped in its original form, but does not in its split
3506 form. Consider a FLOAT_TRUNCATE which splits into a memory
3507 store/load pair and -fnon-call-exceptions. */
3508 if (need_cfg_cleanup)
3509 cleanup_cfg (0);
3510 }
3511
3512 checking_verify_flow_info ();
3513}
3514
3515/* Same as split_all_insns, but do not expect CFG to be available.
3516 Used by machine dependent reorg passes. */
3517
3518void
3519split_all_insns_noflow (void)
3520{
3521 rtx_insn *next, *insn;
3522
3523 for (insn = get_insns (); insn; insn = next)
3524 {
3525 next = NEXT_INSN (insn);
3526 if (INSN_P (insn))
3527 {
3528 /* Don't split no-op move insns. These should silently
3529 disappear later in final. Splitting such insns would
3530 break the code that handles LIBCALL blocks. */
3531 rtx set = single_set (insn);
3532 if (set && set_noop_p (set))
3533 {
3534 /* Nops get in the way while scheduling, so delete them
3535 now if register allocation has already been done. It
3536 is too risky to try to do this before register
3537 allocation, and there are unlikely to be very many
3538 nops then anyways.
3539
3540 ??? Should we use delete_insn when the CFG isn't valid? */
3541 if (reload_completed)
3542 delete_insn_and_edges (insn);
3543 }
3544 else
3545 split_insn (insn);
3546 }
3547 }
3548}
3549
3550struct peep2_insn_data
3551{
3552 rtx_insn *insn;
3553 regset live_before;
3554};
3555
3556static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3557static int peep2_current;
3558
3559static bool peep2_do_rebuild_jump_labels;
3560static bool peep2_do_cleanup_cfg;
3561
3562/* The number of instructions available to match a peep2. */
3563int peep2_current_count;
3564
3565/* A marker indicating the last insn of the block. The live_before regset
3566 for this element is correct, indicating DF_LIVE_OUT for the block. */
3567#define PEEP2_EOB invalid_insn_rtx
3568
3569/* Wrap N to fit into the peep2_insn_data buffer. */
3570
3571static int
3572peep2_buf_position (int n)
3573{
3574 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3575 n -= MAX_INSNS_PER_PEEP2 + 1;
3576 return n;
3577}
3578
3579/* Return the Nth non-note insn after `current', or return NULL_RTX if it
3580 does not exist. Used by the recognizer to find the next insn to match
3581 in a multi-insn pattern. */
3582
3583rtx_insn *
3584peep2_next_insn (int n)
3585{
3586 gcc_assert (n <= peep2_current_count);
3587
  n = peep2_buf_position (peep2_current + n);
3589
3590 return peep2_insn_data[n].insn;
3591}
3592
3593/* Return true if REGNO is dead before the Nth non-note insn
3594 after `current'. */
3595
3596bool
3597peep2_regno_dead_p (int ofs, int regno)
3598{
3599 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3600
  ofs = peep2_buf_position (peep2_current + ofs);
3602
3603 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3604
3605 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3606}
3607
3608/* Similarly for a REG. */
3609
3610bool
3611peep2_reg_dead_p (int ofs, rtx reg)
3612{
3613 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3614
  ofs = peep2_buf_position (peep2_current + ofs);
3616
3617 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3618
3619 unsigned int end_regno = END_REGNO (x: reg);
3620 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3621 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3622 return false;
3623 return true;
3624}
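
/* md usage sketch (hypothetical define_peephole2 fragment): a pattern
   can require that a register die within the matched window, e.g. with
   a condition string such as

       "peep2_reg_dead_p (2, operands[0])"

   on a two-insn match, meaning operand 0 is no longer live after the
   second matched insn.  */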
3625
3626/* Regno offset to be used in the register search. */
3627static int search_ofs;
3628
3629/* Try to find a hard register of mode MODE, matching the register class in
3630 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3631 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3632 in which case the only condition is that the register must be available
3633 before CURRENT_INSN.
3634 Registers that already have bits set in REG_SET will not be considered.
3635
3636 If an appropriate register is available, it will be returned and the
3637 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3638 returned. */
3639
3640rtx
3641peep2_find_free_register (int from, int to, const char *class_str,
3642 machine_mode mode, HARD_REG_SET *reg_set)
3643{
3644 enum reg_class cl;
3645 HARD_REG_SET live;
3646 df_ref def;
3647 int i;
3648
3649 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3650 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3651
  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);
3654
3655 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3656 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3657
3658 while (from != to)
3659 {
3660 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3661
3662 /* Don't use registers set or clobbered by the insn. */
3663 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3665
      from = peep2_buf_position (from + 1);
3667 }
3668
  cl = reg_class_for_constraint (lookup_constraint (class_str));
3670
3671 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3672 {
3673 int raw_regno, regno, j;
3674 bool success;
3675
3676 /* Distribute the free registers as much as possible. */
3677 raw_regno = search_ofs + i;
3678 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3679 raw_regno -= FIRST_PSEUDO_REGISTER;
3680#ifdef REG_ALLOC_ORDER
3681 regno = reg_alloc_order[raw_regno];
3682#else
3683 regno = raw_regno;
3684#endif
3685
3686 /* Can it support the mode we need? */
3687 if (!targetm.hard_regno_mode_ok (regno, mode))
3688 continue;
3689
3690 success = true;
3691 for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3692 {
3693 /* Don't allocate fixed registers. */
3694 if (fixed_regs[regno + j])
3695 {
3696 success = false;
3697 break;
3698 }
3699 /* Don't allocate global registers. */
3700 if (global_regs[regno + j])
3701 {
3702 success = false;
3703 break;
3704 }
3705 /* Make sure the register is of the right class. */
	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3707 {
3708 success = false;
3709 break;
3710 }
3711 /* And that we don't create an extra save/restore. */
	  if (! crtl->abi->clobbers_full_reg_p (regno + j)
3713 && ! df_regs_ever_live_p (regno + j))
3714 {
3715 success = false;
3716 break;
3717 }
3718
3719 if (! targetm.hard_regno_scratch_ok (regno + j))
3720 {
3721 success = false;
3722 break;
3723 }
3724
3725 /* And we don't clobber traceback for noreturn functions. */
3726 if ((regno + j == FRAME_POINTER_REGNUM
3727 || regno + j == HARD_FRAME_POINTER_REGNUM)
3728 && (! reload_completed || frame_pointer_needed))
3729 {
3730 success = false;
3731 break;
3732 }
3733
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
3736 {
3737 success = false;
3738 break;
3739 }
3740 }
3741
3742 if (success)
3743 {
	  add_to_hard_reg_set (reg_set, mode, regno);
3745
3746 /* Start the next search with the next register. */
3747 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3748 raw_regno = 0;
3749 search_ofs = raw_regno;
3750
3751 return gen_rtx_REG (mode, regno);
3752 }
3753 }
3754
3755 search_ofs = 0;
3756 return NULL_RTX;
3757}
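
/* Usage sketch (hypothetical target code, e.g. from a define_peephole2):  */
#if 0
  HARD_REG_SET used;
  CLEAR_HARD_REG_SET (used);
  /* Look for a general register that is free across matched insns 0-1.  */
  rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
  if (scratch == NULL_RTX)
    /* No such register; the transformation must be abandoned.  */;
#endif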
3758
3759/* Forget all currently tracked instructions, only remember current
3760 LIVE regset. */
3761
3762static void
3763peep2_reinit_state (regset live)
3764{
3765 int i;
3766
  /* Indicate that all slots except the last hold invalid data.  */
3768 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3769 peep2_insn_data[i].insn = NULL;
3770 peep2_current_count = 0;
3771
3772 /* Indicate that the last slot contains live_after data. */
3773 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3774 peep2_current = MAX_INSNS_PER_PEEP2;
3775
3776 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3777}
3778
3779/* Copies frame related info of an insn (OLD_INSN) to the single
3780 insn (NEW_INSN) that was obtained by splitting OLD_INSN. */
3781
3782void
3783copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
3784{
3785 bool any_note = false;
3786 rtx note;
3787
3788 if (!RTX_FRAME_RELATED_P (old_insn))
3789 return;
3790
3791 RTX_FRAME_RELATED_P (new_insn) = 1;
3792
3793 /* Allow the backend to fill in a note during the split. */
3794 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3795 switch (REG_NOTE_KIND (note))
3796 {
3797 case REG_FRAME_RELATED_EXPR:
3798 case REG_CFA_DEF_CFA:
3799 case REG_CFA_ADJUST_CFA:
3800 case REG_CFA_OFFSET:
3801 case REG_CFA_REGISTER:
3802 case REG_CFA_EXPRESSION:
3803 case REG_CFA_RESTORE:
3804 case REG_CFA_SET_VDRAP:
3805 any_note = true;
3806 break;
3807 default:
3808 break;
3809 }
3810
3811 /* If the backend didn't supply a note, copy one over. */
3812 if (!any_note)
3813 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3814 switch (REG_NOTE_KIND (note))
3815 {
3816 case REG_FRAME_RELATED_EXPR:
3817 case REG_CFA_DEF_CFA:
3818 case REG_CFA_ADJUST_CFA:
3819 case REG_CFA_OFFSET:
3820 case REG_CFA_REGISTER:
3821 case REG_CFA_EXPRESSION:
3822 case REG_CFA_RESTORE:
3823 case REG_CFA_SET_VDRAP:
3824 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3825 any_note = true;
3826 break;
3827 default:
3828 break;
3829 }
3830
3831 /* If there still isn't a note, make sure the unwind info sees the
3832 same expression as before the split. */
3833 if (!any_note)
3834 {
3835 rtx old_set, new_set;
3836
3837 /* The old insn had better have been simple, or annotated. */
3838 old_set = single_set (old_insn);
3839 gcc_assert (old_set != NULL);
3840
3841 new_set = single_set (new_insn);
3842 if (!new_set || !rtx_equal_p (new_set, old_set))
3843 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3844 }
3845
3846 /* Copy prologue/epilogue status. This is required in order to keep
3847 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3848 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3849}
3850
3851/* While scanning basic block BB, we found a match of length MATCH_LEN + 1,
3852 starting at INSN. Perform the replacement, removing the old insns and
3853 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3854 if the replacement is rejected. */
3855
3856static rtx_insn *
3857peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3858{
3859 int i;
3860 rtx_insn *last, *before_try, *x;
3861 rtx eh_note, as_note;
3862 rtx_insn *old_insn;
3863 rtx_insn *new_insn;
3864 bool was_call = false;
3865
3866 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3867 match more than one insn, or to be split into more than one insn. */
3868 old_insn = peep2_insn_data[peep2_current].insn;
3869 if (RTX_FRAME_RELATED_P (old_insn))
3870 {
3871 if (match_len != 0)
3872 return NULL;
3873
3874 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3875 may be in the stream for the purpose of register allocation. */
3876 if (active_insn_p (attempt))
3877 new_insn = attempt;
3878 else
3879 new_insn = next_active_insn (attempt);
3880 if (next_active_insn (new_insn))
3881 return NULL;
3882
3883 /* We have a 1-1 replacement. Copy over any frame-related info. */
3884 copy_frame_info_to_split_insn (old_insn, new_insn);
3885 }
3886
3887 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3888 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3889 cfg-related call notes. */
3890 for (i = 0; i <= match_len; ++i)
3891 {
3892 int j;
3893 rtx note;
3894
3895 j = peep2_buf_position (peep2_current + i);
3896 old_insn = peep2_insn_data[j].insn;
3897 if (!CALL_P (old_insn))
3898 continue;
3899 was_call = true;
3900
3901 new_insn = attempt;
3902 while (new_insn != NULL_RTX)
3903 {
3904 if (CALL_P (new_insn))
3905 break;
3906 new_insn = NEXT_INSN (new_insn);
3907 }
3908
3909 gcc_assert (new_insn != NULL_RTX);
3910
3911 CALL_INSN_FUNCTION_USAGE (new_insn)
3912 = CALL_INSN_FUNCTION_USAGE (old_insn);
3913 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3914
3915 for (note = REG_NOTES (old_insn);
3916 note;
3917 note = XEXP (note, 1))
3918 switch (REG_NOTE_KIND (note))
3919 {
3920 case REG_NORETURN:
3921 case REG_SETJMP:
3922 case REG_TM:
3923 case REG_CALL_NOCF_CHECK:
3924 add_reg_note (new_insn, REG_NOTE_KIND (note),
3925 XEXP (note, 0));
3926 break;
3927 default:
3928 /* Discard all other reg notes. */
3929 break;
3930 }
3931
3932 /* Croak if there is another call in the sequence. */
3933 while (++i <= match_len)
3934 {
3935 j = peep2_buf_position (peep2_current + i);
3936 old_insn = peep2_insn_data[j].insn;
3937 gcc_assert (!CALL_P (old_insn));
3938 }
3939 break;
3940 }
3941
3942 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3943 move those notes over to the new sequence. */
3944 as_note = NULL;
3945 for (i = match_len; i >= 0; --i)
3946 {
3947 int j = peep2_buf_position (peep2_current + i);
3948 old_insn = peep2_insn_data[j].insn;
3949
3950 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3951 if (as_note)
3952 break;
3953 }
3954
3955 i = peep2_buf_position (peep2_current + match_len);
3956 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3957
3958 /* Replace the old sequence with the new. */
3959 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3960 last = emit_insn_after_setloc (attempt,
3961 peep2_insn_data[i].insn,
3962 INSN_LOCATION (peepinsn));
3963 if (JUMP_P (peepinsn) && JUMP_P (last))
3964 CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3965 before_try = PREV_INSN (insn);
3966 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3967
3968 /* Re-insert the EH_REGION notes. */
3969 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3970 {
3971 edge eh_edge;
3972 edge_iterator ei;
3973
3974 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3975 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3976 break;
3977
3978 if (eh_note)
3979 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3980
3981 if (eh_edge)
3982 for (x = last; x != before_try; x = PREV_INSN (x))
3983 if (x != BB_END (bb)
3984 && (can_throw_internal (x)
3985 || can_nonlocal_goto (x)))
3986 {
3987 edge nfte, nehe;
3988 int flags;
3989
3990 nfte = split_block (bb, x);
3991 flags = (eh_edge->flags
3992 & (EDGE_EH | EDGE_ABNORMAL));
3993 if (CALL_P (x))
3994 flags |= EDGE_ABNORMAL_CALL;
3995 nehe = make_edge (nfte->src, eh_edge->dest,
3996 flags);
3997
3998 nehe->probability = eh_edge->probability;
3999 nfte->probability = nehe->probability.invert ();
4000
4001 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
4002 bb = nfte->src;
4003 eh_edge = nehe;
4004 }
4005
4006 /* The replacement may have turned a possibly trapping insn into a
4007 non-trapping one; zap any dummy outgoing edges. */
4008 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
4009 }
4010
4011 /* Re-insert the ARGS_SIZE notes. */
4012 if (as_note)
4013 fixup_args_size_notes (before_try, last, get_args_size (as_note));
4014
4015 /* Scan the new insns for embedded side effects and add appropriate
4016 REG_INC notes. */
4017 if (AUTO_INC_DEC)
4018 for (x = last; x != before_try; x = PREV_INSN (x))
4019 if (NONDEBUG_INSN_P (x))
4020 add_auto_inc_notes (x, PATTERN (x));
4021
4022 /* If we generated a jump instruction, it won't have
4023 JUMP_LABEL set. Recompute after we're done. */
4024 for (x = last; x != before_try; x = PREV_INSN (x))
4025 if (JUMP_P (x))
4026 {
4027 peep2_do_rebuild_jump_labels = true;
4028 break;
4029 }
4030
4031 return last;
4032}
4033
4034/* After performing a replacement in basic block BB, fix up the life
4035 information in our buffer. LAST is the last of the insns that we
4036 emitted as a replacement. PREV is the insn before the start of
4037 the replacement. MATCH_LEN + 1 is the number of instructions that were
4038 matched, and which now need to be replaced in the buffer. */
4039
4040static void
4041peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
4042 rtx_insn *prev)
4043{
4044 int i = peep2_buf_position (peep2_current + match_len + 1);
4045 rtx_insn *x;
4046 regset_head live;
4047
4048 INIT_REG_SET (&live);
4049 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
4050
4051 gcc_assert (peep2_current_count >= match_len + 1);
4052 peep2_current_count -= match_len + 1;
4053
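  /* Walk the replacement insns backward, rescanning each for DF; as many
     as fit are recorded back into the ring buffer, each with its
     recomputed live_before set, so that later peepholes can match insns
     from the replacement sequence.  */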
4054 x = last;
4055 do
4056 {
4057 if (INSN_P (x))
4058 {
4059 df_insn_rescan (x);
4060 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
4061 {
4062 peep2_current_count++;
4063 if (--i < 0)
4064 i = MAX_INSNS_PER_PEEP2;
4065 peep2_insn_data[i].insn = x;
4066 df_simulate_one_insn_backwards (bb, x, &live);
4067 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
4068 }
4069 }
4070 x = PREV_INSN (x);
4071 }
4072 while (x != prev);
4073 CLEAR_REG_SET (&live);
4074
4075 peep2_current = i;
4076}
4077
4078/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
4079 Return true if we added it, false otherwise. The caller will try to match
4080 peepholes against the buffer if we return false; otherwise it will try to
4081 add more instructions to the buffer. */
4082
4083static bool
4084peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
4085{
4086 int pos;
4087
4088 /* Once we have filled the maximum number of insns the buffer can hold,
4089 allow the caller to match the insns against peepholes. We wait until
4090 the buffer is full in case the target has similar peepholes of different
4091 length; we always want to match the longest if possible. */
4092 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
4093 return false;
4094
4095 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
4096 any other pattern, lest it change the semantics of the frame info. */
4097 if (RTX_FRAME_RELATED_P (insn))
4098 {
4099 /* Let the buffer drain first. */
4100 if (peep2_current_count > 0)
4101 return false;
4102 /* Now the insn will be the only thing in the buffer. */
4103 }
4104
4105 pos = peep2_buf_position (peep2_current + peep2_current_count);
4106 peep2_insn_data[pos].insn = insn;
4107 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4108 peep2_current_count++;
4109
4110 df_simulate_one_insn_forwards (bb, insn, live);
4111 return true;
4112}
4113
4114/* Perform the peephole2 optimization pass. */
4115
4116static void
4117peephole2_optimize (void)
4118{
4119 rtx_insn *insn;
4120 bitmap live;
4121 int i;
4122 basic_block bb;
4123
4124 peep2_do_cleanup_cfg = false;
4125 peep2_do_rebuild_jump_labels = false;
4126
4127 df_set_flags (DF_LR_RUN_DCE);
4128 df_note_add_problem ();
4129 df_analyze ();
4130
4131 /* Initialize the regsets we're going to use. */
4132 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4133 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
4134 search_ofs = 0;
4135 live = BITMAP_ALLOC (&reg_obstack);
4136
4137 FOR_EACH_BB_REVERSE_FN (bb, cfun)
4138 {
4139 bool past_end = false;
4140 int pos;
4141
4142 rtl_profile_for_bb (bb);
4143
4144 /* Start up propagation. */
4145 bitmap_copy (live, DF_LR_IN (bb));
4146 df_simulate_initialize_forwards (bb, live);
4147 peep2_reinit_state (live);
4148
4149 insn = BB_HEAD (bb);
4150 for (;;)
4151 {
4152 rtx_insn *attempt, *head;
4153 int match_len;
4154
4155 if (!past_end && !NONDEBUG_INSN_P (insn))
4156 {
4157 next_insn:
4158 insn = NEXT_INSN (insn);
4159 if (insn == NEXT_INSN (BB_END (bb)))
4160 past_end = true;
4161 continue;
4162 }
4163 if (!past_end && peep2_fill_buffer (bb, insn, live))
4164 goto next_insn;
4165
4166 /* If we did not fill an empty buffer, it signals the end of the
4167 block. */
4168 if (peep2_current_count == 0)
4169 break;
4170
4171 /* The buffer filled to the current maximum, so try to match. */
4172
4173 pos = peep2_buf_position (peep2_current + peep2_current_count);
4174 peep2_insn_data[pos].insn = PEEP2_EOB;
4175 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4176
4177 /* Match the peephole. */
4178 head = peep2_insn_data[peep2_current].insn;
4179 attempt = peephole2_insns (PATTERN (head), head, &match_len);
4180 if (attempt != NULL)
4181 {
4182 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
4183 if (last)
4184 {
4185 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
4186 continue;
4187 }
4188 }
4189
4190 /* No match: advance the buffer by one insn. */
4191 peep2_current = peep2_buf_position (peep2_current + 1);
4192 peep2_current_count--;
4193 }
4194 }
4195
4196 default_rtl_profile ();
4197 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4198 BITMAP_FREE (peep2_insn_data[i].live_before);
4199 BITMAP_FREE (live);
4200 if (peep2_do_rebuild_jump_labels)
4201 rebuild_jump_labels (get_insns ());
4202 if (peep2_do_cleanup_cfg)
4203 cleanup_cfg (CLEANUP_CFG_CHANGED);
4204}
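
/* The patterns matched above by peephole2_insns come from define_peephole2
   constructs in the target's machine description.  A purely illustrative
   sketch (not taken from any real port) that merges a register copy with a
   following increment might look like:

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand")
             (match_operand:SI 1 "register_operand"))
        (set (match_dup 0)
             (plus:SI (match_dup 0) (const_int 1)))]
       ""
       [(set (match_dup 0)
             (plus:SI (match_dup 1) (const_int 1)))])

   The generated peephole2_insns returns the replacement sequence and sets
   MATCH_LEN to the number of matched insns minus one, which is how
   peep2_attempt above interprets it.  */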
4205
4206/* Common predicates for use with define_bypass. */
4207
4208/* Helper function for store_data_bypass_p, handle just a single SET
4209 IN_SET. */
4210
4211static bool
4212store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
4213{
4214 if (!MEM_P (SET_DEST (in_set)))
4215 return false;
4216
4217 rtx out_set = single_set (out_insn);
4218 if (out_set)
4219 return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
4220
4221 rtx out_pat = PATTERN (out_insn);
4222 if (GET_CODE (out_pat) != PARALLEL)
4223 return false;
4224
4225 for (int i = 0; i < XVECLEN (out_pat, 0); i++)
4226 {
4227 rtx out_exp = XVECEXP (out_pat, 0, i);
4228
4229 if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
4230 continue;
4231
4232 gcc_assert (GET_CODE (out_exp) == SET);
4233
4234 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
4235 return false;
4236 }
4237
4238 return true;
4239}
4240
4241/* True if the dependency between OUT_INSN and IN_INSN is on the store
4242 data not the address operand(s) of the store. IN_INSN and OUT_INSN
4243 must be either a single_set or a PARALLEL with SETs inside. */
4244
4245bool
4246store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4247{
4248 rtx in_set = single_set (in_insn);
4249 if (in_set)
4250 return store_data_bypass_p_1 (out_insn, in_set);
4251
4252 rtx in_pat = PATTERN (in_insn);
4253 if (GET_CODE (in_pat) != PARALLEL)
4254 return false;
4255
4256 for (int i = 0; i < XVECLEN (in_pat, 0); i++)
4257 {
4258 rtx in_exp = XVECEXP (in_pat, 0, i);
4259
4260 if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
4261 continue;
4262
4263 gcc_assert (GET_CODE (in_exp) == SET);
4264
4265 if (!store_data_bypass_p_1 (out_insn, in_exp))
4266 return false;
4267 }
4268
4269 return true;
4270}
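
/* A pipeline description would typically name the function above as the
   guard of a define_bypass.  Illustrative sketch only; the reservation
   names are made up:

     (define_bypass 1 "my_cpu_alu" "my_cpu_store" "store_data_bypass_p")

   i.e. the shorter latency applies only when the ALU result feeds the
   stored data rather than the address of the store.  */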
4271
4272/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
4273 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
4274 or multiple set; IN_INSN should be single_set for truth, but for convenience
4275 of insn categorization may be any JUMP or CALL insn. */
4276
4277bool
4278if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4279{
4280 rtx out_set, in_set;
4281
4282 in_set = single_set (in_insn);
4283 if (! in_set)
4284 {
4285 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
4286 return false;
4287 }
4288
4289 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
4290 return false;
4291 in_set = SET_SRC (in_set);
4292
4293 out_set = single_set (out_insn);
4294 if (out_set)
4295 {
4296 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4297 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4298 return false;
4299 }
4300 else
4301 {
4302 rtx out_pat;
4303 int i;
4304
4305 out_pat = PATTERN (out_insn);
4306 gcc_assert (GET_CODE (out_pat) == PARALLEL);
4307
4308 for (i = 0; i < XVECLEN (out_pat, 0); i++)
4309 {
4310 rtx exp = XVECEXP (out_pat, 0, i);
4311
4312 if (GET_CODE (exp) == CLOBBER)
4313 continue;
4314
4315 gcc_assert (GET_CODE (exp) == SET);
4316
4317 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
4318 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
4319 return false;
4320 }
4321 }
4322
4323 return true;
4324}
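
/* Like store_data_bypass_p, this is intended as a define_bypass guard,
   e.g. (illustrative only):

     (define_bypass 1 "my_cpu_compare" "my_cpu_branch" "if_test_bypass_p")

   so the bypass latency applies only when the dependency feeds the branch
   condition, not the values chosen by the THEN/ELSE arms.  */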
4325
4326static unsigned int
4327rest_of_handle_peephole2 (void)
4328{
4329 if (HAVE_peephole2)
4330 peephole2_optimize ();
4331
4332 return 0;
4333}
4334
4335namespace {
4336
4337const pass_data pass_data_peephole2 =
4338{
4339 RTL_PASS, /* type */
4340 "peephole2", /* name */
4341 OPTGROUP_NONE, /* optinfo_flags */
4342 TV_PEEPHOLE2, /* tv_id */
4343 0, /* properties_required */
4344 0, /* properties_provided */
4345 0, /* properties_destroyed */
4346 0, /* todo_flags_start */
4347 TODO_df_finish, /* todo_flags_finish */
4348};
4349
4350class pass_peephole2 : public rtl_opt_pass
4351{
4352public:
4353 pass_peephole2 (gcc::context *ctxt)
4354 : rtl_opt_pass (pass_data_peephole2, ctxt)
4355 {}
4356
4357 /* opt_pass methods: */
4358 /* The epiphany backend creates a second instance of this pass, so we need
4359 a clone method. */
4360 opt_pass * clone () final override { return new pass_peephole2 (m_ctxt); }
4361 bool gate (function *) final override
4362 {
4363 return (optimize > 0 && flag_peephole2);
4364 }
4365 unsigned int execute (function *) final override
4366 {
4367 return rest_of_handle_peephole2 ();
4368 }
4369
4370}; // class pass_peephole2
4371
4372} // anon namespace
4373
4374rtl_opt_pass *
4375make_pass_peephole2 (gcc::context *ctxt)
4376{
4377 return new pass_peephole2 (ctxt);
4378}
4379
4380namespace {
4381
4382const pass_data pass_data_split_all_insns =
4383{
4384 RTL_PASS, /* type */
4385 "split1", /* name */
4386 OPTGROUP_NONE, /* optinfo_flags */
4387 TV_NONE, /* tv_id */
4388 0, /* properties_required */
4389 PROP_rtl_split_insns, /* properties_provided */
4390 0, /* properties_destroyed */
4391 0, /* todo_flags_start */
4392 0, /* todo_flags_finish */
4393};
4394
4395class pass_split_all_insns : public rtl_opt_pass
4396{
4397public:
4398 pass_split_all_insns (gcc::context *ctxt)
4399 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
4400 {}
4401
4402 /* opt_pass methods: */
4403 /* The epiphany backend creates a second instance of this pass, so
4404 we need a clone method. */
4405 opt_pass * clone () final override
4406 {
4407 return new pass_split_all_insns (m_ctxt);
4408 }
4409 unsigned int execute (function *) final override
4410 {
4411 split_all_insns ();
4412 return 0;
4413 }
4414
4415}; // class pass_split_all_insns
4416
4417} // anon namespace
4418
4419rtl_opt_pass *
4420make_pass_split_all_insns (gcc::context *ctxt)
4421{
4422 return new pass_split_all_insns (ctxt);
4423}
4424
4425namespace {
4426
4427const pass_data pass_data_split_after_reload =
4428{
4429 RTL_PASS, /* type */
4430 "split2", /* name */
4431 OPTGROUP_NONE, /* optinfo_flags */
4432 TV_NONE, /* tv_id */
4433 0, /* properties_required */
4434 0, /* properties_provided */
4435 0, /* properties_destroyed */
4436 0, /* todo_flags_start */
4437 0, /* todo_flags_finish */
4438};
4439
4440class pass_split_after_reload : public rtl_opt_pass
4441{
4442public:
4443 pass_split_after_reload (gcc::context *ctxt)
4444 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
4445 {}
4446
4447 /* opt_pass methods: */
4448 bool gate (function *) final override
4449 {
4450 /* If optimizing, then go ahead and split insns now. */
4451 return optimize > 0;
4452 }
4453
4454 unsigned int execute (function *) final override
4455 {
4456 split_all_insns ();
4457 return 0;
4458 }
4459
4460}; // class pass_split_after_reload
4461
4462} // anon namespace
4463
4464rtl_opt_pass *
4465make_pass_split_after_reload (gcc::context *ctxt)
4466{
4467 return new pass_split_after_reload (ctxt);
4468}
4469
4470static bool
4471enable_split_before_sched2 (void)
4472{
4473#ifdef INSN_SCHEDULING
4474 return optimize > 0 && flag_schedule_insns_after_reload;
4475#else
4476 return false;
4477#endif
4478}
4479
4480namespace {
4481
4482const pass_data pass_data_split_before_sched2 =
4483{
4484 RTL_PASS, /* type */
4485 "split3", /* name */
4486 OPTGROUP_NONE, /* optinfo_flags */
4487 TV_NONE, /* tv_id */
4488 0, /* properties_required */
4489 0, /* properties_provided */
4490 0, /* properties_destroyed */
4491 0, /* todo_flags_start */
4492 0, /* todo_flags_finish */
4493};
4494
4495class pass_split_before_sched2 : public rtl_opt_pass
4496{
4497public:
4498 pass_split_before_sched2 (gcc::context *ctxt)
4499 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4500 {}
4501
4502 /* opt_pass methods: */
4503 bool gate (function *) final override
4504 {
4505 return enable_split_before_sched2 ();
4506 }
4507
4508 unsigned int execute (function *) final override
4509 {
4510 split_all_insns ();
4511 return 0;
4512 }
4513
4514}; // class pass_split_before_sched2
4515
4516} // anon namespace
4517
4518rtl_opt_pass *
4519make_pass_split_before_sched2 (gcc::context *ctxt)
4520{
4521 return new pass_split_before_sched2 (ctxt);
4522}
4523
4524namespace {
4525
4526const pass_data pass_data_split_before_regstack =
4527{
4528 RTL_PASS, /* type */
4529 "split4", /* name */
4530 OPTGROUP_NONE, /* optinfo_flags */
4531 TV_NONE, /* tv_id */
4532 0, /* properties_required */
4533 0, /* properties_provided */
4534 0, /* properties_destroyed */
4535 0, /* todo_flags_start */
4536 0, /* todo_flags_finish */
4537};
4538
4539class pass_split_before_regstack : public rtl_opt_pass
4540{
4541public:
4542 pass_split_before_regstack (gcc::context *ctxt)
4543 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4544 {}
4545
4546 /* opt_pass methods: */
4547 bool gate (function *) final override;
4548 unsigned int execute (function *) final override
4549 {
4550 split_all_insns ();
4551 return 0;
4552 }
4553
4554}; // class pass_split_before_regstack
4555
4556bool
4557pass_split_before_regstack::gate (function *)
4558{
4559#if HAVE_ATTR_length && defined (STACK_REGS)
4560 /* If flow2 creates new instructions which need splitting
4561 and scheduling after reload is not done, they might not be
4562 split until final which doesn't allow splitting
4563 if HAVE_ATTR_length. Selective scheduling can result in
4564 further instructions that need splitting. */
4565#ifdef INSN_SCHEDULING
4566 return !enable_split_before_sched2 () || flag_selective_scheduling2;
4567#else
4568 return !enable_split_before_sched2 ();
4569#endif
4570#else
4571 return false;
4572#endif
4573}
4574
4575} // anon namespace
4576
4577rtl_opt_pass *
4578make_pass_split_before_regstack (gcc::context *ctxt)
4579{
4580 return new pass_split_before_regstack (ctxt);
4581}
4582
4583namespace {
4584
4585const pass_data pass_data_split_for_shorten_branches =
4586{
4587 RTL_PASS, /* type */
4588 "split5", /* name */
4589 OPTGROUP_NONE, /* optinfo_flags */
4590 TV_NONE, /* tv_id */
4591 0, /* properties_required */
4592 0, /* properties_provided */
4593 0, /* properties_destroyed */
4594 0, /* todo_flags_start */
4595 0, /* todo_flags_finish */
4596};
4597
4598class pass_split_for_shorten_branches : public rtl_opt_pass
4599{
4600public:
4601 pass_split_for_shorten_branches (gcc::context *ctxt)
4602 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4603 {}
4604
4605 /* opt_pass methods: */
4606 bool gate (function *) final override
4607 {
4608 /* The placement of the splitting that we do for shorten_branches
4609 depends on whether regstack is used by the target or not. */
4610#if HAVE_ATTR_length && !defined (STACK_REGS)
4611 return true;
4612#else
4613 return false;
4614#endif
4615 }
4616
4617 unsigned int execute (function *) final override
4618 {
4619 split_all_insns_noflow ();
4620 return 0;
4621 }
4622
4623}; // class pass_split_for_shorten_branches
4624
4625} // anon namespace
4626
4627rtl_opt_pass *
4628make_pass_split_for_shorten_branches (gcc::context *ctxt)
4629{
4630 return new pass_split_for_shorten_branches (ctxt);
4631}
4632
4633/* (Re)initialize the target information after a change in target. */
4634
4635void
4636recog_init ()
4637{
4638 /* The information is zero-initialized, so we don't need to do anything
4639 first time round. */
4640 if (!this_target_recog->x_initialized)
4641 {
4642 this_target_recog->x_initialized = true;
4643 return;
4644 }
4645 memset (this_target_recog->x_bool_attr_masks, 0,
4646 sizeof (this_target_recog->x_bool_attr_masks));
4647 for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4648 if (this_target_recog->x_op_alt[i])
4649 {
4650 free (this_target_recog->x_op_alt[i]);
4651 this_target_recog->x_op_alt[i] = 0;
4652 }
4653}
4654
