/* Search an insn for pseudo regs that must be in hard regs and are not.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains subroutines used only from the file reload1.cc.
   It knows how to scan one insn for operands and values
   that need to be copied into registers to make valid code.
   It also finds other operands and values which are valid
   but for which equivalent values in registers exist and
   ought to be used instead.

   Before processing the first insn of the function, call `init_reload'.
   init_reload actually has to be called earlier anyway.

   To scan an insn, call `find_reloads'.  This does two things:
   1. sets up tables describing which values must be reloaded
   for this insn, and what kind of hard regs they must be reloaded into;
   2. optionally records the locations where those values appear in
   the data, so they can be replaced properly later.
   This is done only if the second arg to `find_reloads' is nonzero.

   The third arg to `find_reloads' specifies the number of levels
   of indirect addressing supported by the machine.  If it is zero,
   indirect addressing is not valid.  If it is one, (MEM (REG n))
   is valid even if (REG n) did not get a hard register; if it is two,
   (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
   hard register, and similarly for higher values.

   Then you must choose the hard regs to reload those pseudo regs into,
   and generate appropriate load insns before this insn and perhaps
   also store insns after this insn.  Set up the array `reload_reg_rtx'
   to contain the REG rtx's for the registers you used.  In some
   cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
   for certain reloads.  Then that tells you which register to use,
   so you do not need to allocate one.  But you still do need to add extra
   instructions to copy the value into and out of that register.

   Finally you must call `subst_reloads' to substitute the reload reg rtx's
   into the locations already recorded.
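
   Purely as an illustration of that calling sequence (not code from GCC:
   the helper name and the last two arguments, a liveness flag and the
   spilled-hard-reg vector described below for `hard_regs_live_known' and
   `static_reload_reg_p', are stand-ins supplied by reload1.cc):

     static void
     reload_one_insn_sketch (rtx_insn *insn, int replace, int ind_levels,
                             int live_known, short *spilled)
     {
       // 1. Fill the rld[] table; with REPLACE nonzero this also records
       //    every location that must be patched afterwards.
       find_reloads (insn, replace, ind_levels, live_known, spilled);

       // 2. Assign a hard reg to each reload, keeping any reg_rtx that
       //    find_reloads already settled on, and emit the copy insns.
       for (int i = 0; i < n_reloads; i++)
         if (rld[i].reg_rtx == NULL_RTX)
           ;  // choose a spill register for reload i here

       // 3. Rewrite the recorded locations to use the reload registers.
       if (replace)
         subst_reloads (insn);
     }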

NOTE SIDE EFFECTS:

   find_reloads can alter the operands of the instruction it is called on.

   1. Two operands of any sort may be interchanged, if they are in a
   commutative instruction.
   This happens only if find_reloads thinks the instruction will compile
   better that way.

   2. Pseudo-registers that are equivalent to constants are replaced
   with those constants if they are not in hard registers.

1 happens every time find_reloads is called.
2 happens only when REPLACE is 1, which is only when
actually doing the reloads, not when just counting them.

Using a reload register for several reloads in one insn:

When an insn has reloads, it is considered as having three parts:
the input reloads, the insn itself after reloading, and the output reloads.
Reloads of values used in memory addresses are often needed for only one part.

When this is so, reload_when_needed records which part needs the reload.
Two reloads for different parts of the insn can share the same reload
register.

When a reload is used for addresses in multiple parts, or when it is
an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
a register with any other reload.  */
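
/* For example (an illustration, not a pattern from any particular target):
   in an insn that copies (mem (reg A)) to (mem (reg B)), where pseudos A
   and B both failed to get hard registers, the reload of A for the input
   address is RELOAD_FOR_INPUT_ADDRESS and the reload of B for the output
   address is RELOAD_FOR_OUTPUT_ADDRESS.  The first is needed only before
   the insn and the second only after it, so the two may be given the same
   reload register.  A reload used for an address needed both before and
   after the insn would instead be RELOAD_OTHER and would get a register of
   its own.  */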

#define REG_OK_STRICT

/* We do not enable this with CHECKING_P, since it is awfully slow.  */
#undef DEBUG_RELOAD

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "ira.h"
#include "recog.h"
#include "rtl-error.h"
#include "reload.h"
#include "addresses.h"
#include "function-abi.h"

/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  */
#define CONST_POOL_OK_P(MODE, X)                \
  ((MODE) != VOIDmode                           \
   && CONSTANT_P (X)                            \
   && GET_CODE (X) != HIGH                      \
   && !targetm.cannot_force_const_mem (MODE, X))

/* True if RCLASS is a non-empty register class that has too few registers
   to be safely used as a reload target class.  */

static inline bool
small_register_class_p (reg_class_t rclass)
{
  return (reg_class_size [(int) rclass] == 1
          || (reg_class_size [(int) rclass] >= 1
              && targetm.class_likely_spilled_p (rclass)));
}


/* All reloads of the current insn are recorded here.  See reload.h for
   comments.  */
int n_reloads;
struct reload rld[MAX_RELOADS];

/* All the "earlyclobber" operands of the current insn
   are recorded here.  */
int n_earlyclobbers;
rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];

int reload_n_operands;

/* Replacing reloads.

   If `replace_reloads' is nonzero, then as each reload is recorded
   an entry is made for it in the table `replacements'.
   Then later `subst_reloads' can look through that table and
   perform all the replacements needed.  */

/* Nonzero means record the places to replace.  */
static int replace_reloads;

/* Each replacement is recorded with a structure like this.  */
struct replacement
{
  rtx *where;                   /* Location to store in.  */
  int what;                     /* Which reload this is for.  */
  machine_mode mode;            /* Mode it must have.  */
};

static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];

/* Number of replacements currently recorded.  */
static int n_replacements;
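
/* A minimal sketch (an assumption for illustration, not the real
   subst_reloads in this file) of how the table above is consumed: each
   recorded location is overwritten with the register chosen for the
   corresponding reload, in the mode recorded for that particular use.

     void
     subst_reloads_sketch (void)
     {
       for (int i = 0; i < n_replacements; i++)
         {
           struct replacement *r = &replacements[i];
           rtx reloadreg = rld[r->what].reg_rtx;
           if (reloadreg != NULL_RTX)
             // The real code performs additional bookkeeping and checking;
             // only the store itself is shown here.
             *r->where = gen_rtx_REG (r->mode, REGNO (reloadreg));
         }
     }
*/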

/* Used to track what is modified by an operand.  */
struct decomposition
{
  int reg_flag;         /* Nonzero if referencing a register.  */
  int safe;             /* Nonzero if this can't conflict with anything.  */
  rtx base;             /* Base address for MEM.  */
  poly_int64 start;     /* Starting offset or register number.  */
  poly_int64 end;       /* Ending offset or register number.  */
};

/* Save MEMs needed to copy from one class of registers to another.  One MEM
   is used per mode, but normally only one or two modes are ever used.

   We keep two versions, before and after register elimination.  The one
   after register elimination is recorded separately for each operand.  This
   is done in case the address is not valid, to be sure that we separately
   reload each one.  */

static rtx secondary_memlocs[NUM_MACHINE_MODES];
static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
static int secondary_memlocs_elim_used = 0;

/* The instruction we are doing reloads for;
   so we can test whether a register dies in it.  */
static rtx_insn *this_insn;

/* Nonzero if this instruction is a user-specified asm with operands.  */
static int this_insn_is_asm;

/* If hard_regs_live_known is nonzero,
   we can tell which hard regs are currently live,
   at least enough to succeed in choosing dummy reloads.  */
static int hard_regs_live_known;

/* Indexed by hard reg number,
   element is nonnegative if hard reg has been spilled.
   This vector is passed to `find_reloads' as an argument
   and is not changed here.  */
static short *static_reload_reg_p;

/* Set to 1 in subst_reg_equivs if it changes anything.  */
static int subst_reg_equivs_changed;

/* On return from push_reload, holds the reload-number for the OUT
   operand, which can be different from the reload-number for the IN
   operand.  */
static int output_reloadnum;

  /* Compare two RTX's.  */
#define MATCHES(x, y) \
 (x == y || (x != 0 && (REG_P (x)                               \
                        ? REG_P (y) && REGNO (x) == REGNO (y)   \
                        : rtx_equal_p (x, y) && ! side_effects_p (x))))

  /* Indicates if two reload purposes are for similar enough things that we
     can merge their reloads.  */
#define MERGABLE_RELOADS(when1, when2, op1, op2) \
  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER   \
   || ((when1) == (when2) && (op1) == (op2))            \
   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS            \
       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)        \
   || ((when1) == RELOAD_FOR_OTHER_ADDRESS              \
       && (when2) == RELOAD_FOR_OTHER_ADDRESS))

  /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
#define MERGE_TO_OTHER(when1, when2, op1, op2) \
  ((when1) != (when2)                                   \
   || ! ((op1) == (op2)                                 \
         || (when1) == RELOAD_FOR_INPUT                 \
         || (when1) == RELOAD_FOR_OPERAND_ADDRESS       \
         || (when1) == RELOAD_FOR_OTHER_ADDRESS))

  /* If we are going to reload an address, compute the reload type to
     use.  */
#define ADDR_TYPE(type)                         \
  ((type) == RELOAD_FOR_INPUT_ADDRESS           \
   ? RELOAD_FOR_INPADDR_ADDRESS                 \
   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS       \
      ? RELOAD_FOR_OUTADDR_ADDRESS              \
      : (type)))
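
/* For instance (worked out from the definitions above, illustrative only):
   MERGABLE_RELOADS (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1) is true and
   MERGE_TO_OTHER (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1) is false, so two
   input reloads for different operands may be merged and the result keeps
   type RELOAD_FOR_INPUT.  By contrast,
   MERGABLE_RELOADS (RELOAD_FOR_INPUT_ADDRESS, RELOAD_FOR_INPUT_ADDRESS, 0, 1)
   is false: address reloads of that type merge only when they are for the
   same operand.  A merge involving RELOAD_OTHER always yields RELOAD_OTHER.
   ADDR_TYPE maps RELOAD_FOR_INPUT_ADDRESS to RELOAD_FOR_INPADDR_ADDRESS,
   likewise for the OUTPUT variant, and leaves any other type unchanged.  */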

static int push_secondary_reload (int, rtx, int, int, enum reg_class,
                                  machine_mode, enum reload_type,
                                  enum insn_code *, secondary_reload_info *);
static enum reg_class find_valid_class (machine_mode, machine_mode,
                                        int, unsigned int);
static void push_replacement (rtx *, int, machine_mode);
static void dup_replacements (rtx *, rtx *);
static void combine_reloads (void);
static int find_reusable_reload (rtx *, rtx, enum reg_class,
                                 enum reload_type, int, int);
static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
                              machine_mode, reg_class_t, int, int);
static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
static struct decomposition decompose (rtx);
static int immune_p (rtx, rtx, struct decomposition);
static bool alternative_allows_const_pool_ref (rtx, const char *, int);
static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
                                rtx_insn *, int *);
static rtx make_memloc (rtx, int);
static bool maybe_memory_address_addr_space_p (machine_mode, rtx,
                                               addr_space_t, rtx *);
static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
                                 int, enum reload_type, int, rtx_insn *);
static rtx subst_reg_equivs (rtx, rtx_insn *);
static rtx subst_indexed_address (rtx);
static void update_auto_inc_notes (rtx_insn *, int, int);
static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
                                   enum rtx_code, enum rtx_code, rtx *,
                                   int, enum reload_type, int, rtx_insn *);
static void find_reloads_address_part (rtx, rtx *, enum reg_class,
                                       machine_mode, int,
                                       enum reload_type, int);
static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
                                        int, rtx_insn *, int *);
static void copy_replacements_1 (rtx *, rtx *, int);
static poly_int64 find_inc_amount (rtx, rtx);
static int refers_to_mem_for_reload_p (rtx);
static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
                                         rtx, rtx *);

/* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
   list yet.  */

static void
push_reg_equiv_alt_mem (int regno, rtx mem)
{
  rtx it;

  for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
    if (rtx_equal_p (XEXP (it, 0), mem))
      return;

  reg_equiv_alt_mem_list (regno)
    = alloc_EXPR_LIST (REG_EQUIV, mem,
                       reg_equiv_alt_mem_list (regno));
}

/* Determine if any secondary reloads are needed for loading (if IN_P is
   nonzero) or storing (if IN_P is zero) X to or from a reload register of
   register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
   are needed, push them.

   Return the reload number of the secondary reload we made, or -1 if
   we didn't need one.  *PICODE is set to the insn_code to use if we do
   need a secondary reload.  */

static int
push_secondary_reload (int in_p, rtx x, int opnum, int optional,
                       enum reg_class reload_class,
                       machine_mode reload_mode, enum reload_type type,
                       enum insn_code *picode, secondary_reload_info *prev_sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class scratch_class;
  machine_mode mode = reload_mode;
  enum insn_code icode = CODE_FOR_nothing;
  enum insn_code t_icode = CODE_FOR_nothing;
  enum reload_type secondary_type;
  int s_reload, t_reload = -1;
  const char *scratch_constraint;
  secondary_reload_info sri;

  if (type == RELOAD_FOR_INPUT_ADDRESS
      || type == RELOAD_FOR_OUTPUT_ADDRESS
      || type == RELOAD_FOR_INPADDR_ADDRESS
      || type == RELOAD_FOR_OUTADDR_ADDRESS)
    secondary_type = type;
  else
    secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;

  *picode = CODE_FOR_nothing;

  /* If X is a paradoxical SUBREG, use the inner value to determine both the
     mode and object being reloaded.  */
  if (paradoxical_subreg_p (x))
    {
      x = SUBREG_REG (x);
      reload_mode = GET_MODE (x);
    }

  /* If X is a pseudo-register that has an equivalent MEM (actually, if it
     is still a pseudo-register by now, it *must* have an equivalent MEM
     but we don't want to assume that), use that equivalent when seeing if
     a secondary reload is needed since whether or not a reload is needed
     might be sensitive to the form of the MEM.  */

  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_equiv_mem (REGNO (x)))
    x = reg_equiv_mem (REGNO (x));

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = prev_sri;
  rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
                                                      reload_mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If we don't need any secondary registers, done.  */
  if (rclass == NO_REGS && icode == CODE_FOR_nothing)
    return -1;

  if (rclass != NO_REGS)
    t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
                                      reload_mode, type, &t_icode, &sri);

  /* If we will be using an insn, the secondary reload is for a
     scratch register.  */

  if (icode != CODE_FOR_nothing)
    {
      /* If IN_P is nonzero, the reload register will be the output in
         operand 0.  If IN_P is zero, the reload register will be the input
         in operand 1.  Outputs should have an initial "=", which we must
         skip.  */

      /* ??? It would be useful to be able to handle only two, or more than
         three, operands, but for now we can only handle the case of having
         exactly three: output, input and one temp/scratch.  */
      gcc_assert (insn_data[(int) icode].n_operands == 3);

      /* ??? We currently have no way to represent a reload that needs
         an icode to reload from an intermediate tertiary reload register.
         We should probably have a new field in struct reload to tag a
         chain of scratch operand reloads onto.  */
      gcc_assert (rclass == NO_REGS);

      scratch_constraint = insn_data[(int) icode].operand[2].constraint;
      gcc_assert (*scratch_constraint == '=');
      scratch_constraint++;
      if (*scratch_constraint == '&')
        scratch_constraint++;
      scratch_class = (reg_class_for_constraint
                       (lookup_constraint (scratch_constraint)));

      rclass = scratch_class;
      mode = insn_data[(int) icode].operand[2].mode;
    }

  /* This case isn't valid, so fail.  Reload is allowed to use the same
     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
     in the case of a secondary register, we actually need two different
     registers for correct code.  We fail here to prevent the possibility of
     silently generating incorrect code later.

     The convention is that secondary input reloads are valid only if the
     secondary_class is different from class.  If you have such a case, you
     cannot use secondary reloads, you must work around the problem some
     other way.

     Allow this when a reload_in/out pattern is being used.  I.e. assume
     that the generated code handles this case.  */

  gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
              || t_icode != CODE_FOR_nothing);

  /* See if we can reuse an existing secondary reload.  */
  for (s_reload = 0; s_reload < n_reloads; s_reload++)
    if (rld[s_reload].secondary_p
        && (reg_class_subset_p (rclass, rld[s_reload].rclass)
            || reg_class_subset_p (rld[s_reload].rclass, rclass))
        && ((in_p && rld[s_reload].inmode == mode)
            || (! in_p && rld[s_reload].outmode == mode))
        && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
            || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
        && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
            || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
        && (small_register_class_p (rclass)
            || targetm.small_register_classes_for_mode_p (VOIDmode))
        && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
                             opnum, rld[s_reload].opnum))
      {
        if (in_p)
          rld[s_reload].inmode = mode;
        if (! in_p)
          rld[s_reload].outmode = mode;

        if (reg_class_subset_p (rclass, rld[s_reload].rclass))
          rld[s_reload].rclass = rclass;

        rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
        rld[s_reload].optional &= optional;
        rld[s_reload].secondary_p = 1;
        if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
                            opnum, rld[s_reload].opnum))
          rld[s_reload].when_needed = RELOAD_OTHER;

        break;
      }

  if (s_reload == n_reloads)
    {
      /* If we need a memory location to copy between the two reload regs,
         set it up now.  Note that we do the input case before making
         the reload and the output case after.  This is due to the
         way reloads are output.  */

      if (in_p && icode == CODE_FOR_nothing
          && targetm.secondary_memory_needed (mode, rclass, reload_class))
        {
          get_secondary_mem (x, reload_mode, opnum, type);

          /* We may have just added new reloads.  Make sure we add
             the new reload at the end.  */
          s_reload = n_reloads;
        }

      /* We need to make a new secondary reload for this register class.  */
      rld[s_reload].in = rld[s_reload].out = 0;
      rld[s_reload].rclass = rclass;

      rld[s_reload].inmode = in_p ? mode : VOIDmode;
      rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
      rld[s_reload].reg_rtx = 0;
      rld[s_reload].optional = optional;
      rld[s_reload].inc = 0;
      /* Maybe we could combine these, but it seems too tricky.  */
      rld[s_reload].nocombine = 1;
      rld[s_reload].in_reg = 0;
      rld[s_reload].out_reg = 0;
      rld[s_reload].opnum = opnum;
      rld[s_reload].when_needed = secondary_type;
      rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
      rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
      rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_out_icode
        = ! in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_p = 1;

      n_reloads++;

      if (! in_p && icode == CODE_FOR_nothing
          && targetm.secondary_memory_needed (mode, reload_class, rclass))
        get_secondary_mem (x, mode, opnum, type);
    }

  *picode = icode;
  return s_reload;
}

/* If a secondary reload is needed, return its class.  If both an intermediate
   register and a scratch register are needed, we return the class of the
   intermediate register.  */
reg_class_t
secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
                        rtx x)
{
  enum insn_code icode;
  secondary_reload_info sri;

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = NULL;
  rclass
    = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If there are no secondary reloads at all, we return NO_REGS.
     If an intermediate register is needed, we return its class.  */
  if (icode == CODE_FOR_nothing || rclass != NO_REGS)
    return rclass;

  /* No intermediate register is needed, but we have a special reload
     pattern, which we assume for now needs a scratch register.  */
  return scratch_reload_class (icode);
}
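
/* For reference, a minimal sketch of the target hook this function consults
   (hypothetical target; the class names and the condition are invented for
   illustration, only the hook contract itself is the documented
   TARGET_SECONDARY_RELOAD interface used above):

     static reg_class_t
     example_secondary_reload (bool in_p, rtx x, reg_class_t reload_class,
                               machine_mode reload_mode,
                               secondary_reload_info *sri ATTRIBUTE_UNUSED)
     {
       // On this imaginary machine a MEM cannot be loaded directly into the
       // (made-up) FP_REGS class, so ask for an intermediate general
       // register; reload then calls push_secondary_reload for it.
       if (in_p && reload_class == FP_REGS && MEM_P (x)
           && reload_mode == DFmode)
         return GENERAL_REGS;
       return NO_REGS;
     }

   Returning a class here is what makes secondary_reload_class return
   something other than NO_REGS; setting sri->icode instead would request a
   scratch register via scratch_reload_class.  */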

/* ICODE is the insn_code of a reload pattern.  Check that it has exactly
   three operands, verify that operand 2 is an output operand, and return
   its register class.
   ??? We'd like to be able to handle any pattern with at least 2 operands,
   for zero or more scratch registers, but that needs more infrastructure.  */
enum reg_class
scratch_reload_class (enum insn_code icode)
{
  const char *scratch_constraint;
  enum reg_class rclass;

  gcc_assert (insn_data[(int) icode].n_operands == 3);
  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
  gcc_assert (*scratch_constraint == '=');
  scratch_constraint++;
  if (*scratch_constraint == '&')
    scratch_constraint++;
  rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
  gcc_assert (rclass != NO_REGS);
  return rclass;
}

/* Return a memory location that will be used to copy X in mode MODE.
   If we haven't already made a location for this mode in this insn,
   call find_reloads_address on the location being returned.  */

rtx
get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
                   int opnum, enum reload_type type)
{
  rtx loc;
  int mem_valid;

  /* By default, if MODE is narrower than a word, widen it to a word.
     This is required because most machines that require these memory
     locations do not support short loads and stores from all registers
     (e.g., FP registers).  */

  mode = targetm.secondary_memory_needed_mode (mode);

  /* If we already have made a MEM for this operand in MODE, return it.  */
  if (secondary_memlocs_elim[(int) mode][opnum] != 0)
    return secondary_memlocs_elim[(int) mode][opnum];

  /* If this is the first time we've tried to get a MEM for this mode,
     allocate a new one.  `something_changed' in reload will get set
     by noticing that the frame size has changed.  */

  if (secondary_memlocs[(int) mode] == 0)
    {
#ifdef SECONDARY_MEMORY_NEEDED_RTX
      secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
#else
      secondary_memlocs[(int) mode]
        = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
#endif
    }

  /* Get a version of the address doing any eliminations needed.  If that
     didn't give us a new MEM, make a new one if it isn't valid.  */

  loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
  mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
                                                  MEM_ADDR_SPACE (loc));

  if (! mem_valid && loc == secondary_memlocs[(int) mode])
    loc = copy_rtx (loc);

  /* The only time the call below will do anything is if the stack
     offset is too large.  In that case IND_LEVELS doesn't matter, so we
     can just pass a zero.  Adjust the type to be the address of the
     corresponding object.  If the address was valid, save the eliminated
     address.  If it wasn't valid, we need to make a reload each time, so
     don't save it.  */

  if (! mem_valid)
    {
      type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
              : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
              : RELOAD_OTHER);

      find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
                            opnum, type, 0, 0);
    }

  secondary_memlocs_elim[(int) mode][opnum] = loc;
  if (secondary_memlocs_elim_used <= (int) mode)
    secondary_memlocs_elim_used = (int) mode + 1;
  return loc;
}

/* Clear any secondary memory locations we've made.  */

void
clear_secondary_mem (void)
{
  memset (secondary_memlocs, 0, sizeof secondary_memlocs);
}


/* Find the largest class which has at least one register valid in
   mode INNER, and which for every such register, that register number
   plus N is also valid in OUTER (if in range) and is cheap to move
   into DEST_REGNO.  Such a class must exist.  */

static enum reg_class
find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
                  machine_mode inner ATTRIBUTE_UNUSED, int n,
                  unsigned int dest_regno ATTRIBUTE_UNUSED)
{
  int best_cost = -1;
  int rclass;
  int regno;
  enum reg_class best_class = NO_REGS;
  enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
  unsigned int best_size = 0;
  int cost;

  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
    {
      int bad = 0;
      int good = 0;
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
        if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
          {
            if (targetm.hard_regno_mode_ok (regno, inner))
              {
                good = 1;
                if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
                    && !targetm.hard_regno_mode_ok (regno + n, outer))
                  bad = 1;
              }
          }

      if (bad || !good)
        continue;
      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);

      if ((reg_class_size[rclass] > best_size
           && (best_cost < 0 || best_cost >= cost))
          || best_cost > cost)
        {
          best_class = (enum reg_class) rclass;
          best_size = reg_class_size[rclass];
          best_cost = register_move_cost (outer, (enum reg_class) rclass,
                                          dest_class);
        }
    }

  gcc_assert (best_size != 0);

  return best_class;
}
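
/* Worked example (hypothetical 32-bit word target, purely illustrative):
   suppose push_reload has to reload the inner register of
   (subreg:SI (reg:DI R) 4), where R is a hard register for which that
   SImode access is not directly valid.  The regno offset computed from the
   SUBREG_BYTE is 1, so the call is find_valid_class (SImode, DImode, 1,
   REGNO (R)): it picks the largest class whose DImode-capable registers all
   have register number + 1 usable in SImode, preferring a class that is
   cheap to move into R's class, and the DImode value is reloaded there
   before the SUBREG is applied to the reload register.  */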
683
684/* We are trying to reload a subreg of something that is not a register.
685 Find the largest class which contains only registers valid in
686 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
687 which we would eventually like to obtain the object. */
688
689static enum reg_class
690find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 machine_mode mode ATTRIBUTE_UNUSED,
692 enum reg_class dest_class ATTRIBUTE_UNUSED)
693{
694 int best_cost = -1;
695 int rclass;
696 int regno;
697 enum reg_class best_class = NO_REGS;
698 unsigned int best_size = 0;
699 int cost;
700
701 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
702 {
703 unsigned int computed_rclass_size = 0;
704
705 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
706 {
707 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 && targetm.hard_regno_mode_ok (regno, mode))
709 computed_rclass_size++;
710 }
711
712 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
713
714 if ((computed_rclass_size > best_size
715 && (best_cost < 0 || best_cost >= cost))
716 || best_cost > cost)
717 {
718 best_class = (enum reg_class) rclass;
719 best_size = computed_rclass_size;
720 best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 dest_class);
722 }
723 }
724
725 gcc_assert (best_size != 0);
726
727#ifdef LIMIT_RELOAD_CLASS
728 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729#endif
730 return best_class;
731}
732
733/* Return the number of a previously made reload that can be combined with
734 a new one, or n_reloads if none of the existing reloads can be used.
735 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
736 push_reload, they determine the kind of the new reload that we try to
737 combine. P_IN points to the corresponding value of IN, which can be
738 modified by this function.
739 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
740
741static int
742find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
743 enum reload_type type, int opnum, int dont_share)
744{
745 rtx in = *p_in;
746 int i;
747 /* We can't merge two reloads if the output of either one is
748 earlyclobbered. */
749
750 if (earlyclobber_operand_p (out))
751 return n_reloads;
752
753 /* We can use an existing reload if the class is right
754 and at least one of IN and OUT is a match
755 and the other is at worst neutral.
756 (A zero compared against anything is neutral.)
757
758 For targets with small register classes, don't use existing reloads
759 unless they are for the same thing since that can cause us to need
760 more reload registers than we otherwise would. */
761
762 for (i = 0; i < n_reloads; i++)
763 if ((reg_class_subset_p (rclass, rld[i].rclass)
764 || reg_class_subset_p (rld[i].rclass, rclass))
765 /* If the existing reload has a register, it must fit our class. */
766 && (rld[i].reg_rtx == 0
767 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
			       true_regnum (rld[i].reg_rtx)))
769 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
770 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
771 || (out != 0 && MATCHES (rld[i].out, out)
772 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
773 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
774 && (small_register_class_p (rclass)
775 || targetm.small_register_classes_for_mode_p (VOIDmode))
776 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
777 return i;
778
779 /* Reloading a plain reg for input can match a reload to postincrement
780 that reg, since the postincrement's value is the right value.
781 Likewise, it can match a preincrement reload, since we regard
782 the preincrementation as happening before any ref in this insn
783 to that register. */
784 for (i = 0; i < n_reloads; i++)
785 if ((reg_class_subset_p (rclass, rld[i].rclass)
786 || reg_class_subset_p (rld[i].rclass, rclass))
787 /* If the existing reload has a register, it must fit our
788 class. */
789 && (rld[i].reg_rtx == 0
790 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
			       true_regnum (rld[i].reg_rtx)))
792 && out == 0 && rld[i].out == 0 && rld[i].in != 0
793 && ((REG_P (in)
794 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
795 && MATCHES (XEXP (rld[i].in, 0), in))
796 || (REG_P (rld[i].in)
797 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
798 && MATCHES (XEXP (in, 0), rld[i].in)))
799 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 && (small_register_class_p (rclass)
801 || targetm.small_register_classes_for_mode_p (VOIDmode))
802 && MERGABLE_RELOADS (type, rld[i].when_needed,
803 opnum, rld[i].opnum))
804 {
805 /* Make sure reload_in ultimately has the increment,
806 not the plain register. */
807 if (REG_P (in))
808 *p_in = rld[i].in;
809 return i;
810 }
811 return n_reloads;
812}
813
814/* Return true if:
815
816 (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
817 of a multiword value; and
818
819 (b) the number of *words* in REG does not match the number of *registers*
820 in REG. */
821
822static bool
823complex_word_subreg_p (machine_mode outer_mode, rtx reg)
824{
825 machine_mode inner_mode = GET_MODE (reg);
826 poly_uint64 reg_words = REG_NREGS (reg) * UNITS_PER_WORD;
  return (known_le (GET_MODE_SIZE (outer_mode), UNITS_PER_WORD)
	  && maybe_gt (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
	  && !known_equal_after_align_up (GET_MODE_SIZE (inner_mode),
					  reg_words, UNITS_PER_WORD));
}
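
/* Worked example (hypothetical targets, for illustration only): on a 32-bit
   target whose floating-point registers each hold a whole DFmode value,
   (reg:DF f1) occupies one register but two words, so for a word-sized outer
   mode complex_word_subreg_p returns true: REG_NREGS * UNITS_PER_WORD is 4
   while the inner mode is 8 bytes.  For a DImode value held in two 32-bit
   general registers the two quantities agree (8 and 8) and the function
   returns false.  */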
832
833/* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834 expression. MODE is the mode that X will be used in. OUTPUT is true if
835 the function is invoked for the output part of an enclosing reload. */
836
837static bool
838reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
839{
840 rtx inner;
841
842 /* Only SUBREGs are problematical. */
843 if (GET_CODE (x) != SUBREG)
844 return false;
845
846 inner = SUBREG_REG (x);
847
848 /* If INNER is a constant or PLUS, then INNER will need reloading. */
849 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
850 return true;
851
852 /* If INNER is not a hard register, then INNER will not need reloading. */
853 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
854 return false;
855
856 /* If INNER is not ok for MODE, then INNER will need reloading. */
857 if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
858 return true;
859
860 /* If this is for an output, and the outer part is a word or smaller,
861 INNER is larger than a word and the number of registers in INNER is
862 not the same as the number of words in INNER, then INNER will need
863 reloading (with an in-out reload). */
  return output && complex_word_subreg_p (mode, inner);
865}
866
867/* Return nonzero if IN can be reloaded into REGNO with mode MODE without
868 requiring an extra reload register. The caller has already found that
869 IN contains some reference to REGNO, so check that we can produce the
870 new value in a single step. E.g. if we have
871 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
872 instruction that adds one to a register, this should succeed.
873 However, if we have something like
874 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
875 needs to be loaded into a register first, we need a separate reload
876 register.
877 Such PLUS reloads are generated by find_reload_address_part.
878 The out-of-range PLUS expressions are usually introduced in the instruction
879 patterns by register elimination and substituting pseudos without a home
880 by their function-invariant equivalences. */
881static int
882can_reload_into (rtx in, int regno, machine_mode mode)
883{
884 rtx dst;
885 rtx_insn *test_insn;
886 int r = 0;
887 struct recog_data_d save_recog_data;
888
889 /* For matching constraints, we often get notional input reloads where
890 we want to use the original register as the reload register. I.e.
891 technically this is a non-optional input-output reload, but IN is
892 already a valid register, and has been chosen as the reload register.
893 Speed this up, since it trivially works. */
894 if (REG_P (in))
895 return 1;
896
897 /* To test MEMs properly, we'd have to take into account all the reloads
898 that are already scheduled, which can become quite complicated.
899 And since we've already handled address reloads for this MEM, it
900 should always succeed anyway. */
901 if (MEM_P (in))
902 return 1;
903
904 /* If we can make a simple SET insn that does the job, everything should
905 be fine. */
906 dst = gen_rtx_REG (mode, regno);
907 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
908 save_recog_data = recog_data;
  if (recog_memoized (test_insn) >= 0)
910 {
911 extract_insn (test_insn);
912 r = constrain_operands (1, get_enabled_alternatives (test_insn));
913 }
914 recog_data = save_recog_data;
915 return r;
916}
917
918/* Record one reload that needs to be performed.
919 IN is an rtx saying where the data are to be found before this instruction.
920 OUT says where they must be stored after the instruction.
921 (IN is zero for data not read, and OUT is zero for data not written.)
922 INLOC and OUTLOC point to the places in the instructions where
923 IN and OUT were found.
924 If IN and OUT are both nonzero, it means the same register must be used
925 to reload both IN and OUT.
926
927 RCLASS is a register class required for the reloaded data.
928 INMODE is the machine mode that the instruction requires
929 for the reg that replaces IN and OUTMODE is likewise for OUT.
930
931 If IN is zero, then OUT's location and mode should be passed as
932 INLOC and INMODE.
933
   STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
935
936 OPTIONAL nonzero means this reload does not need to be performed:
937 it can be discarded if that is more convenient.
938
939 OPNUM and TYPE say what the purpose of this reload is.
940
941 The return value is the reload-number for this reload.
942
943 If both IN and OUT are nonzero, in some rare cases we might
944 want to make two separate reloads. (Actually we never do this now.)
945 Therefore, the reload-number for OUT is stored in
946 output_reloadnum when we return; the return value applies to IN.
947 Usually (presently always), when IN and OUT are nonzero,
948 the two reload-numbers are equal, but the caller should be careful to
949 distinguish them. */
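
/* Roughly how find_reloads uses this for a plain input operand (a simplified
   sketch; the real call sites pass the alternative's chosen class and the
   operand's computed reload type, and GENERAL_REGS here is just a
   placeholder):

     push_reload (recog_data.operand[i], NULL_RTX, recog_data.operand_loc[i],
		  (rtx *) 0, GENERAL_REGS, operand_mode[i], VOIDmode,
		  0, 0, i, RELOAD_FOR_INPUT);

   i.e. IN is the operand rtx, OUT is zero because nothing is written back,
   INLOC is where the operand sits inside the insn so that subst_reloads can
   later patch in the reload register, and the last arguments say this is a
   non-optional reload for operand I used as an input.  */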
950
951int
952push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
953 enum reg_class rclass, machine_mode inmode,
954 machine_mode outmode, int strict_low, int optional,
955 int opnum, enum reload_type type)
956{
957 int i;
958 int dont_share = 0;
959 int dont_remove_subreg = 0;
960#ifdef LIMIT_RELOAD_CLASS
961 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
962#endif
963 int secondary_in_reload = -1, secondary_out_reload = -1;
964 enum insn_code secondary_in_icode = CODE_FOR_nothing;
965 enum insn_code secondary_out_icode = CODE_FOR_nothing;
966 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
967 subreg_in_class = NO_REGS;
968
969 /* INMODE and/or OUTMODE could be VOIDmode if no mode
970 has been specified for the operand. In that case,
971 use the operand's mode as the mode to reload. */
972 if (inmode == VOIDmode && in != 0)
973 inmode = GET_MODE (in);
974 if (outmode == VOIDmode && out != 0)
975 outmode = GET_MODE (out);
976
977 /* If find_reloads and friends until now missed to replace a pseudo
978 with a constant of reg_equiv_constant something went wrong
979 beforehand.
980 Note that it can't simply be done here if we missed it earlier
981 since the constant might need to be pushed into the literal pool
982 and the resulting memref would probably need further
983 reloading. */
984 if (in != 0 && REG_P (in))
985 {
986 int regno = REGNO (in);
987
988 gcc_assert (regno < FIRST_PSEUDO_REGISTER
989 || reg_renumber[regno] >= 0
990 || reg_equiv_constant (regno) == NULL_RTX);
991 }
992
993 /* reg_equiv_constant only contains constants which are obviously
994 not appropriate as destination. So if we would need to replace
995 the destination pseudo with a constant we are in real
996 trouble. */
997 if (out != 0 && REG_P (out))
998 {
999 int regno = REGNO (out);
1000
1001 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1002 || reg_renumber[regno] >= 0
1003 || reg_equiv_constant (regno) == NULL_RTX);
1004 }
1005
1006 /* If we have a read-write operand with an address side-effect,
1007 change either IN or OUT so the side-effect happens only once. */
1008 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1009 switch (GET_CODE (XEXP (in, 0)))
1010 {
1011 case POST_INC: case POST_DEC: case POST_MODIFY:
1012 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1013 break;
1014
1015 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1016 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1017 break;
1018
1019 default:
1020 break;
1021 }
1022
1023 /* If we are reloading a (SUBREG constant ...), really reload just the
1024 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1025 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1026 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1027 register is a pseudo, also reload the inside expression.
1028 For machines that extend byte loads, do this for any SUBREG of a pseudo
1029 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1030 M2 is an integral mode that gets extended when loaded.
1031 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1032 where either M1 is not valid for R or M2 is wider than a word but we
1033 only need one register to store an M2-sized quantity in R.
1034 (However, if OUT is nonzero, we need to reload the reg *and*
1035 the subreg, so do nothing here, and let following statement handle it.)
1036
1037 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1038 we can't handle it here because CONST_INT does not indicate a mode.
1039
1040 Similarly, we must reload the inside expression if we have a
1041 STRICT_LOW_PART (presumably, in == out in this case).
1042
1043 Also reload the inner expression if it does not require a secondary
1044 reload but the SUBREG does.
1045
1046 Also reload the inner expression if it is a register that is in
1047 the class whose registers cannot be referenced in a different size
1048 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1049 cannot reload just the inside since we might end up with the wrong
1050 register class. But if it is inside a STRICT_LOW_PART, we have
1051 no choice, so we hope we do get the right register class there.
1052
1053 Finally, reload the inner expression if it is a pseudo that will
1054 become a MEM and the MEM has a mode-dependent address, as in that
1055 case we obviously cannot change the mode of the MEM to that of the
1056 containing SUBREG as that would change the interpretation of the
1057 address. */
1058
1059 scalar_int_mode inner_mode;
1060 if (in != 0 && GET_CODE (in) == SUBREG
1061 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1062 inmode, rclass)
1063 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1064 && (strict_low
1065 || (subreg_lowpart_p (in)
1066 && (CONSTANT_P (SUBREG_REG (in))
1067 || GET_CODE (SUBREG_REG (in)) == PLUS
1068 || (((REG_P (SUBREG_REG (in))
1069 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1070 || MEM_P (SUBREG_REG (in)))
1071 && (paradoxical_subreg_p (outermode: inmode,
1072 GET_MODE (SUBREG_REG (in)))
1073 || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
1074 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG
1075 (in)),
1076 result: &inner_mode)
1077 && GET_MODE_SIZE (mode: inner_mode) <= UNITS_PER_WORD
1078 && paradoxical_subreg_p (outermode: inmode, innermode: inner_mode)
1079 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1080 || (WORD_REGISTER_OPERATIONS
1081 && partial_subreg_p (outermode: inmode,
1082 GET_MODE (SUBREG_REG (in)))
1083 && (known_equal_after_align_down
1084 (a: GET_MODE_SIZE (mode: inmode) - 1,
1085 b: GET_MODE_SIZE (GET_MODE (SUBREG_REG
1086 (in))) - 1,
1087 UNITS_PER_WORD)))))
1088 || (REG_P (SUBREG_REG (in))
1089 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1090 /* The case where out is nonzero
1091 is handled differently in the following statement. */
1092 && (out == 0 || subreg_lowpart_p (in))
1093 && (complex_word_subreg_p (outer_mode: inmode, SUBREG_REG (in))
1094 || !targetm.hard_regno_mode_ok (subreg_regno (in),
1095 inmode)))
1096 || (secondary_reload_class (in_p: 1, rclass, mode: inmode, x: in) != NO_REGS
1097 && (secondary_reload_class (in_p: 1, rclass,
1098 GET_MODE (SUBREG_REG (in)),
1099 SUBREG_REG (in))
1100 == NO_REGS))
1101 || (REG_P (SUBREG_REG (in))
1102 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1103 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1104 GET_MODE (SUBREG_REG (in)),
1105 inmode))))
1106 || (REG_P (SUBREG_REG (in))
1107 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER
1108 && reg_equiv_mem (REGNO (SUBREG_REG (in)))
1109 && (mode_dependent_address_p
1110 (XEXP (reg_equiv_mem (REGNO (SUBREG_REG (in))), 0),
1111 MEM_ADDR_SPACE (reg_equiv_mem (REGNO (SUBREG_REG (in)))))))))
1112 {
1113#ifdef LIMIT_RELOAD_CLASS
1114 in_subreg_loc = inloc;
1115#endif
1116 inloc = &SUBREG_REG (in);
1117 in = *inloc;
1118
1119 if (!WORD_REGISTER_OPERATIONS
1120 && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1121 && MEM_P (in))
1122 /* This is supposed to happen only for paradoxical subregs made by
1123 combine.cc. (SUBREG (MEM)) isn't supposed to occur other ways. */
1124 gcc_assert (known_le (GET_MODE_SIZE (GET_MODE (in)),
1125 GET_MODE_SIZE (inmode)));
1126
1127 inmode = GET_MODE (in);
1128 }
1129
1130 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1131 where M1 is not valid for R if it was not handled by the code above.
1132
1133 Similar issue for (SUBREG constant ...) if it was not handled by the
1134 code above. This can happen if SUBREG_BYTE != 0.
1135
1136 However, we must reload the inner reg *as well as* the subreg in
1137 that case. */
1138
  if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
    {
      if (REG_P (SUBREG_REG (in)))
	subreg_in_class
	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
						   GET_MODE (SUBREG_REG (in)),
						   SUBREG_BYTE (in),
						   GET_MODE (in)),
			      REGNO (SUBREG_REG (in)));
      else if (CONSTANT_P (SUBREG_REG (in))
	       || GET_CODE (SUBREG_REG (in)) == PLUS)
	subreg_in_class = find_valid_class_1 (inmode,
					      GET_MODE (SUBREG_REG (in)),
					      rclass);

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for input reloads of type RELOAD_OTHER in the same
	 order as the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output before the outer reload.  */
      push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
      dont_remove_subreg = 1;
    }
1164
1165 /* Similarly for paradoxical and problematical SUBREGs on the output.
1166 Note that there is no reason we need worry about the previous value
1167 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1168 entitled to clobber it all (except in the case of a word mode subreg
1169 or of a STRICT_LOW_PART, in that latter case the constraint should
1170 label it input-output.) */
1171 if (out != 0 && GET_CODE (out) == SUBREG
1172 && (subreg_lowpart_p (out) || strict_low)
1173 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1174 outmode, rclass)
1175 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1176 && (CONSTANT_P (SUBREG_REG (out))
1177 || strict_low
1178 || (((REG_P (SUBREG_REG (out))
1179 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1180 || MEM_P (SUBREG_REG (out)))
1181 && (paradoxical_subreg_p (outermode: outmode, GET_MODE (SUBREG_REG (out)))
1182 || (WORD_REGISTER_OPERATIONS
1183 && partial_subreg_p (outermode: outmode, GET_MODE (SUBREG_REG (out)))
1184 && (known_equal_after_align_down
1185 (a: GET_MODE_SIZE (mode: outmode) - 1,
1186 b: GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1,
1187 UNITS_PER_WORD)))))
1188 || (REG_P (SUBREG_REG (out))
1189 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1190 /* The case of a word mode subreg
1191 is handled differently in the following statement. */
1192 && ! (known_le (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1193 && maybe_gt (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))),
1194 UNITS_PER_WORD))
1195 && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1196 || (secondary_reload_class (in_p: 0, rclass, mode: outmode, x: out) != NO_REGS
1197 && (secondary_reload_class (in_p: 0, rclass, GET_MODE (SUBREG_REG (out)),
1198 SUBREG_REG (out))
1199 == NO_REGS))
1200 || (REG_P (SUBREG_REG (out))
1201 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1202 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1203 GET_MODE (SUBREG_REG (out)),
1204 outmode))))
1205 {
1206#ifdef LIMIT_RELOAD_CLASS
1207 out_subreg_loc = outloc;
1208#endif
1209 outloc = &SUBREG_REG (out);
1210 out = *outloc;
1211 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1212 || known_le (GET_MODE_SIZE (GET_MODE (out)),
1213 GET_MODE_SIZE (outmode)));
1214 outmode = GET_MODE (out);
1215 }
1216
1217 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1218 where either M1 is not valid for R or M2 is wider than a word but we
1219 only need one register to store an M2-sized quantity in R.
1220
1221 However, we must reload the inner reg *as well as* the subreg in
1222 that case and the inner reg is an in-out reload. */
1223
  if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
    {
      enum reg_class in_out_class
	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
						 GET_MODE (SUBREG_REG (out)),
						 SUBREG_BYTE (out),
						 GET_MODE (out)),
			    REGNO (SUBREG_REG (out)));

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for output reloads of type RELOAD_OTHER in reverse
	 order of the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output after the outer reload.  */
      push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
		   0, 0, opnum, RELOAD_OTHER);
      dont_remove_subreg = 1;
    }
1244
1245 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1246 if (in != 0 && out != 0 && MEM_P (out)
1247 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1248 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1249 dont_share = 1;
1250
1251 /* If IN is a SUBREG of a hard register, make a new REG. This
1252 simplifies some of the cases below. */
1253
1254 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1255 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1256 && ! dont_remove_subreg)
1257 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1258
1259 /* Similarly for OUT. */
1260 if (out != 0 && GET_CODE (out) == SUBREG
1261 && REG_P (SUBREG_REG (out))
1262 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1263 && ! dont_remove_subreg)
1264 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1265
1266 /* Narrow down the class of register wanted if that is
1267 desirable on this machine for efficiency. */
1268 {
1269 reg_class_t preferred_class = rclass;
1270
1271 if (in != 0)
1272 preferred_class = targetm.preferred_reload_class (in, rclass);
1273
1274 /* Output reloads may need analogous treatment, different in detail. */
1275 if (out != 0)
1276 preferred_class
1277 = targetm.preferred_output_reload_class (out, preferred_class);
1278
1279 /* Discard what the target said if we cannot do it. */
1280 if (preferred_class != NO_REGS
1281 || (optional && type == RELOAD_FOR_OUTPUT))
1282 rclass = (enum reg_class) preferred_class;
1283 }
1284
1285 /* Make sure we use a class that can handle the actual pseudo
1286 inside any subreg. For example, on the 386, QImode regs
1287 can appear within SImode subregs. Although GENERAL_REGS
1288 can handle SImode, QImode needs a smaller class. */
1289#ifdef LIMIT_RELOAD_CLASS
1290 if (in_subreg_loc)
1291 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1292 else if (in != 0 && GET_CODE (in) == SUBREG)
1293 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1294
1295 if (out_subreg_loc)
1296 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1297 if (out != 0 && GET_CODE (out) == SUBREG)
1298 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1299#endif
1300
1301 /* Verify that this class is at least possible for the mode that
1302 is specified. */
1303 if (this_insn_is_asm)
1304 {
1305 machine_mode mode;
1306 if (paradoxical_subreg_p (outermode: inmode, innermode: outmode))
1307 mode = inmode;
1308 else
1309 mode = outmode;
1310 if (mode == VOIDmode)
1311 {
1312 error_for_asm (this_insn, "cannot reload integer constant "
1313 "operand in %<asm%>");
1314 mode = word_mode;
1315 if (in != 0)
1316 inmode = word_mode;
1317 if (out != 0)
1318 outmode = word_mode;
1319 }
1320 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1321 if (targetm.hard_regno_mode_ok (i, mode)
1322 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, regno: i))
1323 break;
1324 if (i == FIRST_PSEUDO_REGISTER)
1325 {
1326 error_for_asm (this_insn, "impossible register constraint "
1327 "in %<asm%>");
1328 /* Avoid further trouble with this insn. */
1329 PATTERN (insn: this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1330 /* We used to continue here setting class to ALL_REGS, but it triggers
1331 sanity check on i386 for:
1332 void foo(long double d)
1333 {
1334 asm("" :: "a" (d));
1335 }
1336 Returning zero here ought to be safe as we take care in
1337 find_reloads to not process the reloads when instruction was
1338 replaced by USE. */
1339
1340 return 0;
1341 }
1342 }
1343
1344 /* Optional output reloads are always OK even if we have no register class,
1345 since the function of these reloads is only to have spill_reg_store etc.
1346 set, so that the storing insn can be deleted later. */
1347 gcc_assert (rclass != NO_REGS
1348 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1349
1350 i = find_reusable_reload (p_in: &in, out, rclass, type, opnum, dont_share);
1351
1352 if (i == n_reloads)
1353 {
1354 /* See if we need a secondary reload register to move between CLASS
1355 and IN or CLASS and OUT. Get the icode and push any required reloads
1356 needed for each of them if so. */
1357
1358 if (in != 0)
1359 secondary_in_reload
1360 = push_secondary_reload (in_p: 1, x: in, opnum, optional, reload_class: rclass, reload_mode: inmode, type,
1361 picode: &secondary_in_icode, NULL);
1362 if (out != 0 && GET_CODE (out) != SCRATCH)
1363 secondary_out_reload
1364 = push_secondary_reload (in_p: 0, x: out, opnum, optional, reload_class: rclass, reload_mode: outmode,
1365 type, picode: &secondary_out_icode, NULL);
1366
1367 /* We found no existing reload suitable for re-use.
1368 So add an additional reload. */
1369
1370 if (subreg_in_class == NO_REGS
1371 && in != 0
1372 && (REG_P (in)
1373 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1374 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1375 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1376 /* If a memory location is needed for the copy, make one. */
1377 if (subreg_in_class != NO_REGS
1378 && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1379 get_secondary_mem (x: in, mode: inmode, opnum, type);
1380
1381 i = n_reloads;
1382 rld[i].in = in;
1383 rld[i].out = out;
1384 rld[i].rclass = rclass;
1385 rld[i].inmode = inmode;
1386 rld[i].outmode = outmode;
1387 rld[i].reg_rtx = 0;
1388 rld[i].optional = optional;
1389 rld[i].inc = 0;
1390 rld[i].nocombine = 0;
1391 rld[i].in_reg = inloc ? *inloc : 0;
1392 rld[i].out_reg = outloc ? *outloc : 0;
1393 rld[i].opnum = opnum;
1394 rld[i].when_needed = type;
1395 rld[i].secondary_in_reload = secondary_in_reload;
1396 rld[i].secondary_out_reload = secondary_out_reload;
1397 rld[i].secondary_in_icode = secondary_in_icode;
1398 rld[i].secondary_out_icode = secondary_out_icode;
1399 rld[i].secondary_p = 0;
1400
1401 n_reloads++;
1402
1403 if (out != 0
1404 && (REG_P (out)
1405 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1406 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1407 && (targetm.secondary_memory_needed
1408 (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1409 get_secondary_mem (x: out, mode: outmode, opnum, type);
1410 }
1411 else
1412 {
1413 /* We are reusing an existing reload,
1414 but we may have additional information for it.
1415 For example, we may now have both IN and OUT
1416 while the old one may have just one of them. */
1417
1418 /* The modes can be different. If they are, we want to reload in
1419 the larger mode, so that the value is valid for both modes. */
1420 if (inmode != VOIDmode
1421 && partial_subreg_p (outermode: rld[i].inmode, innermode: inmode))
1422 rld[i].inmode = inmode;
1423 if (outmode != VOIDmode
1424 && partial_subreg_p (outermode: rld[i].outmode, innermode: outmode))
1425 rld[i].outmode = outmode;
1426 if (in != 0)
1427 {
1428 rtx in_reg = inloc ? *inloc : 0;
1429 /* If we merge reloads for two distinct rtl expressions that
1430 are identical in content, there might be duplicate address
1431 reloads. Remove the extra set now, so that if we later find
1432 that we can inherit this reload, we can get rid of the
1433 address reloads altogether.
1434
1435 Do not do this if both reloads are optional since the result
1436 would be an optional reload which could potentially leave
1437 unresolved address replacements.
1438
1439 It is not sufficient to call transfer_replacements since
1440 choose_reload_regs will remove the replacements for address
1441 reloads of inherited reloads which results in the same
1442 problem. */
1443 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1444 && ! (rld[i].optional && optional))
1445 {
1446 /* We must keep the address reload with the lower operand
1447 number alive. */
1448 if (opnum > rld[i].opnum)
1449 {
1450 remove_address_replacements (in_rtx: in);
1451 in = rld[i].in;
1452 in_reg = rld[i].in_reg;
1453 }
1454 else
1455 remove_address_replacements (in_rtx: rld[i].in);
1456 }
1457 /* When emitting reloads we don't necessarily look at the in-
1458 and outmode, but also directly at the operands (in and out).
1459 So we can't simply overwrite them with whatever we have found
1460 for this (to-be-merged) reload, we have to "merge" that too.
1461 Reusing another reload already verified that we deal with the
1462 same operands, just possibly in different modes. So we
1463 overwrite the operands only when the new mode is larger.
1464 See also PR33613. */
1465 if (!rld[i].in
1466 || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1467 rld[i].in = in;
1468 if (!rld[i].in_reg
1469 || (in_reg
1470 && partial_subreg_p (GET_MODE (rld[i].in_reg),
1471 GET_MODE (in_reg))))
1472 rld[i].in_reg = in_reg;
1473 }
1474 if (out != 0)
1475 {
1476 if (!rld[i].out
1477 || (out
1478 && partial_subreg_p (GET_MODE (rld[i].out),
1479 GET_MODE (out))))
1480 rld[i].out = out;
1481 if (outloc
1482 && (!rld[i].out_reg
1483 || partial_subreg_p (GET_MODE (rld[i].out_reg),
1484 GET_MODE (*outloc))))
1485 rld[i].out_reg = *outloc;
1486 }
1487 if (reg_class_subset_p (rclass, rld[i].rclass))
1488 rld[i].rclass = rclass;
1489 rld[i].optional &= optional;
1490 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1491 opnum, rld[i].opnum))
1492 rld[i].when_needed = RELOAD_OTHER;
1493 rld[i].opnum = MIN (rld[i].opnum, opnum);
1494 }
1495
1496 /* If the ostensible rtx being reloaded differs from the rtx found
1497 in the location to substitute, this reload is not safe to combine
1498 because we cannot reliably tell whether it appears in the insn. */
1499
1500 if (in != 0 && in != *inloc)
1501 rld[i].nocombine = 1;
1502
1503 /* If we will replace IN and OUT with the reload-reg,
1504 record where they are located so that substitution need
1505 not do a tree walk. */
1506
1507 if (replace_reloads)
1508 {
1509 if (inloc != 0)
1510 {
1511 struct replacement *r = &replacements[n_replacements++];
1512 r->what = i;
1513 r->where = inloc;
1514 r->mode = inmode;
1515 }
1516 if (outloc != 0 && outloc != inloc)
1517 {
1518 struct replacement *r = &replacements[n_replacements++];
1519 r->what = i;
1520 r->where = outloc;
1521 r->mode = outmode;
1522 }
1523 }
1524
1525 /* If this reload is just being introduced and it has both
1526 an incoming quantity and an outgoing quantity that are
1527 supposed to be made to match, see if either one of the two
1528 can serve as the place to reload into.
1529
1530 If one of them is acceptable, set rld[i].reg_rtx
1531 to that one. */
1532
1533 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1534 {
1535 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1536 inmode, outmode,
1537 rld[i].rclass, i,
1538 earlyclobber_operand_p (out));
1539
1540 /* If the outgoing register already contains the same value
1541 as the incoming one, we can dispense with loading it.
1542 The easiest way to tell the caller that is to give a phony
1543 value for the incoming operand (same as outgoing one). */
1544 if (rld[i].reg_rtx == out
1545 && (REG_P (in) || CONSTANT_P (in))
1546 && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1547 static_reload_reg_p, i, inmode) != 0)
1548 rld[i].in = out;
1549 }
1550
1551 /* If this is an input reload and the operand contains a register that
1552 dies in this insn and is used nowhere else, see if it is the right class
1553 to be used for this reload. Use it if so. (This occurs most commonly
1554      in the case of paradoxical SUBREGs and in-out reloads.)  We cannot do
1555 this if it is also an output reload that mentions the register unless
1556 the output is a SUBREG that clobbers an entire register.
1557
1558 Note that the operand might be one of the spill regs, if it is a
1559 pseudo reg and we are in a block where spilling has not taken place.
1560 But if there is no spilling in this block, that is OK.
1561 An explicitly used hard reg cannot be a spill reg. */
1562
1563 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1564 {
1565 rtx note;
1566 int regno;
1567 machine_mode rel_mode = inmode;
1568
1569 if (out && partial_subreg_p (outermode: rel_mode, innermode: outmode))
1570 rel_mode = outmode;
1571
1572 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1573 if (REG_NOTE_KIND (note) == REG_DEAD
1574 && REG_P (XEXP (note, 0))
1575 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1576 && reg_mentioned_p (XEXP (note, 0), in)
1577 /* Check that a former pseudo is valid; see find_dummy_reload. */
1578 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1579 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1580 ORIGINAL_REGNO (XEXP (note, 0)))
1581 && REG_NREGS (XEXP (note, 0)) == 1))
1582 && ! refers_to_regno_for_reload_p (regno,
1583 end_hard_regno (mode: rel_mode,
1584 regno),
1585 PATTERN (insn: this_insn), inloc)
1586 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1587 /* If this is also an output reload, IN cannot be used as
1588 the reload register if it is set in this insn unless IN
1589 is also OUT. */
1590 && (out == 0 || in == out
1591 || ! hard_reg_set_here_p (regno,
1592 end_hard_regno (mode: rel_mode, regno),
1593 PATTERN (insn: this_insn)))
1594 /* ??? Why is this code so different from the previous?
1595 Is there any simple coherent way to describe the two together?
1596      What's going on here?  */
1597 && (in != out
1598 || (GET_CODE (in) == SUBREG
1599 && (known_equal_after_align_up
1600 (a: GET_MODE_SIZE (GET_MODE (in)),
1601 b: GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))),
1602 UNITS_PER_WORD))))
1603 /* Make sure the operand fits in the reg that dies. */
1604 && known_le (GET_MODE_SIZE (rel_mode),
1605 GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1606 && targetm.hard_regno_mode_ok (regno, inmode)
1607 && targetm.hard_regno_mode_ok (regno, outmode))
1608 {
1609 unsigned int offs;
1610 unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1611 hard_regno_nregs (regno, outmode));
1612
1613 for (offs = 0; offs < nregs; offs++)
1614 if (fixed_regs[regno + offs]
1615 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1616 bit: regno + offs))
1617 break;
1618
1619 if (offs == nregs
1620 && (! (refers_to_regno_for_reload_p
1621 (regno, end_hard_regno (mode: inmode, regno), in, (rtx *) 0))
1622 || can_reload_into (in, regno, mode: inmode)))
1623 {
1624 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1625 break;
1626 }
1627 }
1628 }
1629
1630 if (out)
1631 output_reloadnum = i;
1632
1633 return i;
1634}
1635
1636/* Record an additional place we must replace a value
1637 for which we have already recorded a reload.
1638 RELOADNUM is the value returned by push_reload
1639 when the reload was recorded.
1640 This is used in insn patterns that use match_dup. */
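/* For example, when an insn pattern contains (match_dup 1), the reload
   pushed for operand 1 must also be substituted at the duplicate's
   location; recording that extra location here lets subst_reloads patch
   both copies without rescanning the insn.  */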
1641
1642static void
1643push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1644{
1645 if (replace_reloads)
1646 {
1647 struct replacement *r = &replacements[n_replacements++];
1648 r->what = reloadnum;
1649 r->where = loc;
1650 r->mode = mode;
1651 }
1652}
1653
1654/* Duplicate any replacement we have recorded to apply at
1655 location ORIG_LOC to also be performed at DUP_LOC.
1656 This is used in insn patterns that use match_dup. */
1657
1658static void
1659dup_replacements (rtx *dup_loc, rtx *orig_loc)
1660{
1661 int i, n = n_replacements;
1662
1663 for (i = 0; i < n; i++)
1664 {
1665 struct replacement *r = &replacements[i];
1666 if (r->where == orig_loc)
1667 push_replacement (loc: dup_loc, reloadnum: r->what, mode: r->mode);
1668 }
1669}
1670
1671/* Transfer all replacements that used to be in reload FROM to be in
1672 reload TO. */
1673
1674void
1675transfer_replacements (int to, int from)
1676{
1677 int i;
1678
1679 for (i = 0; i < n_replacements; i++)
1680 if (replacements[i].what == from)
1681 replacements[i].what = to;
1682}
1683
1684/* IN_RTX is the value loaded by a reload that we now decided to inherit,
1685 or a subpart of it. If we have any replacements registered for IN_RTX,
1686 cancel the reloads that were supposed to load them.
1687 Return nonzero if we canceled any reloads. */
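/* For example, suppose a reload loads (mem:SI (plus:SI (reg:SI 100)
   (const_int 4))) and a separate reload was pushed just to get (reg 100)
   into a valid address register.  If the first reload is inherited, the
   address reload no longer serves any purpose, so it is canceled here,
   together (recursively) with any reloads for its own address.  */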
1688int
1689remove_address_replacements (rtx in_rtx)
1690{
1691 int i, j;
1692 char reload_flags[MAX_RELOADS];
1693 int something_changed = 0;
1694
1695 memset (s: reload_flags, c: 0, n: sizeof reload_flags);
1696 for (i = 0, j = 0; i < n_replacements; i++)
1697 {
1698 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1699 reload_flags[replacements[i].what] |= 1;
1700 else
1701 {
1702 replacements[j++] = replacements[i];
1703 reload_flags[replacements[i].what] |= 2;
1704 }
1705 }
1706 /* Note that the following store must be done before the recursive calls. */
1707 n_replacements = j;
1708
1709 for (i = n_reloads - 1; i >= 0; i--)
1710 {
1711 if (reload_flags[i] == 1)
1712 {
1713 deallocate_reload_reg (r: i);
1714 remove_address_replacements (in_rtx: rld[i].in);
1715 rld[i].in = 0;
1716 something_changed = 1;
1717 }
1718 }
1719 return something_changed;
1720}
1721
1722/* If there is only one output reload, and it is not for an earlyclobber
1723 operand, try to combine it with a (logically unrelated) input reload
1724 to reduce the number of reload registers needed.
1725
1726 This is safe if the input reload does not appear in
1727 the value being output-reloaded, because this implies
1728 it is not needed any more once the original insn completes.
1729
1730   If that doesn't work, see if we can use any of the registers that
1731 die in this insn as a reload register. We can if it is of the right
1732 class and does not appear in the value being output-reloaded. */
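/* For example (an illustration; the exact conditions are checked below),
   on a three-address target

     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))

   where neither pseudo received a hard register needs an input reload
   for reg 101 and an output reload for reg 100.  Since the input value
   does not appear in the value being stored, one reload register can
   serve both: it is loaded from reg 101's location, used by the add,
   and then stored to reg 100's location.  */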
1733
1734static void
1735combine_reloads (void)
1736{
1737 int i, regno;
1738 int output_reload = -1;
1739 int secondary_out = -1;
1740 rtx note;
1741
1742 /* Find the output reload; return unless there is exactly one
1743 and that one is mandatory. */
1744
1745 for (i = 0; i < n_reloads; i++)
1746 if (rld[i].out != 0)
1747 {
1748 if (output_reload >= 0)
1749 return;
1750 output_reload = i;
1751 }
1752
1753 if (output_reload < 0 || rld[output_reload].optional)
1754 return;
1755
1756 /* An input-output reload isn't combinable. */
1757
1758 if (rld[output_reload].in != 0)
1759 return;
1760
1761 /* If this reload is for an earlyclobber operand, we can't do anything. */
1762 if (earlyclobber_operand_p (rld[output_reload].out))
1763 return;
1764
1765 /* If there is a reload for part of the address of this operand, we would
1766 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1767 its life to the point where doing this combine would not lower the
1768 number of spill registers needed. */
1769 for (i = 0; i < n_reloads; i++)
1770 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1771 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1772 && rld[i].opnum == rld[output_reload].opnum)
1773 return;
1774
1775 /* Check each input reload; can we combine it? */
1776
1777 for (i = 0; i < n_reloads; i++)
1778 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1779 /* Life span of this reload must not extend past main insn. */
1780 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1781 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1782 && rld[i].when_needed != RELOAD_OTHER
1783 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1784 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1785 [(int) rld[output_reload].outmode])
1786 && known_eq (rld[i].inc, 0)
1787 && rld[i].reg_rtx == 0
1788 /* Don't combine two reloads with different secondary
1789 memory locations. */
1790 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1791 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1792 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1793 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1794 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1795 ? (rld[i].rclass == rld[output_reload].rclass)
1796 : (reg_class_subset_p (rld[i].rclass,
1797 rld[output_reload].rclass)
1798 || reg_class_subset_p (rld[output_reload].rclass,
1799 rld[i].rclass)))
1800 && (MATCHES (rld[i].in, rld[output_reload].out)
1801 /* Args reversed because the first arg seems to be
1802 the one that we imagine being modified
1803 while the second is the one that might be affected. */
1804 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1805 rld[i].in)
1806 /* However, if the input is a register that appears inside
1807 the output, then we also can't share.
1808 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1809 If the same reload reg is used for both reg 69 and the
1810 result to be stored in memory, then that result
1811 will clobber the address of the memory ref. */
1812 && ! (REG_P (rld[i].in)
1813 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1814 rld[output_reload].out))))
1815 && ! reload_inner_reg_of_subreg (x: rld[i].in, mode: rld[i].inmode,
1816 output: rld[i].when_needed != RELOAD_FOR_INPUT)
1817 && (reg_class_size[(int) rld[i].rclass]
1818 || targetm.small_register_classes_for_mode_p (VOIDmode))
1819 /* We will allow making things slightly worse by combining an
1820 input and an output, but no worse than that. */
1821 && (rld[i].when_needed == RELOAD_FOR_INPUT
1822 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1823 {
1824 int j;
1825
1826 /* We have found a reload to combine with! */
1827 rld[i].out = rld[output_reload].out;
1828 rld[i].out_reg = rld[output_reload].out_reg;
1829 rld[i].outmode = rld[output_reload].outmode;
1830 /* Mark the old output reload as inoperative. */
1831 rld[output_reload].out = 0;
1832 /* The combined reload is needed for the entire insn. */
1833 rld[i].when_needed = RELOAD_OTHER;
1834 /* If the output reload had a secondary reload, copy it. */
1835 if (rld[output_reload].secondary_out_reload != -1)
1836 {
1837 rld[i].secondary_out_reload
1838 = rld[output_reload].secondary_out_reload;
1839 rld[i].secondary_out_icode
1840 = rld[output_reload].secondary_out_icode;
1841 }
1842
1843 /* Copy any secondary MEM. */
1844 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1845 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1846 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1847 /* If required, minimize the register class. */
1848 if (reg_class_subset_p (rld[output_reload].rclass,
1849 rld[i].rclass))
1850 rld[i].rclass = rld[output_reload].rclass;
1851
1852 /* Transfer all replacements from the old reload to the combined. */
1853 for (j = 0; j < n_replacements; j++)
1854 if (replacements[j].what == output_reload)
1855 replacements[j].what = i;
1856
1857 return;
1858 }
1859
1860 /* If this insn has only one operand that is modified or written (assumed
1861 to be the first), it must be the one corresponding to this reload. It
1862 is safe to use anything that dies in this insn for that output provided
1863 that it does not occur in the output (we already know it isn't an
1864   earlyclobber).  If this is an asm insn, give up. */
1865
1866 if (INSN_CODE (this_insn) == -1)
1867 return;
1868
1869 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1870 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1871 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1872 return;
1873
1874 /* See if some hard register that dies in this insn and is not used in
1875 the output is the right class. Only works if the register we pick
1876 up can fully hold our output reload. */
1877 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1878 if (REG_NOTE_KIND (note) == REG_DEAD
1879 && REG_P (XEXP (note, 0))
1880 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1881 rld[output_reload].out)
1882 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1883 && targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1884 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1885 bit: regno)
1886 && (hard_regno_nregs (regno, mode: rld[output_reload].outmode)
1887 <= REG_NREGS (XEXP (note, 0)))
1888 /* Ensure that a secondary or tertiary reload for this output
1889 won't want this register. */
1890 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1891 || (!(TEST_HARD_REG_BIT
1892 (reg_class_contents[(int) rld[secondary_out].rclass], bit: regno))
1893 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1894 || !(TEST_HARD_REG_BIT
1895 (reg_class_contents[(int) rld[secondary_out].rclass],
1896 bit: regno)))))
1897 && !fixed_regs[regno]
1898 /* Check that a former pseudo is valid; see find_dummy_reload. */
1899 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1900 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1901 ORIGINAL_REGNO (XEXP (note, 0)))
1902 && REG_NREGS (XEXP (note, 0)) == 1)))
1903 {
1904 rld[output_reload].reg_rtx
1905 = gen_rtx_REG (rld[output_reload].outmode, regno);
1906 return;
1907 }
1908}
1909
1910/* Try to find a reload register for an in-out reload (expressions IN and OUT).
1911 See if one of IN and OUT is a register that may be used;
1912 this is desirable since a spill-register won't be needed.
1913 If so, return the register rtx that proves acceptable.
1914
1915 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1916 RCLASS is the register class required for the reload.
1917
1918 If FOR_REAL is >= 0, it is the number of the reload,
1919 and in some cases when it can be discovered that OUT doesn't need
1920 to be computed, clear out rld[FOR_REAL].out.
1921
1922 If FOR_REAL is -1, this should not be done, because this call
1923 is just to see if a register can be found, not to find and install it.
1924
1925 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1926 puts an additional constraint on being able to use IN for OUT since
1927 IN must not appear elsewhere in the insn (it is assumed that IN itself
1928 is safe from the earlyclobber). */
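/* For example, if OUT is a hard register in RCLASS that is not fixed and
   is not referenced elsewhere in the insn, the reload can be done
   directly into it and OUT is returned, so no spill register is needed.
   IN is usable only under stricter conditions: it must be a hard
   register that dies in this insn, must not be mentioned in OUT, and
   must not be set by the insn.  */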
1929
1930static rtx
1931find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1932 machine_mode inmode, machine_mode outmode,
1933 reg_class_t rclass, int for_real, int earlyclobber)
1934{
1935 rtx in = real_in;
1936 rtx out = real_out;
1937 int in_offset = 0;
1938 int out_offset = 0;
1939 rtx value = 0;
1940
1941 /* If operands exceed a word, we can't use either of them
1942 unless they have the same size. */
1943 if (maybe_ne (a: GET_MODE_SIZE (mode: outmode), b: GET_MODE_SIZE (mode: inmode))
1944 && (maybe_gt (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1945 || maybe_gt (GET_MODE_SIZE (inmode), UNITS_PER_WORD)))
1946 return 0;
1947
1948 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1949 respectively refers to a hard register. */
1950
1951 /* Find the inside of any subregs. */
1952 while (GET_CODE (out) == SUBREG)
1953 {
1954 if (REG_P (SUBREG_REG (out))
1955 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1956 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1957 GET_MODE (SUBREG_REG (out)),
1958 SUBREG_BYTE (out),
1959 GET_MODE (out));
1960 out = SUBREG_REG (out);
1961 }
1962 while (GET_CODE (in) == SUBREG)
1963 {
1964 if (REG_P (SUBREG_REG (in))
1965 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1966 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1967 GET_MODE (SUBREG_REG (in)),
1968 SUBREG_BYTE (in),
1969 GET_MODE (in));
1970 in = SUBREG_REG (in);
1971 }
1972
1973 /* Narrow down the reg class, the same way push_reload will;
1974 otherwise we might find a dummy now, but push_reload won't. */
1975 {
1976 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1977 if (preferred_class != NO_REGS)
1978 rclass = (enum reg_class) preferred_class;
1979 }
1980
1981 /* See if OUT will do. */
1982 if (REG_P (out)
1983 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1984 {
1985 unsigned int regno = REGNO (out) + out_offset;
1986 unsigned int nwords = hard_regno_nregs (regno, mode: outmode);
1987 rtx saved_rtx;
1988
1989 /* When we consider whether the insn uses OUT,
1990 ignore references within IN. They don't prevent us
1991 from copying IN into OUT, because those refs would
1992 move into the insn that reloads IN.
1993
1994 However, we only ignore IN in its role as this reload.
1995 If the insn uses IN elsewhere and it contains OUT,
1996 that counts. We can't be sure it's the "same" operand
1997 so it might not go through this reload.
1998
1999 We also need to avoid using OUT if it, or part of it, is a
2000 fixed register. Modifying such registers, even transiently,
2001 may have undefined effects on the machine, such as modifying
2002 the stack pointer. */
2003 saved_rtx = *inloc;
2004 *inloc = const0_rtx;
2005
2006 if (regno < FIRST_PSEUDO_REGISTER
2007 && targetm.hard_regno_mode_ok (regno, outmode)
2008 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2009 PATTERN (insn: this_insn), outloc))
2010 {
2011 unsigned int i;
2012
2013 for (i = 0; i < nwords; i++)
2014 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2015 bit: regno + i)
2016 || fixed_regs[regno + i])
2017 break;
2018
2019 if (i == nwords)
2020 {
2021 if (REG_P (real_out))
2022 value = real_out;
2023 else
2024 value = gen_rtx_REG (outmode, regno);
2025 }
2026 }
2027
2028 *inloc = saved_rtx;
2029 }
2030
2031 /* Consider using IN if OUT was not acceptable
2032 or if OUT dies in this insn (like the quotient in a divmod insn).
2033     We can't use IN unless it dies in this insn,
2034 which means we must know accurately which hard regs are live.
2035 Also, the result can't go in IN if IN is used within OUT,
2036 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2037 if (hard_regs_live_known
2038 && REG_P (in)
2039 && REGNO (in) < FIRST_PSEUDO_REGISTER
2040 && (value == 0
2041 || find_reg_note (this_insn, REG_UNUSED, real_out))
2042 && find_reg_note (this_insn, REG_DEAD, real_in)
2043 && !fixed_regs[REGNO (in)]
2044 && targetm.hard_regno_mode_ok (REGNO (in),
2045 /* The only case where out and real_out
2046 might have different modes is where
2047 real_out is a subreg, and in that
2048 case, out has a real mode. */
2049 (GET_MODE (out) != VOIDmode
2050 ? GET_MODE (out) : outmode))
2051 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2052 /* However only do this if we can be sure that this input
2053 operand doesn't correspond with an uninitialized pseudo.
2054 global can assign some hardreg to it that is the same as
2055 the one assigned to a different, also live pseudo (as it
2056 can ignore the conflict). We must never introduce writes
2057 to such hardregs, as they would clobber the other live
2058 pseudo. See PR 20973. */
2059 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2060 ORIGINAL_REGNO (in))
2061 /* Similarly, only do this if we can be sure that the death
2062 note is still valid. global can assign some hardreg to
2063 the pseudo referenced in the note and simultaneously a
2064 subword of this hardreg to a different, also live pseudo,
2065 because only another subword of the hardreg is actually
2066 used in the insn. This cannot happen if the pseudo has
2067 been assigned exactly one hardreg. See PR 33732. */
2068 && REG_NREGS (in) == 1)))
2069 {
2070 unsigned int regno = REGNO (in) + in_offset;
2071 unsigned int nwords = hard_regno_nregs (regno, mode: inmode);
2072
2073 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2074 && ! hard_reg_set_here_p (regno, regno + nwords,
2075 PATTERN (insn: this_insn))
2076 && (! earlyclobber
2077 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2078 PATTERN (insn: this_insn), inloc)))
2079 {
2080 unsigned int i;
2081
2082 for (i = 0; i < nwords; i++)
2083 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2084 bit: regno + i))
2085 break;
2086
2087 if (i == nwords)
2088 {
2089 /* If we were going to use OUT as the reload reg
2090 and changed our mind, it means OUT is a dummy that
2091 dies here. So don't bother copying value to it. */
2092 if (for_real >= 0 && value == real_out)
2093 rld[for_real].out = 0;
2094 if (REG_P (real_in))
2095 value = real_in;
2096 else
2097 value = gen_rtx_REG (inmode, regno);
2098 }
2099 }
2100 }
2101
2102 return value;
2103}
2104
2105/* This page contains subroutines used mainly for determining
2106 whether the IN or an OUT of a reload can serve as the
2107 reload register. */
2108
2109/* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2110
2111int
2112earlyclobber_operand_p (rtx x)
2113{
2114 int i;
2115
2116 for (i = 0; i < n_earlyclobbers; i++)
2117 if (reload_earlyclobbers[i] == x)
2118 return 1;
2119
2120 return 0;
2121}
2122
2123/* Return 1 if expression X alters a hard reg in the range
2124 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2125 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2126 X should be the body of an instruction. */
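/* For example, with BEG_REGNO == 1, END_REGNO == 3 and
   X == (set (reg:SI 2) (reg:SI 0)), the result is 1 on a target where
   SImode occupies a single hard register, because register 2 is altered;
   a mere use, as of register 0 here, does not count.  */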
2127
2128static int
2129hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2130{
2131 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2132 {
2133 rtx op0 = SET_DEST (x);
2134
2135 while (GET_CODE (op0) == SUBREG)
2136 op0 = SUBREG_REG (op0);
2137 if (REG_P (op0))
2138 {
2139 unsigned int r = REGNO (op0);
2140
2141 /* See if this reg overlaps range under consideration. */
2142 if (r < end_regno
2143 && end_hard_regno (GET_MODE (op0), regno: r) > beg_regno)
2144 return 1;
2145 }
2146 }
2147 else if (GET_CODE (x) == PARALLEL)
2148 {
2149 int i = XVECLEN (x, 0) - 1;
2150
2151 for (; i >= 0; i--)
2152 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2153 return 1;
2154 }
2155
2156 return 0;
2157}
2158
2159/* Return true if ADDR is a valid memory address for mode MODE
2160 in address space AS, and check that each pseudo reg has the
2161 proper kind of hard reg. */
2162
2163bool
2164strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2165 rtx addr, addr_space_t as, code_helper)
2166{
2167#ifdef GO_IF_LEGITIMATE_ADDRESS
2168 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2169 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2170 return false;
2171
2172 win:
2173 return true;
2174#else
2175 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as,
2176 ERROR_MARK);
2177#endif
2178}
2179
2180/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2181 if they are the same hard reg, and has special hacks for
2182 autoincrement and autodecrement.
2183 This is specifically intended for find_reloads to use
2184 in determining whether two operands match.
2185 X is the operand whose number is the lower of the two.
2186
2187 The value is 2 if Y contains a pre-increment that matches
2188 a non-incrementing address in X. */
2189
2190/* ??? To be completely correct, we should arrange to pass
2191 for X the output operand and for Y the input operand.
2192 For now, we assume that the output operand has the lower number
2193 because that is natural in (SET output (... input ...)). */
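/* For example,

     operands_match_p ((mem:SI (reg:SI 4)),
                       (mem:SI (pre_inc:SI (reg:SI 4))))

   returns 2: the addresses agree except for the pre-increment in Y,
   which is the special case that callers must handle.  */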
2194
2195int
2196operands_match_p (rtx x, rtx y)
2197{
2198 int i;
2199 RTX_CODE code = GET_CODE (x);
2200 const char *fmt;
2201 int success_2;
2202
2203 if (x == y)
2204 return 1;
2205 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2206 && (REG_P (y) || (GET_CODE (y) == SUBREG
2207 && REG_P (SUBREG_REG (y)))))
2208 {
2209 int j;
2210
2211 if (code == SUBREG)
2212 {
2213 i = REGNO (SUBREG_REG (x));
2214 if (i >= FIRST_PSEUDO_REGISTER)
2215 goto slow;
2216 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2217 GET_MODE (SUBREG_REG (x)),
2218 SUBREG_BYTE (x),
2219 GET_MODE (x));
2220 }
2221 else
2222 i = REGNO (x);
2223
2224 if (GET_CODE (y) == SUBREG)
2225 {
2226 j = REGNO (SUBREG_REG (y));
2227 if (j >= FIRST_PSEUDO_REGISTER)
2228 goto slow;
2229 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2230 GET_MODE (SUBREG_REG (y)),
2231 SUBREG_BYTE (y),
2232 GET_MODE (y));
2233 }
2234 else
2235 j = REGNO (y);
2236
2237 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2238 multiple hard register group of scalar integer registers, so that
2239 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2240 register. */
2241 scalar_int_mode xmode;
2242 if (REG_WORDS_BIG_ENDIAN
2243 && is_a <scalar_int_mode> (GET_MODE (x), result: &xmode)
2244 && GET_MODE_SIZE (mode: xmode) > UNITS_PER_WORD
2245 && i < FIRST_PSEUDO_REGISTER)
2246 i += hard_regno_nregs (regno: i, mode: xmode) - 1;
2247 scalar_int_mode ymode;
2248 if (REG_WORDS_BIG_ENDIAN
2249 && is_a <scalar_int_mode> (GET_MODE (y), result: &ymode)
2250 && GET_MODE_SIZE (mode: ymode) > UNITS_PER_WORD
2251 && j < FIRST_PSEUDO_REGISTER)
2252 j += hard_regno_nregs (regno: j, mode: ymode) - 1;
2253
2254 return i == j;
2255 }
2256 /* If two operands must match, because they are really a single
2257 operand of an assembler insn, then two postincrements are invalid
2258 because the assembler insn would increment only once.
2259 On the other hand, a postincrement matches ordinary indexing
2260 if the postincrement is the output operand. */
2261 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2262 return operands_match_p (XEXP (x, 0), y);
2263 /* Two preincrements are invalid
2264 because the assembler insn would increment only once.
2265 On the other hand, a preincrement matches ordinary indexing
2266 if the preincrement is the input operand.
2267 In this case, return 2, since some callers need to do special
2268 things when this happens. */
2269 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2270 || GET_CODE (y) == PRE_MODIFY)
2271 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2272
2273 slow:
2274
2275 /* Now we have disposed of all the cases in which different rtx codes
2276 can match. */
2277 if (code != GET_CODE (y))
2278 return 0;
2279
2280 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2281 if (GET_MODE (x) != GET_MODE (y))
2282 return 0;
2283
2284  /* MEMs referring to different address spaces are not equivalent. */
2285 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2286 return 0;
2287
2288 switch (code)
2289 {
2290 CASE_CONST_UNIQUE:
2291 return 0;
2292
2293 case CONST_VECTOR:
2294 if (!same_vector_encodings_p (x, y))
2295 return false;
2296 break;
2297
2298 case LABEL_REF:
2299 return label_ref_label (ref: x) == label_ref_label (ref: y);
2300 case SYMBOL_REF:
2301 return XSTR (x, 0) == XSTR (y, 0);
2302
2303 default:
2304 break;
2305 }
2306
2307 /* Compare the elements. If any pair of corresponding elements
2308     fails to match, return 0 for the whole thing. */
2309
2310 success_2 = 0;
2311 fmt = GET_RTX_FORMAT (code);
2312 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2313 {
2314 int val, j;
2315 switch (fmt[i])
2316 {
2317 case 'w':
2318 if (XWINT (x, i) != XWINT (y, i))
2319 return 0;
2320 break;
2321
2322 case 'i':
2323 if (XINT (x, i) != XINT (y, i))
2324 return 0;
2325 break;
2326
2327 case 'p':
2328 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2329 return 0;
2330 break;
2331
2332 case 'e':
2333 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2334 if (val == 0)
2335 return 0;
2336 /* If any subexpression returns 2,
2337 we should return 2 if we are successful. */
2338 if (val == 2)
2339 success_2 = 1;
2340 break;
2341
2342 case '0':
2343 break;
2344
2345 case 'E':
2346 if (XVECLEN (x, i) != XVECLEN (y, i))
2347 return 0;
2348 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2349 {
2350 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2351 if (val == 0)
2352 return 0;
2353 if (val == 2)
2354 success_2 = 1;
2355 }
2356 break;
2357
2358 /* It is believed that rtx's at this level will never
2359 contain anything but integers and other rtx's,
2360 except for within LABEL_REFs and SYMBOL_REFs. */
2361 default:
2362 gcc_unreachable ();
2363 }
2364 }
2365 return 1 + success_2;
2366}
2367
2368/* Describe the range of registers or memory referenced by X.
2369 If X is a register, set REG_FLAG and put the first register
2370 number into START and the last plus one into END.
2371 If X is a memory reference, put a base address into BASE
2372 and a range of integer offsets into START and END.
2373 If X is pushing on the stack, we can assume it causes no trouble,
2374 so we set the SAFE field. */
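/* For example, (mem:SI (plus (reg fp) (const_int 8))) decomposes into
   BASE == (reg fp), START == 8, END == 12 (assuming a 4-byte SImode),
   while a stack push such as (mem:SI (pre_dec (reg sp))) is simply
   marked SAFE.  */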
2375
2376static struct decomposition
2377decompose (rtx x)
2378{
2379 struct decomposition val;
2380 int all_const = 0, regno;
2381
2382 memset (s: &val, c: 0, n: sizeof (val));
2383
2384 switch (GET_CODE (x))
2385 {
2386 case MEM:
2387 {
2388 rtx base = NULL_RTX, offset = 0;
2389 rtx addr = XEXP (x, 0);
2390
2391 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2392 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2393 {
2394 val.base = XEXP (addr, 0);
2395 val.start = -GET_MODE_SIZE (GET_MODE (x));
2396 val.end = GET_MODE_SIZE (GET_MODE (x));
2397 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2398 return val;
2399 }
2400
2401 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2402 {
2403 if (GET_CODE (XEXP (addr, 1)) == PLUS
2404 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2405 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2406 {
2407 val.base = XEXP (addr, 0);
2408 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2409 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2410 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2411 return val;
2412 }
2413 }
2414
2415 if (GET_CODE (addr) == CONST)
2416 {
2417 addr = XEXP (addr, 0);
2418 all_const = 1;
2419 }
2420 if (GET_CODE (addr) == PLUS)
2421 {
2422 if (CONSTANT_P (XEXP (addr, 0)))
2423 {
2424 base = XEXP (addr, 1);
2425 offset = XEXP (addr, 0);
2426 }
2427 else if (CONSTANT_P (XEXP (addr, 1)))
2428 {
2429 base = XEXP (addr, 0);
2430 offset = XEXP (addr, 1);
2431 }
2432 }
2433
2434 if (offset == 0)
2435 {
2436 base = addr;
2437 offset = const0_rtx;
2438 }
2439 if (GET_CODE (offset) == CONST)
2440 offset = XEXP (offset, 0);
2441 if (GET_CODE (offset) == PLUS)
2442 {
2443 if (CONST_INT_P (XEXP (offset, 0)))
2444 {
2445 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2446 offset = XEXP (offset, 0);
2447 }
2448 else if (CONST_INT_P (XEXP (offset, 1)))
2449 {
2450 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2451 offset = XEXP (offset, 1);
2452 }
2453 else
2454 {
2455 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2456 offset = const0_rtx;
2457 }
2458 }
2459 else if (!CONST_INT_P (offset))
2460 {
2461 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2462 offset = const0_rtx;
2463 }
2464
2465 if (all_const && GET_CODE (base) == PLUS)
2466 base = gen_rtx_CONST (GET_MODE (base), base);
2467
2468 gcc_assert (CONST_INT_P (offset));
2469
2470 val.start = INTVAL (offset);
2471 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2472 val.base = base;
2473 }
2474 break;
2475
2476 case REG:
2477 val.reg_flag = 1;
2478 regno = true_regnum (x);
2479 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2480 {
2481 /* A pseudo with no hard reg. */
2482 val.start = REGNO (x);
2483 val.end = val.start + 1;
2484 }
2485 else
2486 {
2487 /* A hard reg. */
2488 val.start = regno;
2489 val.end = end_hard_regno (GET_MODE (x), regno);
2490 }
2491 break;
2492
2493 case SUBREG:
2494 if (!REG_P (SUBREG_REG (x)))
2495 /* This could be more precise, but it's good enough. */
2496 return decompose (SUBREG_REG (x));
2497 regno = true_regnum (x);
2498 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2499 return decompose (SUBREG_REG (x));
2500
2501 /* A hard reg. */
2502 val.reg_flag = 1;
2503 val.start = regno;
2504 val.end = regno + subreg_nregs (x);
2505 break;
2506
2507 case SCRATCH:
2508 /* This hasn't been assigned yet, so it can't conflict yet. */
2509 val.safe = 1;
2510 break;
2511
2512 default:
2513 gcc_assert (CONSTANT_P (x));
2514 val.safe = 1;
2515 break;
2516 }
2517 return val;
2518}
2519
2520/* Return 1 if altering Y will not modify the value of X.
2521 Y is also described by YDATA, which should be decompose (Y). */
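/* For example, two stack slots such as X == (mem:SI (plus sp (const_int 16)))
   and Y == (mem:SI (plus sp (const_int 8))) share a base but cover the
   disjoint ranges [16,20) and [8,12) (assuming a 4-byte SImode), so
   altering Y cannot change X and the result is 1.  */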
2522
2523static int
2524immune_p (rtx x, rtx y, struct decomposition ydata)
2525{
2526 struct decomposition xdata;
2527
2528 if (ydata.reg_flag)
2529 /* In this case the decomposition structure contains register
2530 numbers rather than byte offsets. */
2531 return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2532 ydata.end.to_constant (),
2533 x, (rtx *) 0);
2534 if (ydata.safe)
2535 return 1;
2536
2537 gcc_assert (MEM_P (y));
2538 /* If Y is memory and X is not, Y can't affect X. */
2539 if (!MEM_P (x))
2540 return 1;
2541
2542 xdata = decompose (x);
2543
2544 if (! rtx_equal_p (xdata.base, ydata.base))
2545 {
2546 /* If bases are distinct symbolic constants, there is no overlap. */
2547 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2548 return 1;
2549 /* Constants and stack slots never overlap. */
2550 if (CONSTANT_P (xdata.base)
2551 && (ydata.base == frame_pointer_rtx
2552 || ydata.base == hard_frame_pointer_rtx
2553 || ydata.base == stack_pointer_rtx))
2554 return 1;
2555 if (CONSTANT_P (ydata.base)
2556 && (xdata.base == frame_pointer_rtx
2557 || xdata.base == hard_frame_pointer_rtx
2558 || xdata.base == stack_pointer_rtx))
2559 return 1;
2560 /* If either base is variable, we don't know anything. */
2561 return 0;
2562 }
2563
2564 return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2565}
2566
2567/* Similar, but calls decompose. */
2568
2569int
2570safe_from_earlyclobber (rtx op, rtx clobber)
2571{
2572 struct decomposition early_data;
2573
2574 early_data = decompose (x: clobber);
2575 return immune_p (x: op, y: clobber, ydata: early_data);
2576}
2577
2578/* Main entry point of this file: search the body of INSN
2579 for values that need reloading and record them with push_reload.
2580 REPLACE nonzero means record also where the values occur
2581 so that subst_reloads can be used.
2582
2583 IND_LEVELS says how many levels of indirection are supported by this
2584 machine; a value of zero means that a memory reference is not a valid
2585 memory address.
2586
2587 LIVE_KNOWN says we have valid information about which hard
2588 regs are live at each point in the program; this is true when
2589 we are called from global_alloc but false when stupid register
2590 allocation has been done.
2591
2592 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2593 which is nonnegative if the reg has been commandeered for reloading into.
2594 It is copied into STATIC_RELOAD_REG_P and referenced from there
2595 by various subroutines.
2596
2597 Return TRUE if some operands need to be changed, because of swapping
2598 commutative operands, reg_equiv_address substitution, or whatever. */
2599
2600int
2601find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2602 short *reload_reg_p)
2603{
2604 int insn_code_number;
2605 int i, j;
2606 int noperands;
2607 /* These start out as the constraints for the insn
2608 and they are chewed up as we consider alternatives. */
2609 const char *constraints[MAX_RECOG_OPERANDS];
2610 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2611 a register. */
2612 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2613 char pref_or_nothing[MAX_RECOG_OPERANDS];
2614 /* Nonzero for a MEM operand whose entire address needs a reload.
2615 May be -1 to indicate the entire address may or may not need a reload. */
2616 int address_reloaded[MAX_RECOG_OPERANDS];
2617 /* Nonzero for an address operand that needs to be completely reloaded.
2618 May be -1 to indicate the entire operand may or may not need a reload. */
2619 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2620 /* Value of enum reload_type to use for operand. */
2621 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2622 /* Value of enum reload_type to use within address of operand. */
2623 enum reload_type address_type[MAX_RECOG_OPERANDS];
2624 /* Save the usage of each operand. */
2625 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2626 int no_input_reloads = 0, no_output_reloads = 0;
2627 int n_alternatives;
2628 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2629 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2630 char this_alternative_win[MAX_RECOG_OPERANDS];
2631 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2632 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2633 int this_alternative_matches[MAX_RECOG_OPERANDS];
2634 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2635 int this_alternative_number;
2636 int goal_alternative_number = 0;
2637 int operand_reloadnum[MAX_RECOG_OPERANDS];
2638 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2639 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2640 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2641 char goal_alternative_win[MAX_RECOG_OPERANDS];
2642 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2643 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2644 int goal_alternative_swapped;
2645 int best;
2646 int commutative;
2647 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2648 rtx substed_operand[MAX_RECOG_OPERANDS];
2649 rtx body = PATTERN (insn);
2650 rtx set = single_set (insn);
2651 int goal_earlyclobber = 0, this_earlyclobber;
2652 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2653 int retval = 0;
2654
2655 this_insn = insn;
2656 n_reloads = 0;
2657 n_replacements = 0;
2658 n_earlyclobbers = 0;
2659 replace_reloads = replace;
2660 hard_regs_live_known = live_known;
2661 static_reload_reg_p = reload_reg_p;
2662
2663 if (JUMP_P (insn) && INSN_CODE (insn) < 0)
2664 {
2665 extract_insn (insn);
2666 for (i = 0; i < recog_data.n_operands; i++)
2667 if (recog_data.operand_type[i] != OP_IN)
2668 break;
2669 if (i < recog_data.n_operands)
2670 {
2671 error_for_asm (insn,
2672 "the target does not support %<asm goto%> "
2673 "with outputs in %<asm%>");
2674 ira_nullify_asm_goto (insn);
2675 return 0;
2676 }
2677 }
2678
2679 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads. */
2680 if (JUMP_P (insn) || CALL_P (insn))
2681 no_output_reloads = 1;
2682
2683 /* The eliminated forms of any secondary memory locations are per-insn, so
2684 clear them out here. */
2685
2686 if (secondary_memlocs_elim_used)
2687 {
2688 memset (s: secondary_memlocs_elim, c: 0,
2689 n: sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2690 secondary_memlocs_elim_used = 0;
2691 }
2692
2693 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2694 is cheap to move between them. If it is not, there may not be an insn
2695 to do the copy, so we may need a reload. */
2696 if (GET_CODE (body) == SET
2697 && REG_P (SET_DEST (body))
2698 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2699 && REG_P (SET_SRC (body))
2700 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2701 && register_move_cost (GET_MODE (SET_SRC (body)),
2702 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2703 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2704 return 0;
2705
2706 extract_insn (insn);
2707
2708 noperands = reload_n_operands = recog_data.n_operands;
2709 n_alternatives = recog_data.n_alternatives;
2710
2711 /* Just return "no reloads" if insn has no operands with constraints. */
2712 if (noperands == 0 || n_alternatives == 0)
2713 return 0;
2714
2715 insn_code_number = INSN_CODE (insn);
2716 this_insn_is_asm = insn_code_number < 0;
2717
2718 memcpy (dest: operand_mode, src: recog_data.operand_mode,
2719 n: noperands * sizeof (machine_mode));
2720 memcpy (dest: constraints, src: recog_data.constraints,
2721 n: noperands * sizeof (const char *));
2722
2723 commutative = -1;
2724
2725 /* If we will need to know, later, whether some pair of operands
2726 are the same, we must compare them now and save the result.
2727 Reloading the base and index registers will clobber them
2728 and afterward they will fail to match. */
2729
2730 for (i = 0; i < noperands; i++)
2731 {
2732 const char *p;
2733 int c;
2734 char *end;
2735
2736 substed_operand[i] = recog_data.operand[i];
2737 p = constraints[i];
2738
2739 modified[i] = RELOAD_READ;
2740
2741 /* Scan this operand's constraint to see if it is an output operand,
2742 an in-out operand, is commutative, or should match another. */
2743
2744 while ((c = *p))
2745 {
2746 p += CONSTRAINT_LEN (c, p);
2747 switch (c)
2748 {
2749 case '=':
2750 modified[i] = RELOAD_WRITE;
2751 break;
2752 case '+':
2753 modified[i] = RELOAD_READ_WRITE;
2754 break;
2755 case '%':
2756 {
2757 /* The last operand should not be marked commutative. */
2758 gcc_assert (i != noperands - 1);
2759
2760 /* We currently only support one commutative pair of
2761 operands. Some existing asm code currently uses more
2762 than one pair. Previously, that would usually work,
2763 but sometimes it would crash the compiler. We
2764 continue supporting that case as well as we can by
2765 silently ignoring all but the first pair. In the
2766 future we may handle it correctly. */
2767 if (commutative < 0)
2768 commutative = i;
2769 else
2770 gcc_assert (this_insn_is_asm);
2771 }
2772 break;
2773 /* Use of ISDIGIT is tempting here, but it may get expensive because
2774 of locale support we don't want. */
2775 case '0': case '1': case '2': case '3': case '4':
2776 case '5': case '6': case '7': case '8': case '9':
2777 {
2778 c = strtoul (nptr: p - 1, endptr: &end, base: 10);
2779 p = end;
2780
2781 operands_match[c][i]
2782 = operands_match_p (x: recog_data.operand[c],
2783 y: recog_data.operand[i]);
2784
2785 /* An operand may not match itself. */
2786 gcc_assert (c != i);
2787
2788 /* If C can be commuted with C+1, and C might need to match I,
2789 then C+1 might also need to match I. */
2790 if (commutative >= 0)
2791 {
2792 if (c == commutative || c == commutative + 1)
2793 {
2794 int other = c + (c == commutative ? 1 : -1);
2795 operands_match[other][i]
2796 = operands_match_p (x: recog_data.operand[other],
2797 y: recog_data.operand[i]);
2798 }
2799 if (i == commutative || i == commutative + 1)
2800 {
2801 int other = i + (i == commutative ? 1 : -1);
2802 operands_match[c][other]
2803 = operands_match_p (x: recog_data.operand[c],
2804 y: recog_data.operand[other]);
2805 }
2806 /* Note that C is supposed to be less than I.
2807 No need to consider altering both C and I because in
2808 that case we would alter one into the other. */
2809 }
2810 }
2811 }
2812 }
2813 }
2814
2815 /* Examine each operand that is a memory reference or memory address
2816 and reload parts of the addresses into index registers.
2817 Also here any references to pseudo regs that didn't get hard regs
2818 but are equivalent to constants get replaced in the insn itself
2819 with those constants. Nobody will ever see them again.
2820
2821 Finally, set up the preferred classes of each operand. */
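  /* For instance, a pseudo whose reg_equiv_constant is (const_int 10)
     and which did not get a hard register is rewritten to (const_int 10)
     directly in the insn by the loop below.  */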
2822
2823 for (i = 0; i < noperands; i++)
2824 {
2825 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2826
2827 address_reloaded[i] = 0;
2828 address_operand_reloaded[i] = 0;
2829 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2830 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2831 : RELOAD_OTHER);
2832 address_type[i]
2833 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2834 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2835 : RELOAD_OTHER);
2836
2837 if (*constraints[i] == 0)
2838 /* Ignore things like match_operator operands. */
2839 ;
2840 else if (insn_extra_address_constraint
2841 (c: lookup_constraint (p: constraints[i])))
2842 {
2843 address_operand_reloaded[i]
2844 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2845 recog_data.operand[i],
2846 recog_data.operand_loc[i],
2847 i, operand_type[i], ind_levels, insn);
2848
2849 /* If we now have a simple operand where we used to have a
2850 PLUS or MULT or ASHIFT, re-recognize and try again. */
2851 if ((OBJECT_P (*recog_data.operand_loc[i])
2852 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2853 && (GET_CODE (recog_data.operand[i]) == MULT
2854 || GET_CODE (recog_data.operand[i]) == ASHIFT
2855 || GET_CODE (recog_data.operand[i]) == PLUS))
2856 {
2857 INSN_CODE (insn) = -1;
2858 retval = find_reloads (insn, replace, ind_levels, live_known,
2859 reload_reg_p);
2860 return retval;
2861 }
2862
2863 recog_data.operand[i] = *recog_data.operand_loc[i];
2864 substed_operand[i] = recog_data.operand[i];
2865
2866 /* Address operands are reloaded in their existing mode,
2867 no matter what is specified in the machine description. */
2868 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2869
2870	  /* If the address is a single CONST_INT, pick the address mode
2871	     instead; otherwise we will later not know in which mode
2872 the reload should be performed. */
2873 if (operand_mode[i] == VOIDmode)
2874 operand_mode[i] = Pmode;
2875
2876 }
2877 else if (code == MEM)
2878 {
2879 address_reloaded[i]
2880 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2881 recog_data.operand_loc[i],
2882 XEXP (recog_data.operand[i], 0),
2883 &XEXP (recog_data.operand[i], 0),
2884 i, address_type[i], ind_levels, insn);
2885 recog_data.operand[i] = *recog_data.operand_loc[i];
2886 substed_operand[i] = recog_data.operand[i];
2887 }
2888 else if (code == SUBREG)
2889 {
2890 rtx reg = SUBREG_REG (recog_data.operand[i]);
2891 rtx op
2892 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2893 ind_levels,
2894 set != 0
2895 && &SET_DEST (set) == recog_data.operand_loc[i],
2896 insn,
2897 &address_reloaded[i]);
2898
2899 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2900 that didn't get a hard register, emit a USE with a REG_EQUAL
2901 note in front so that we might inherit a previous, possibly
2902 wider reload. */
2903
2904 if (replace
2905 && MEM_P (op)
2906 && REG_P (reg)
2907 && known_ge (GET_MODE_SIZE (GET_MODE (reg)),
2908 GET_MODE_SIZE (GET_MODE (op)))
2909 && reg_equiv_constant (REGNO (reg)) == 0)
2910 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2911 insn),
2912 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2913
2914 substed_operand[i] = recog_data.operand[i] = op;
2915 }
2916 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2917 /* We can get a PLUS as an "operand" as a result of register
2918 elimination. See eliminate_regs and gen_reload. We handle
2919 a unary operator by reloading the operand. */
2920 substed_operand[i] = recog_data.operand[i]
2921 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2922 ind_levels, 0, insn,
2923 &address_reloaded[i]);
2924 else if (code == REG)
2925 {
2926 /* This is equivalent to calling find_reloads_toplev.
2927 The code is duplicated for speed.
2928 When we find a pseudo always equivalent to a constant,
2929 we replace it by the constant. We must be sure, however,
2930 that we don't try to replace it in the insn in which it
2931 is being set. */
2932 int regno = REGNO (recog_data.operand[i]);
2933 if (reg_equiv_constant (regno) != 0
2934 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2935 {
2936 /* Record the existing mode so that the check if constants are
2937 allowed will work when operand_mode isn't specified. */
2938
2939 if (operand_mode[i] == VOIDmode)
2940 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2941
2942 substed_operand[i] = recog_data.operand[i]
2943 = reg_equiv_constant (regno);
2944 }
2945 if (reg_equiv_memory_loc (regno) != 0
2946 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2947 /* We need not give a valid is_set_dest argument since the case
2948 of a constant equivalence was checked above. */
2949 substed_operand[i] = recog_data.operand[i]
2950 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2951 ind_levels, 0, insn,
2952 &address_reloaded[i]);
2953 }
2954 /* If the operand is still a register (we didn't replace it with an
2955 equivalent), get the preferred class to reload it into. */
2956 code = GET_CODE (recog_data.operand[i]);
2957 preferred_class[i]
2958 = ((code == REG && REGNO (recog_data.operand[i])
2959 >= FIRST_PSEUDO_REGISTER)
2960 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2961 : NO_REGS);
2962 pref_or_nothing[i]
2963 = (code == REG
2964 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2965 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2966 }
2967
2968 /* If this is simply a copy from operand 1 to operand 0, merge the
2969 preferred classes for the operands. */
2970 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2971 && recog_data.operand[1] == SET_SRC (set))
2972 {
2973 preferred_class[0] = preferred_class[1]
2974 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2975 pref_or_nothing[0] |= pref_or_nothing[1];
2976 pref_or_nothing[1] |= pref_or_nothing[0];
2977 }
2978
2979 /* Now see what we need for pseudo-regs that didn't get hard regs
2980 or got the wrong kind of hard reg. For this, we must consider
2981 all the operands together against the register constraints. */
2982
2983 best = MAX_RECOG_OPERANDS * 2 + 600;
2984
2985 goal_alternative_swapped = 0;
2986
2987 /* The constraints are made of several alternatives.
2988 Each operand's constraint looks like foo,bar,... with commas
2989 separating the alternatives. The first alternatives for all
2990 operands go together, the second alternatives go together, etc.
2991
2992 First loop over alternatives. */
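  /* For instance, with two operands constrained "=r,m" and "rI,r",
     alternative 0 pairs "=r" with "rI" and alternative 1 pairs "m" with
     "r"; each pass over the operands below consumes one such column of
     the constraint strings.  */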
2993
2994 alternative_mask enabled = get_enabled_alternatives (insn);
2995 for (this_alternative_number = 0;
2996 this_alternative_number < n_alternatives;
2997 this_alternative_number++)
2998 {
2999 int swapped;
3000
3001 if (!TEST_BIT (enabled, this_alternative_number))
3002 {
3003 int i;
3004
3005 for (i = 0; i < recog_data.n_operands; i++)
3006 constraints[i] = skip_alternative (p: constraints[i]);
3007
3008 continue;
3009 }
3010
3011 /* If insn is commutative (it's safe to exchange a certain pair
3012 of operands) then we need to try each alternative twice, the
3013 second time matching those two operands as if we had
3014 exchanged them. To do this, really exchange them in
3015 operands. */
3016 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3017 {
3018 /* Loop over operands for one constraint alternative. */
3019 /* LOSERS counts those that don't fit this alternative
3020 and would require loading. */
3021 int losers = 0;
3022	    /* BAD is set to 1 if some operand can't fit this alternative
3023 even after reloading. */
3024 int bad = 0;
3025 /* REJECT is a count of how undesirable this alternative says it is
3026 if any reloading is required. If the alternative matches exactly
3027 then REJECT is ignored, but otherwise it gets this much
3028 counted against it in addition to the reloading needed. Each
3029	     ? counts three times here since we want the disparagement caused by
3030 a bad register class to only count 1/3 as much. */
3031 int reject = 0;
3032
3033 if (swapped)
3034 {
3035 recog_data.operand[commutative] = substed_operand[commutative + 1];
3036 recog_data.operand[commutative + 1] = substed_operand[commutative];
3037 /* Swap the duplicates too. */
3038 for (i = 0; i < recog_data.n_dups; i++)
3039 if (recog_data.dup_num[i] == commutative
3040 || recog_data.dup_num[i] == commutative + 1)
3041 *recog_data.dup_loc[i]
3042 = recog_data.operand[(int) recog_data.dup_num[i]];
3043
3044 std::swap (a&: preferred_class[commutative],
3045 b&: preferred_class[commutative + 1]);
3046 std::swap (a&: pref_or_nothing[commutative],
3047 b&: pref_or_nothing[commutative + 1]);
3048 std::swap (a&: address_reloaded[commutative],
3049 b&: address_reloaded[commutative + 1]);
3050 }
3051
3052 this_earlyclobber = 0;
3053
3054 for (i = 0; i < noperands; i++)
3055 {
3056 const char *p = constraints[i];
3057 char *end;
3058 int len;
3059 int win = 0;
3060 int did_match = 0;
3061 /* 0 => this operand can be reloaded somehow for this alternative. */
3062 int badop = 1;
3063 /* 0 => this operand can be reloaded if the alternative allows regs. */
3064 int winreg = 0;
3065 int c;
3066 int m;
3067 rtx operand = recog_data.operand[i];
3068 int offset = 0;
3069 /* Nonzero means this is a MEM that must be reloaded into a reg
3070 regardless of what the constraint says. */
3071 int force_reload = 0;
3072 int offmemok = 0;
3073 /* Nonzero if a constant forced into memory would be OK for this
3074 operand. */
3075 int constmemok = 0;
3076 int earlyclobber = 0;
3077 enum constraint_num cn;
3078 enum reg_class cl;
3079
3080 /* If the operand is a SUBREG, extract
3081 the REG or MEM (or maybe even a constant) within.
3082 (Constants can occur as a result of reg_equiv_constant.) */
3083
3084 while (GET_CODE (operand) == SUBREG)
3085 {
3086 /* Offset only matters when operand is a REG and
3087 it is a hard reg. This is because it is passed
3088 to reg_fits_class_p if it is a REG and all pseudos
3089 return 0 from that function. */
3090 if (REG_P (SUBREG_REG (operand))
3091 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3092 {
3093 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3094 GET_MODE (SUBREG_REG (operand)),
3095 SUBREG_BYTE (operand),
3096 GET_MODE (operand)) < 0)
3097 force_reload = 1;
3098 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3099 GET_MODE (SUBREG_REG (operand)),
3100 SUBREG_BYTE (operand),
3101 GET_MODE (operand));
3102 }
3103 operand = SUBREG_REG (operand);
3104 /* Force reload if this is a constant or PLUS or if there may
3105 be a problem accessing OPERAND in the outer mode. */
3106 scalar_int_mode inner_mode;
3107 if (CONSTANT_P (operand)
3108 || GET_CODE (operand) == PLUS
3109 /* We must force a reload of paradoxical SUBREGs
3110 of a MEM because the alignment of the inner value
3111 may not be enough to do the outer reference. On
3112 big-endian machines, it may also reference outside
3113 the object.
3114
3115 On machines that extend byte loads, if we have a
3116 SUBREG where both the inner and outer modes are no wider
3117 than a word and the inner mode is narrower, integral,
3118 and gets extended when loaded from memory, combine.cc has
3119 made assumptions about the behavior of the machine for such
3120 register accesses. If the data is, in fact, in memory we
3121 must always load using the size assumed to be in the
3122 register and let the insn do the different-sized
3123 accesses.
3124
3125 This is doubly true if WORD_REGISTER_OPERATIONS. In
3126 this case eliminate_regs has left non-paradoxical
3127 subregs for push_reload to see. Make sure it does
3128 by forcing the reload.
3129
3130 ??? When is it right at this stage to have a subreg
3131 of a mem that is _not_ to be handled specially? IMO
3132 those should have been reduced to just a mem. */
3133 || ((MEM_P (operand)
3134 || (REG_P (operand)
3135 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3136 && (WORD_REGISTER_OPERATIONS
3137 || (((maybe_lt
3138 (GET_MODE_BITSIZE (GET_MODE (operand)),
3139 BIGGEST_ALIGNMENT))
3140 && (paradoxical_subreg_p
3141 (operand_mode[i], GET_MODE (operand)))))
3142 || BYTES_BIG_ENDIAN
3143 || (known_le (GET_MODE_SIZE (operand_mode[i]),
3144 UNITS_PER_WORD)
3145 && (is_a <scalar_int_mode>
3146 (GET_MODE (operand), &inner_mode))
3147 && (GET_MODE_SIZE (inner_mode)
3148 <= UNITS_PER_WORD)
3149 && paradoxical_subreg_p (operand_mode[i],
3150 inner_mode)
3151 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3152 /* We must force a reload of a SUBREG's inner expression
3153 if it is a pseudo that will become a MEM and the MEM
3154 has a mode-dependent address, as in that case we
3155 obviously cannot change the mode of the MEM to that
3156 of the containing SUBREG as that would change the
3157 interpretation of the address. */
3158 || (REG_P (operand)
3159 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3160 && reg_equiv_mem (REGNO (operand))
3161 && (mode_dependent_address_p
3162 (XEXP (reg_equiv_mem (REGNO (operand)), 0),
3163 (MEM_ADDR_SPACE
3164 (reg_equiv_mem (REGNO (operand)))))))
3165 )
3166 force_reload = 1;
3167 }
3168
3169 this_alternative[i] = NO_REGS;
3170 this_alternative_win[i] = 0;
3171 this_alternative_match_win[i] = 0;
3172 this_alternative_offmemok[i] = 0;
3173 this_alternative_earlyclobber[i] = 0;
3174 this_alternative_matches[i] = -1;
3175
3176 /* An empty constraint or empty alternative
3177 allows anything which matched the pattern. */
3178 if (*p == 0 || *p == ',')
3179 win = 1, badop = 0;
3180
3181 /* Scan this alternative's specs for this operand;
3182 set WIN if the operand fits any letter in this alternative.
3183 Otherwise, clear BADOP if this operand could
3184 fit some letter after reloads,
3185 or set WINREG if this operand could fit after reloads
3186 provided the constraint allows some registers. */
3187
3188 do
3189 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3190 {
3191 case '\0':
3192 len = 0;
3193 break;
3194 case ',':
3195 c = '\0';
3196 break;
3197
3198 case '?':
3199 reject += 6;
3200 break;
3201
3202 case '!':
3203 reject = 600;
3204 break;
3205
3206 case '#':
3207 /* Ignore rest of this alternative as far as
3208 reloading is concerned. */
3209 do
3210 p++;
3211 while (*p && *p != ',');
3212 len = 0;
3213 break;
3214
3215 case '0': case '1': case '2': case '3': case '4':
3216 case '5': case '6': case '7': case '8': case '9':
3217 m = strtoul (p, &end, 10);
3218 p = end;
3219 len = 0;
3220
3221 this_alternative_matches[i] = m;
3222 /* We are supposed to match a previous operand.
3223 If we do, we win if that one did.
3224 If we do not, count both of the operands as losers.
3225 (This is too conservative, since most of the time
3226 only a single reload insn will be needed to make
3227 the two operands win. As a result, this alternative
3228 may be rejected when it is actually desirable.) */
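 /* For example, if operand 1's constraint is "0", then M is 0 here:
    operand 1 is required to end up in the same place as operand 0, and
    if the two rtx's are not already identical one of them must be
    reloaded into the other's location (or a dummy reload found below).  */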
3229 if ((swapped && (m != commutative || i != commutative + 1))
3230 /* If we are matching as if two operands were swapped,
3231 also pretend that operands_match had been computed
3232 with swapped.
3233 But if I is the second of those and C is the first,
3234 don't exchange them, because operands_match is valid
3235 only on one side of its diagonal. */
3236 ? (operands_match
3237 [(m == commutative || m == commutative + 1)
3238 ? 2 * commutative + 1 - m : m]
3239 [(i == commutative || i == commutative + 1)
3240 ? 2 * commutative + 1 - i : i])
3241 : operands_match[m][i])
3242 {
3243 /* If we are matching a non-offsettable address where an
3244 offsettable address was expected, then we must reject
3245 this combination, because we can't reload it. */
3246 if (this_alternative_offmemok[m]
3247 && MEM_P (recog_data.operand[m])
3248 && this_alternative[m] == NO_REGS
3249 && ! this_alternative_win[m])
3250 bad = 1;
3251
3252 did_match = this_alternative_win[m];
3253 }
3254 else
3255 {
3256 /* Operands don't match. */
3257 rtx value;
3258 int loc1, loc2;
3259 /* Retroactively mark the operand we had to match
3260 as a loser, if it wasn't already. */
3261 if (this_alternative_win[m])
3262 losers++;
3263 this_alternative_win[m] = 0;
3264 if (this_alternative[m] == NO_REGS)
3265 bad = 1;
3266 /* But count the pair only once in the total badness of
3267 this alternative, if the pair can be a dummy reload.
3268 The pointers in operand_loc are not swapped; swap
3269 them by hand if necessary. */
3270 if (swapped && i == commutative)
3271 loc1 = commutative + 1;
3272 else if (swapped && i == commutative + 1)
3273 loc1 = commutative;
3274 else
3275 loc1 = i;
3276 if (swapped && m == commutative)
3277 loc2 = commutative + 1;
3278 else if (swapped && m == commutative + 1)
3279 loc2 = commutative;
3280 else
3281 loc2 = m;
3282 value
3283 = find_dummy_reload (recog_data.operand[i],
3284 recog_data.operand[m],
3285 recog_data.operand_loc[loc1],
3286 recog_data.operand_loc[loc2],
3287 operand_mode[i], operand_mode[m],
3288 this_alternative[m], -1,
3289 this_alternative_earlyclobber[m]);
3290
3291 if (value != 0)
3292 losers--;
3293 }
3294 /* This can be fixed with reloads if the operand
3295 we are supposed to match can be fixed with reloads. */
3296 badop = 0;
3297 this_alternative[i] = this_alternative[m];
3298
3299 /* If we have to reload this operand and some previous
3300 operand also had to match the same thing as this
3301 operand, we don't know how to do that. So reject this
3302 alternative. */
3303 if (! did_match || force_reload)
3304 for (j = 0; j < i; j++)
3305 if (this_alternative_matches[j]
3306 == this_alternative_matches[i])
3307 {
3308 badop = 1;
3309 break;
3310 }
3311 break;
3312
3313 case 'p':
3314 /* All necessary reloads for an address_operand
3315 were handled in find_reloads_address. */
3316 this_alternative[i]
3317 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3318 ADDRESS, SCRATCH, insn);
3319 win = 1;
3320 badop = 0;
3321 break;
3322
3323 case TARGET_MEM_CONSTRAINT:
3324 if (force_reload)
3325 break;
3326 if (MEM_P (operand)
3327 || (REG_P (operand)
3328 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3329 && reg_renumber[REGNO (operand)] < 0))
3330 win = 1;
3331 if (CONST_POOL_OK_P (operand_mode[i], operand))
3332 badop = 0;
3333 constmemok = 1;
3334 break;
3335
3336 case '<':
3337 if (MEM_P (operand)
3338 && ! address_reloaded[i]
3339 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3340 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3341 win = 1;
3342 break;
3343
3344 case '>':
3345 if (MEM_P (operand)
3346 && ! address_reloaded[i]
3347 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3348 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3349 win = 1;
3350 break;
3351
3352 /* Memory operand whose address is not offsettable. */
3353 case 'V':
3354 if (force_reload)
3355 break;
3356 if (MEM_P (operand)
3357 && ! (ind_levels ? offsettable_memref_p (operand)
3358 : offsettable_nonstrict_memref_p (operand))
3359 /* Certain mem addresses will become offsettable
3360 after they themselves are reloaded. This is important;
3361 we don't want our own handling of unoffsettables
3362 to override the handling of reg_equiv_address. */
3363 && !(REG_P (XEXP (operand, 0))
3364 && (ind_levels == 0
3365 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3366 win = 1;
3367 break;
3368
3369 /* Memory operand whose address is offsettable. */
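 /* An address is "offsettable" if adding a small constant to it still
    yields a valid address: e.g. (mem (reg X)) or
    (mem (plus (reg X) (const_int 4))) normally are, while
    (mem (pre_dec (reg X))) never is.  (Illustrative; the precise test
    is target-dependent.)  */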
3370 case 'o':
3371 if (force_reload)
3372 break;
3373 if ((MEM_P (operand)
3374 /* If IND_LEVELS, find_reloads_address won't reload a
3375 pseudo that didn't get a hard reg, so we have to
3376 reject that case. */
3377 && ((ind_levels ? offsettable_memref_p (operand)
3378 : offsettable_nonstrict_memref_p (operand))
3379 /* A reloaded address is offsettable because it is now
3380 just a simple register indirect. */
3381 || address_reloaded[i] == 1))
3382 || (REG_P (operand)
3383 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3384 && reg_renumber[REGNO (operand)] < 0
3385 /* If reg_equiv_address is nonzero, we will be
3386 loading it into a register; hence it will be
3387 offsettable, but we cannot say that reg_equiv_mem
3388 is offsettable without checking. */
3389 && ((reg_equiv_mem (REGNO (operand)) != 0
3390 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3391 || (reg_equiv_address (REGNO (operand)) != 0))))
3392 win = 1;
3393 if (CONST_POOL_OK_P (operand_mode[i], operand)
3394 || MEM_P (operand))
3395 badop = 0;
3396 constmemok = 1;
3397 offmemok = 1;
3398 break;
3399
3400 case '&':
3401 /* Output operand that is stored before the need for the
3402 input operands (and their index registers) is over. */
3403 earlyclobber = 1, this_earlyclobber = 1;
3404 break;
3405
3406 case 'X':
3407 force_reload = 0;
3408 win = 1;
3409 break;
3410
3411 case 'g':
3412 if (! force_reload
3413 /* A PLUS is never a valid operand, but reload can make
3414 it from a register when eliminating registers. */
3415 && GET_CODE (operand) != PLUS
3416 /* A SCRATCH is not a valid operand. */
3417 && GET_CODE (operand) != SCRATCH
3418 && (! CONSTANT_P (operand)
3419 || ! flag_pic
3420 || LEGITIMATE_PIC_OPERAND_P (operand))
3421 && (GENERAL_REGS == ALL_REGS
3422 || !REG_P (operand)
3423 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3424 && reg_renumber[REGNO (operand)] < 0)))
3425 win = 1;
3426 cl = GENERAL_REGS;
3427 goto reg;
3428
3429 default:
3430 cn = lookup_constraint (p);
3431 switch (get_constraint_type (cn))
3432 {
3433 case CT_REGISTER:
3434 cl = reg_class_for_constraint (cn);
3435 if (cl != NO_REGS)
3436 goto reg;
3437 break;
3438
3439 case CT_CONST_INT:
3440 if (CONST_INT_P (operand)
3441 && (insn_const_int_ok_for_constraint
3442 (INTVAL (operand), cn)))
3443 win = true;
3444 break;
3445
3446 case CT_MEMORY:
3447 case CT_RELAXED_MEMORY:
3448 if (force_reload)
3449 break;
3450 if (constraint_satisfied_p (operand, cn))
3451 win = 1;
3452 /* If the address was already reloaded,
3453 we win as well. */
3454 else if (MEM_P (operand) && address_reloaded[i] == 1)
3455 win = 1;
3456 /* Likewise if the address will be reloaded because
3457 reg_equiv_address is nonzero. For reg_equiv_mem
3458 we have to check. */
3459 else if (REG_P (operand)
3460 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3461 && reg_renumber[REGNO (operand)] < 0
3462 && ((reg_equiv_mem (REGNO (operand)) != 0
3463 && (constraint_satisfied_p
3464 (reg_equiv_mem (REGNO (operand)),
3465 cn)))
3466 || (reg_equiv_address (REGNO (operand))
3467 != 0)))
3468 win = 1;
3469
3470 /* If we didn't already win, we can reload
3471 constants via force_const_mem, and other
3472 MEMs by reloading the address like for 'o'. */
3473 if (CONST_POOL_OK_P (operand_mode[i], operand)
3474 || MEM_P (operand))
3475 badop = 0;
3476 constmemok = 1;
3477 offmemok = 1;
3478 break;
3479
3480 case CT_SPECIAL_MEMORY:
3481 if (force_reload)
3482 break;
3483 if (constraint_satisfied_p (operand, cn))
3484 win = 1;
3485 /* Likewise if the address will be reloaded because
3486 reg_equiv_address is nonzero. For reg_equiv_mem
3487 we have to check. */
3488 else if (REG_P (operand)
3489 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3490 && reg_renumber[REGNO (operand)] < 0
3491 && reg_equiv_mem (REGNO (operand)) != 0
3492 && (constraint_satisfied_p
3493 (reg_equiv_mem (REGNO (operand)), cn)))
3494 win = 1;
3495 break;
3496
3497 case CT_ADDRESS:
3498 if (constraint_satisfied_p (operand, cn))
3499 win = 1;
3500
3501 /* If we didn't already win, we can reload
3502 the address into a base register. */
3503 this_alternative[i]
3504 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3505 ADDRESS, SCRATCH, insn);
3506 badop = 0;
3507 break;
3508
3509 case CT_FIXED_FORM:
3510 if (constraint_satisfied_p (operand, cn))
3511 win = 1;
3512 break;
3513 }
3514 break;
3515
3516 reg:
3517 this_alternative[i]
3518 = reg_class_subunion[this_alternative[i]][cl];
3519 if (GET_MODE (operand) == BLKmode)
3520 break;
3521 winreg = 1;
3522 if (REG_P (operand)
3523 && reg_fits_class_p (operand, this_alternative[i],
3524 offset, GET_MODE (recog_data.operand[i])))
3525 win = 1;
3526 break;
3527 }
3528 while ((p += len), c);
3529
3530 if (swapped == (commutative >= 0 ? 1 : 0))
3531 constraints[i] = p;
3532
3533 /* If this operand could be handled with a reg,
3534 and some reg is allowed, then this operand can be handled. */
3535 if (winreg && this_alternative[i] != NO_REGS
3536 && (win || !class_only_fixed_regs[this_alternative[i]]))
3537 badop = 0;
3538
3539 /* Record which operands fit this alternative. */
3540 this_alternative_earlyclobber[i] = earlyclobber;
3541 if (win && ! force_reload)
3542 this_alternative_win[i] = 1;
3543 else if (did_match && ! force_reload)
3544 this_alternative_match_win[i] = 1;
3545 else
3546 {
3547 int const_to_mem = 0;
3548
3549 this_alternative_offmemok[i] = offmemok;
3550 losers++;
3551 if (badop)
3552 bad = 1;
3553 /* Alternative loses if it has no regs for a reg operand. */
3554 if (REG_P (operand)
3555 && this_alternative[i] == NO_REGS
3556 && this_alternative_matches[i] < 0)
3557 bad = 1;
3558
3559 /* If this is a constant that is reloaded into the desired
3560 class by copying it to memory first, count that as another
3561 reload. This is consistent with other code and is
3562 required to avoid choosing another alternative when
3563 the constant is moved into memory by this function on
3564 an early reload pass. Note that the test here is
3565 precisely the same as in the code below that calls
3566 force_const_mem. */
3567 if (CONST_POOL_OK_P (operand_mode[i], operand)
3568 && ((targetm.preferred_reload_class (operand,
3569 this_alternative[i])
3570 == NO_REGS)
3571 || no_input_reloads))
3572 {
3573 const_to_mem = 1;
3574 if (this_alternative[i] != NO_REGS)
3575 losers++;
3576 }
3577
3578 /* Alternative loses if it requires a type of reload not
3579 permitted for this insn. We can always reload SCRATCH
3580 and objects with a REG_UNUSED note. */
3581 if (GET_CODE (operand) != SCRATCH
3582 && modified[i] != RELOAD_READ && no_output_reloads
3583 && ! find_reg_note (insn, REG_UNUSED, operand))
3584 bad = 1;
3585 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3586 && ! const_to_mem)
3587 bad = 1;
3588
3589 /* If we can't reload this value at all, reject this
3590 alternative. Note that we could also lose due to
3591 LIMIT_RELOAD_CLASS, but we don't check that
3592 here. */
3593
3594 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3595 {
3596 if (targetm.preferred_reload_class (operand,
3597 this_alternative[i])
3598 == NO_REGS)
3599 reject = 600;
3600
3601 if (operand_type[i] == RELOAD_FOR_OUTPUT
3602 && (targetm.preferred_output_reload_class (operand,
3603 this_alternative[i])
3604 == NO_REGS))
3605 reject = 600;
3606 }
3607
3608 /* We prefer to reload pseudos over reloading other things,
3609 since such reloads may be able to be eliminated later.
3610 If we are reloading a SCRATCH, we won't be generating any
3611 insns, just using a register, so it is also preferred.
3612 So bump REJECT in other cases. Don't do this in the
3613 case where we are forcing a constant into memory and
3614 it will then win since we don't want to have a different
3615 alternative match then. */
3616 if (! (REG_P (operand)
3617 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3618 && GET_CODE (operand) != SCRATCH
3619 && ! (const_to_mem && constmemok))
3620 reject += 2;
3621
3622 /* Input reloads can be inherited more often than output
3623 reloads can be removed, so penalize output reloads. */
3624 if (operand_type[i] != RELOAD_FOR_INPUT
3625 && GET_CODE (operand) != SCRATCH)
3626 reject++;
3627 }
3628
3629 /* If this operand is a pseudo register that didn't get
3630 a hard reg and this alternative accepts some
3631 register, see if the class that we want is a subset
3632 of the preferred class for this register. If not,
3633 but it intersects that class, we'd like to use the
3634 intersection, but the best we can do is to use the
3635 preferred class, if it is instead a subset of the
3636 class we want in this alternative. If we can't use
3637 it, show that usage of this alternative should be
3638 discouraged; it will be discouraged more still if the
3639 register is `preferred or nothing'. We do this
3640 because it increases the chance of reusing our spill
3641 register in a later insn and avoiding a pair of
3642 memory stores and loads.
3643
3644 Don't bother with this if this alternative will
3645 accept this operand.
3646
3647 Don't do this for a multiword operand, since it is
3648 only a small win and has the risk of requiring more
3649 spill registers, which could cause a large loss.
3650
3651 Don't do this if the preferred class has only one
3652 register because we might otherwise exhaust the
3653 class. */
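 /* For example, if the alternative allows GENERAL_REGS but the pseudo's
    preferred class is a smaller subclass of it (say the target's
    base-register class), this_alternative[i] is narrowed to that
    subclass below, which makes it more likely that a later insn can
    reuse the same spill register.  (Illustrative; the classes involved
    are target-specific.)  */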
3654
3655 if (! win && ! did_match
3656 && this_alternative[i] != NO_REGS
3657 && known_le (GET_MODE_SIZE (operand_mode[i]), UNITS_PER_WORD)
3658 && reg_class_size [(int) preferred_class[i]] > 0
3659 && ! small_register_class_p (preferred_class[i]))
3660 {
3661 if (! reg_class_subset_p (this_alternative[i],
3662 preferred_class[i]))
3663 {
3664 /* Since we don't have a way of forming a register
3665 class for the intersection, we just do
3666 something special if the preferred class is a
3667 subset of the class we have; that's the most
3668 common case anyway. */
3669 if (reg_class_subset_p (preferred_class[i],
3670 this_alternative[i]))
3671 this_alternative[i] = preferred_class[i];
3672 else
3673 reject += (2 + 2 * pref_or_nothing[i]);
3674 }
3675 }
3676 }
3677
3678 /* Now see if any output operands that are marked "earlyclobber"
3679 in this alternative conflict with any input operands
3680 or any memory addresses. */
3681
3682 for (i = 0; i < noperands; i++)
3683 if (this_alternative_earlyclobber[i]
3684 && (this_alternative_win[i] || this_alternative_match_win[i]))
3685 {
3686 struct decomposition early_data;
3687
3688 early_data = decompose (recog_data.operand[i]);
3689
3690 gcc_assert (modified[i] != RELOAD_READ);
3691
3692 if (this_alternative[i] == NO_REGS)
3693 {
3694 this_alternative_earlyclobber[i] = 0;
3695 gcc_assert (this_insn_is_asm);
3696 error_for_asm (this_insn,
3697 "%<&%> constraint used with no register class");
3698 }
3699
3700 for (j = 0; j < noperands; j++)
3701 /* Is this an input operand or a memory ref? */
3702 if ((MEM_P (recog_data.operand[j])
3703 || modified[j] != RELOAD_WRITE)
3704 && j != i
3705 /* Ignore things like match_operator operands. */
3706 && !recog_data.is_operator[j]
3707 /* Don't count an input operand that is constrained to match
3708 the early clobber operand. */
3709 && ! (this_alternative_matches[j] == i
3710 && rtx_equal_p (recog_data.operand[i],
3711 recog_data.operand[j]))
3712 /* Is it altered by storing the earlyclobber operand? */
3713 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3714 early_data))
3715 {
3716 /* If the output is in a non-empty few-regs class,
3717 it's costly to reload it, so reload the input instead. */
3718 if (small_register_class_p (this_alternative[i])
3719 && (REG_P (recog_data.operand[j])
3720 || GET_CODE (recog_data.operand[j]) == SUBREG))
3721 {
3722 losers++;
3723 this_alternative_win[j] = 0;
3724 this_alternative_match_win[j] = 0;
3725 }
3726 else
3727 break;
3728 }
3729 /* If an earlyclobber operand conflicts with something,
3730 it must be reloaded, so request this and count the cost. */
3731 if (j != noperands)
3732 {
3733 losers++;
3734 this_alternative_win[i] = 0;
3735 this_alternative_match_win[j] = 0;
3736 for (j = 0; j < noperands; j++)
3737 if (this_alternative_matches[j] == i
3738 && this_alternative_match_win[j])
3739 {
3740 this_alternative_win[j] = 0;
3741 this_alternative_match_win[j] = 0;
3742 losers++;
3743 }
3744 }
3745 }
3746
3747 /* If one alternative accepts all the operands, no reload required,
3748 choose that alternative; don't consider the remaining ones. */
3749 if (losers == 0)
3750 {
3751 /* Unswap these so that they are never swapped at `finish'. */
3752 if (swapped)
3753 {
3754 recog_data.operand[commutative] = substed_operand[commutative];
3755 recog_data.operand[commutative + 1]
3756 = substed_operand[commutative + 1];
3757 }
3758 for (i = 0; i < noperands; i++)
3759 {
3760 goal_alternative_win[i] = this_alternative_win[i];
3761 goal_alternative_match_win[i] = this_alternative_match_win[i];
3762 goal_alternative[i] = this_alternative[i];
3763 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3764 goal_alternative_matches[i] = this_alternative_matches[i];
3765 goal_alternative_earlyclobber[i]
3766 = this_alternative_earlyclobber[i];
3767 }
3768 goal_alternative_number = this_alternative_number;
3769 goal_alternative_swapped = swapped;
3770 goal_earlyclobber = this_earlyclobber;
3771 goto finish;
3772 }
3773
3774 /* REJECT, set by the ! and ? constraint characters and when a register
3775 would be reloaded into a non-preferred class, discourages the use of
3776 this alternative for a reload goal. REJECT is incremented by six
3777 for each ? and two for each non-preferred class. */
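 /* For example, an alternative that needs one reload but carries a
    single ? scores 1*6 + 6 = 12 below, the same as an alternative
    needing two reloads and no ?; since the comparison with BEST is
    strict, ties go to the alternative examined first.  */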
3778 losers = losers * 6 + reject;
3779
3780 /* If this alternative can be made to work by reloading,
3781 and it needs less reloading than the others checked so far,
3782 record it as the chosen goal for reloading. */
3783 if (! bad)
3784 {
3785 if (best > losers)
3786 {
3787 for (i = 0; i < noperands; i++)
3788 {
3789 goal_alternative[i] = this_alternative[i];
3790 goal_alternative_win[i] = this_alternative_win[i];
3791 goal_alternative_match_win[i]
3792 = this_alternative_match_win[i];
3793 goal_alternative_offmemok[i]
3794 = this_alternative_offmemok[i];
3795 goal_alternative_matches[i] = this_alternative_matches[i];
3796 goal_alternative_earlyclobber[i]
3797 = this_alternative_earlyclobber[i];
3798 }
3799 goal_alternative_swapped = swapped;
3800 best = losers;
3801 goal_alternative_number = this_alternative_number;
3802 goal_earlyclobber = this_earlyclobber;
3803 }
3804 }
3805
3806 if (swapped)
3807 {
3808 /* If the commutative operands have been swapped, swap
3809 them back in order to check the next alternative. */
3810 recog_data.operand[commutative] = substed_operand[commutative];
3811 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3812 /* Unswap the duplicates too. */
3813 for (i = 0; i < recog_data.n_dups; i++)
3814 if (recog_data.dup_num[i] == commutative
3815 || recog_data.dup_num[i] == commutative + 1)
3816 *recog_data.dup_loc[i]
3817 = recog_data.operand[(int) recog_data.dup_num[i]];
3818
3819 /* Unswap the operand related information as well. */
3820 std::swap (preferred_class[commutative],
3821 preferred_class[commutative + 1]);
3822 std::swap (pref_or_nothing[commutative],
3823 pref_or_nothing[commutative + 1]);
3824 std::swap (address_reloaded[commutative],
3825 address_reloaded[commutative + 1]);
3826 }
3827 }
3828 }
3829
3830 /* The operands don't meet the constraints.
3831 goal_alternative describes the alternative
3832 that we could reach by reloading the fewest operands.
3833 Reload so as to fit it. */
3834
3835 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3836 {
3837 /* No alternative works with reloads?? */
3838 if (insn_code_number >= 0)
3839 fatal_insn ("unable to generate reloads for:", insn);
3840 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3841 /* Avoid further trouble with this insn. */
3842 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3843 n_reloads = 0;
3844 return 0;
3845 }
3846
3847 /* Jump to `finish' from above if all operands are valid already.
3848 In that case, goal_alternative_win is all 1. */
3849 finish:
3850
3851 /* Right now, for any pair of operands I and J that are required to match,
3852 with I < J,
3853 goal_alternative_matches[J] is I.
3854 Set up goal_alternative_matched as the inverse function:
3855 goal_alternative_matched[I] = J. */
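 /* For example, if operand 2's constraint was "0" and operand 2 still
    needs a reload, goal_alternative_matches[2] is 0 and the loops below
    set goal_alternative_matched[0] = 2.  */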
3856
3857 for (i = 0; i < noperands; i++)
3858 goal_alternative_matched[i] = -1;
3859
3860 for (i = 0; i < noperands; i++)
3861 if (! goal_alternative_win[i]
3862 && goal_alternative_matches[i] >= 0)
3863 goal_alternative_matched[goal_alternative_matches[i]] = i;
3864
3865 for (i = 0; i < noperands; i++)
3866 goal_alternative_win[i] |= goal_alternative_match_win[i];
3867
3868 /* If the best alternative is with operands 1 and 2 swapped,
3869 consider them swapped before reporting the reloads. Update the
3870 operand numbers of any reloads already pushed. */
3871
3872 if (goal_alternative_swapped)
3873 {
3874 std::swap (substed_operand[commutative],
3875 substed_operand[commutative + 1]);
3876 std::swap (recog_data.operand[commutative],
3877 recog_data.operand[commutative + 1]);
3878 std::swap (*recog_data.operand_loc[commutative],
3879 *recog_data.operand_loc[commutative + 1]);
3880
3881 for (i = 0; i < recog_data.n_dups; i++)
3882 if (recog_data.dup_num[i] == commutative
3883 || recog_data.dup_num[i] == commutative + 1)
3884 *recog_data.dup_loc[i]
3885 = recog_data.operand[(int) recog_data.dup_num[i]];
3886
3887 for (i = 0; i < n_reloads; i++)
3888 {
3889 if (rld[i].opnum == commutative)
3890 rld[i].opnum = commutative + 1;
3891 else if (rld[i].opnum == commutative + 1)
3892 rld[i].opnum = commutative;
3893 }
3894 }
3895
3896 for (i = 0; i < noperands; i++)
3897 {
3898 operand_reloadnum[i] = -1;
3899
3900 /* If this is an earlyclobber operand, we need to widen the scope.
3901 The reload must remain valid from the start of the insn being
3902 reloaded until after the operand is stored into its destination.
3903 We approximate this with RELOAD_OTHER even though we know that we
3904 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3905
3906 One special case that is worth checking is when we have an
3907 output that is earlyclobber but isn't used past the insn (typically
3908 a SCRATCH). In this case, we only need have the reload live
3909 through the insn itself, but not for any of our input or output
3910 reloads.
3911 But we must not accidentally narrow the scope of an existing
3912 RELOAD_OTHER reload - leave these alone.
3913
3914 In any case, anything needed to address this operand can remain
3915 however it was previously categorized. */
3916
3917 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3918 operand_type[i]
3919 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3920 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3921 }
3922
3923 /* Any constants that aren't allowed and can't be reloaded
3924 into registers are here changed into memory references. */
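 /* For example, a CONST_DOUBLE that targetm.preferred_reload_class
    refuses to place in the class chosen for its operand is copied into
    the constant pool by force_const_mem below, and the operand becomes
    a MEM referring to that pool entry.  */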
3925 for (i = 0; i < noperands; i++)
3926 if (! goal_alternative_win[i])
3927 {
3928 rtx op = recog_data.operand[i];
3929 rtx subreg = NULL_RTX;
3930 rtx plus = NULL_RTX;
3931 machine_mode mode = operand_mode[i];
3932
3933 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3934 push_reload so we have to let them pass here. */
3935 if (GET_CODE (op) == SUBREG)
3936 {
3937 subreg = op;
3938 op = SUBREG_REG (op);
3939 mode = GET_MODE (op);
3940 }
3941
3942 if (GET_CODE (op) == PLUS)
3943 {
3944 plus = op;
3945 op = XEXP (op, 1);
3946 }
3947
3948 if (CONST_POOL_OK_P (mode, op)
3949 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3950 == NO_REGS)
3951 || no_input_reloads))
3952 {
3953 int this_address_reloaded;
3954 rtx tem = force_const_mem (mode, op);
3955
3956 /* If we stripped a SUBREG or a PLUS above add it back. */
3957 if (plus != NULL_RTX)
3958 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3959
3960 if (subreg != NULL_RTX)
3961 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3962
3963 this_address_reloaded = 0;
3964 substed_operand[i] = recog_data.operand[i]
3965 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3966 0, insn, &this_address_reloaded);
3967
3968 /* If the alternative accepts constant pool refs directly
3969 there will be no reload needed at all. */
3970 if (plus == NULL_RTX
3971 && subreg == NULL_RTX
3972 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3973 ? substed_operand[i]
3974 : NULL,
3975 recog_data.constraints[i],
3976 goal_alternative_number))
3977 goal_alternative_win[i] = 1;
3978 }
3979 }
3980
3981 /* Record the values of the earlyclobber operands for the caller. */
3982 if (goal_earlyclobber)
3983 for (i = 0; i < noperands; i++)
3984 if (goal_alternative_earlyclobber[i])
3985 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3986
3987 /* Now record reloads for all the operands that need them. */
3988 for (i = 0; i < noperands; i++)
3989 if (! goal_alternative_win[i])
3990 {
3991 /* Operands that match previous ones have already been handled. */
3992 if (goal_alternative_matches[i] >= 0)
3993 ;
3994 /* Handle an operand with a nonoffsettable address
3995 appearing where an offsettable address will do
3996 by reloading the address into a base register.
3997
3998 ??? We can also do this when the operand is a register and
3999 reg_equiv_mem is not offsettable, but this is a bit tricky,
4000 so we don't bother with it. It may not be worth doing. */
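 /* For example, (mem (plus (reg A) (reg B))) is usually not
    offsettable; reloading just its address into a base register leaves
    (mem (reg R)), which is, so only the address needs a reload here.
    (Illustrative; offsettability is target-specific.)  */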
4001 else if (goal_alternative_matched[i] == -1
4002 && goal_alternative_offmemok[i]
4003 && MEM_P (recog_data.operand[i]))
4004 {
4005 /* If the address to be reloaded is a VOIDmode constant,
4006 use the default address mode as mode of the reload register,
4007 as would have been done by find_reloads_address. */
4008 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4009 machine_mode address_mode;
4010
4011 address_mode = get_address_mode (recog_data.operand[i]);
4012 operand_reloadnum[i]
4013 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4014 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4015 base_reg_class (VOIDmode, as, MEM, SCRATCH, insn),
4016 address_mode,
4017 VOIDmode, 0, 0, i, RELOAD_OTHER);
4018 rld[operand_reloadnum[i]].inc
4019 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4020
4021 /* If this operand is an output, we will have made any
4022 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4023 now we are treating part of the operand as an input, so
4024 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
4025
4026 if (modified[i] == RELOAD_WRITE)
4027 {
4028 for (j = 0; j < n_reloads; j++)
4029 {
4030 if (rld[j].opnum == i)
4031 {
4032 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4033 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4034 else if (rld[j].when_needed
4035 == RELOAD_FOR_OUTADDR_ADDRESS)
4036 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4037 }
4038 }
4039 }
4040 }
4041 else if (goal_alternative_matched[i] == -1)
4042 {
4043 operand_reloadnum[i]
4044 = push_reload ((modified[i] != RELOAD_WRITE
4045 ? recog_data.operand[i] : 0),
4046 (modified[i] != RELOAD_READ
4047 ? recog_data.operand[i] : 0),
4048 (modified[i] != RELOAD_WRITE
4049 ? recog_data.operand_loc[i] : 0),
4050 (modified[i] != RELOAD_READ
4051 ? recog_data.operand_loc[i] : 0),
4052 (enum reg_class) goal_alternative[i],
4053 (modified[i] == RELOAD_WRITE
4054 ? VOIDmode : operand_mode[i]),
4055 (modified[i] == RELOAD_READ
4056 ? VOIDmode : operand_mode[i]),
4057 (insn_code_number < 0 ? 0
4058 : insn_data[insn_code_number].operand[i].strict_low),
4059 0, i, operand_type[i]);
4060 }
4061 /* In a matching pair of operands, one must be input only
4062 and the other must be output only.
4063 Pass the input operand as IN and the other as OUT. */
4064 else if (modified[i] == RELOAD_READ
4065 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4066 {
4067 operand_reloadnum[i]
4068 = push_reload (recog_data.operand[i],
4069 recog_data.operand[goal_alternative_matched[i]],
4070 recog_data.operand_loc[i],
4071 recog_data.operand_loc[goal_alternative_matched[i]],
4072 (enum reg_class) goal_alternative[i],
4073 operand_mode[i],
4074 operand_mode[goal_alternative_matched[i]],
4075 0, 0, i, RELOAD_OTHER);
4076 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4077 }
4078 else if (modified[i] == RELOAD_WRITE
4079 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4080 {
4081 operand_reloadnum[goal_alternative_matched[i]]
4082 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4083 recog_data.operand[i],
4084 recog_data.operand_loc[goal_alternative_matched[i]],
4085 recog_data.operand_loc[i],
4086 (enum reg_class) goal_alternative[i],
4087 operand_mode[goal_alternative_matched[i]],
4088 operand_mode[i],
4089 0, 0, i, RELOAD_OTHER);
4090 operand_reloadnum[i] = output_reloadnum;
4091 }
4092 else
4093 {
4094 gcc_assert (insn_code_number < 0);
4095 error_for_asm (insn, "inconsistent operand constraints "
4096 "in an %<asm%>");
4097 /* Avoid further trouble with this insn. */
4098 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4099 n_reloads = 0;
4100 return 0;
4101 }
4102 }
4103 else if (goal_alternative_matched[i] < 0
4104 && goal_alternative_matches[i] < 0
4105 && address_operand_reloaded[i] != 1
4106 && optimize)
4107 {
4108 /* For each non-matching operand that's a MEM or a pseudo-register
4109 that didn't get a hard register, make an optional reload.
4110 This may get done even if the insn needs no reloads otherwise. */
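 /* An optional reload (OPTIONAL argument of 1 in the push_reload call
    below) is one that reload1 may simply drop if no reload register
    turns out to be conveniently available; the operand is then left
    where it is.  */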
4111
4112 rtx operand = recog_data.operand[i];
4113
4114 while (GET_CODE (operand) == SUBREG)
4115 operand = SUBREG_REG (operand);
4116 if ((MEM_P (operand)
4117 || (REG_P (operand)
4118 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4119 /* If this is only for an output, the optional reload would not
4120 actually cause us to use a register now, just note that
4121 something is stored here. */
4122 && (goal_alternative[i] != NO_REGS
4123 || modified[i] == RELOAD_WRITE)
4124 && ! no_input_reloads
4125 /* An optional output reload might allow INSN to be deleted later.
4126 We mustn't make in-out reloads on insns that are not permitted
4127 to have output reloads.
4128 If this is an asm, we can't delete it; we must not even call
4129 push_reload for an optional output reload in this case,
4130 because we can't be sure that the constraint allows a register,
4131 and push_reload verifies the constraints for asms. */
4132 && (modified[i] == RELOAD_READ
4133 || (! no_output_reloads && ! this_insn_is_asm)))
4134 operand_reloadnum[i]
4135 = push_reload ((modified[i] != RELOAD_WRITE
4136 ? recog_data.operand[i] : 0),
4137 (modified[i] != RELOAD_READ
4138 ? recog_data.operand[i] : 0),
4139 (modified[i] != RELOAD_WRITE
4140 ? recog_data.operand_loc[i] : 0),
4141 (modified[i] != RELOAD_READ
4142 ? recog_data.operand_loc[i] : 0),
4143 (enum reg_class) goal_alternative[i],
4144 (modified[i] == RELOAD_WRITE
4145 ? VOIDmode : operand_mode[i]),
4146 (modified[i] == RELOAD_READ
4147 ? VOIDmode : operand_mode[i]),
4148 (insn_code_number < 0 ? 0
4149 : insn_data[insn_code_number].operand[i].strict_low),
4150 1, i, operand_type[i]);
4151 /* If a memory reference remains (either as a MEM or a pseudo that
4152 did not get a hard register), yet we can't make an optional
4153 reload, check if this is actually a pseudo register reference;
4154 we then need to emit a USE and/or a CLOBBER so that reload
4155 inheritance will do the right thing. */
4156 else if (replace
4157 && (MEM_P (operand)
4158 || (REG_P (operand)
4159 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4160 && reg_renumber [REGNO (operand)] < 0)))
4161 {
4162 operand = *recog_data.operand_loc[i];
4163
4164 while (GET_CODE (operand) == SUBREG)
4165 operand = SUBREG_REG (operand);
4166 if (REG_P (operand))
4167 {
4168 if (modified[i] != RELOAD_WRITE)
4169 /* We mark the USE with QImode so that we recognize
4170 it as one that can be safely deleted at the end
4171 of reload. */
4172 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4173 insn), QImode);
4174 if (modified[i] != RELOAD_READ)
4175 emit_insn_after (gen_clobber (operand), insn);
4176 }
4177 }
4178 }
4179 else if (goal_alternative_matches[i] >= 0
4180 && goal_alternative_win[goal_alternative_matches[i]]
4181 && modified[i] == RELOAD_READ
4182 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4183 && ! no_input_reloads && ! no_output_reloads
4184 && optimize)
4185 {
4186 /* Similarly, make an optional reload for a pair of matching
4187 objects that are in MEM or a pseudo that didn't get a hard reg. */
4188
4189 rtx operand = recog_data.operand[i];
4190
4191 while (GET_CODE (operand) == SUBREG)
4192 operand = SUBREG_REG (operand);
4193 if ((MEM_P (operand)
4194 || (REG_P (operand)
4195 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4196 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4197 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4198 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4199 recog_data.operand[i],
4200 recog_data.operand_loc[goal_alternative_matches[i]],
4201 recog_data.operand_loc[i],
4202 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4203 operand_mode[goal_alternative_matches[i]],
4204 operand_mode[i],
4205 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4206 }
4207
4208 /* Perform whatever substitutions on the operands we are supposed
4209 to make due to commutativity or replacement of registers
4210 with equivalent constants or memory slots. */
4211
4212 for (i = 0; i < noperands; i++)
4213 {
4214 /* We only do this on the last pass through reload, because it is
4215 possible for some data (like reg_equiv_address) to be changed during
4216 later passes. Moreover, we lose the opportunity to get a useful
4217 reload_{in,out}_reg when we do these replacements. */
4218
4219 if (replace)
4220 {
4221 rtx substitution = substed_operand[i];
4222
4223 *recog_data.operand_loc[i] = substitution;
4224
4225 /* If we're replacing an operand with a LABEL_REF, we need to
4226 make sure that there's a REG_LABEL_OPERAND note attached to
4227 this instruction. */
4228 if (GET_CODE (substitution) == LABEL_REF
4229 && !find_reg_note (insn, REG_LABEL_OPERAND,
4230 label_ref_label (substitution))
4231 /* For a JUMP_P, if it was a branch target it must have
4232 already been recorded as such. */
4233 && (!JUMP_P (insn)
4234 || !label_is_jump_target_p (label_ref_label (substitution),
4235 insn)))
4236 {
4237 add_reg_note (insn, REG_LABEL_OPERAND,
4238 label_ref_label (substitution));
4239 if (LABEL_P (label_ref_label (substitution)))
4240 ++LABEL_NUSES (label_ref_label (substitution));
4241 }
4242
4243 }
4244 else
4245 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4246 }
4247
4248 /* If this insn pattern contains any MATCH_DUP's, make sure that
4249 they will be substituted if the operands they match are substituted.
4250 Also do now any substitutions we already did on the operands.
4251
4252 Don't do this if we aren't making replacements because we might be
4253 propagating things allocated by frame pointer elimination into places
4254 it doesn't expect. */
4255
4256 if (insn_code_number >= 0 && replace)
4257 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4258 {
4259 int opno = recog_data.dup_num[i];
4260 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4261 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4262 }
4263
4264#if 0
4265 /* This loses because reloading of prior insns can invalidate the equivalence
4266 (or at least find_equiv_reg isn't smart enough to find it any more),
4267 causing this insn to need more reload regs than it needed before.
4268 It may be too late to make the reload regs available.
4269 Now this optimization is done safely in choose_reload_regs. */
4270
4271 /* For each reload of a reg into some other class of reg,
4272 search for an existing equivalent reg (same value now) in the right class.
4273 We can use it as long as we don't need to change its contents. */
4274 for (i = 0; i < n_reloads; i++)
4275 if (rld[i].reg_rtx == 0
4276 && rld[i].in != 0
4277 && REG_P (rld[i].in)
4278 && rld[i].out == 0)
4279 {
4280 rld[i].reg_rtx
4281 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4282 static_reload_reg_p, 0, rld[i].inmode);
4283 /* Prevent generation of insn to load the value
4284 because the one we found already has the value. */
4285 if (rld[i].reg_rtx)
4286 rld[i].in = rld[i].reg_rtx;
4287 }
4288#endif
4289
4290 /* If we detected an error and replaced the asm instruction by a USE,
4291 forget about the reloads. */
4292 if (GET_CODE (PATTERN (insn)) == USE
4293 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4294 n_reloads = 0;
4295
4296 /* Perhaps an output reload can be combined with another
4297 to reduce needs by one. */
4298 if (!goal_earlyclobber)
4299 combine_reloads ();
4300
4301 /* If we have a pair of reloads for parts of an address, they are reloading
4302 the same object, the operands themselves were not reloaded, and they
4303 are for two operands that are supposed to match, merge the reloads and
4304 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4305
4306 for (i = 0; i < n_reloads; i++)
4307 {
4308 int k;
4309
4310 for (j = i + 1; j < n_reloads; j++)
4311 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4312 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4313 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4314 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4315 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4316 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4317 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4318 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4319 && rtx_equal_p (rld[i].in, rld[j].in)
4320 && (operand_reloadnum[rld[i].opnum] < 0
4321 || rld[operand_reloadnum[rld[i].opnum]].optional)
4322 && (operand_reloadnum[rld[j].opnum] < 0
4323 || rld[operand_reloadnum[rld[j].opnum]].optional)
4324 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4325 || (goal_alternative_matches[rld[j].opnum]
4326 == rld[i].opnum)))
4327 {
4328 for (k = 0; k < n_replacements; k++)
4329 if (replacements[k].what == j)
4330 replacements[k].what = i;
4331
4332 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4333 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4334 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4335 else
4336 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4337 rld[j].in = 0;
4338 }
4339 }
4340
4341 /* Scan all the reloads and update their type.
4342 If a reload is for the address of an operand and we didn't reload
4343 that operand, change the type. Similarly, change the operand number
4344 of a reload when two operands match. If a reload is optional, treat it
4345 as though the operand isn't reloaded.
4346
4347 ??? This latter case is somewhat odd because if we do the optional
4348 reload, it means the object is hanging around. Thus we need only
4349 do the address reload if the optional reload was NOT done.
4350
4351 Change secondary reloads to be the address type of their operand, not
4352 the normal type.
4353
4354 If an operand's reload is now RELOAD_OTHER, change any
4355 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4356 RELOAD_FOR_OTHER_ADDRESS. */
4357
4358 for (i = 0; i < n_reloads; i++)
4359 {
4360 if (rld[i].secondary_p
4361 && rld[i].when_needed == operand_type[rld[i].opnum])
4362 rld[i].when_needed = address_type[rld[i].opnum];
4363
4364 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4365 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4366 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4367 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4368 && (operand_reloadnum[rld[i].opnum] < 0
4369 || rld[operand_reloadnum[rld[i].opnum]].optional))
4370 {
4371 /* If we have a secondary reload to go along with this reload,
4372 change its type to RELOAD_FOR_OPADDR_ADDR. */
4373
4374 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4375 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4376 && rld[i].secondary_in_reload != -1)
4377 {
4378 int secondary_in_reload = rld[i].secondary_in_reload;
4379
4380 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4381
4382 /* If there's a tertiary reload we have to change it also. */
4383 if (secondary_in_reload > 0
4384 && rld[secondary_in_reload].secondary_in_reload != -1)
4385 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4386 = RELOAD_FOR_OPADDR_ADDR;
4387 }
4388
4389 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4390 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4391 && rld[i].secondary_out_reload != -1)
4392 {
4393 int secondary_out_reload = rld[i].secondary_out_reload;
4394
4395 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4396
4397 /* If there's a tertiary reload we have to change it also. */
4398 if (secondary_out_reload
4399 && rld[secondary_out_reload].secondary_out_reload != -1)
4400 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4401 = RELOAD_FOR_OPADDR_ADDR;
4402 }
4403
4404 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4405 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4406 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4407 else
4408 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4409 }
4410
4411 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4412 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4413 && operand_reloadnum[rld[i].opnum] >= 0
4414 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4415 == RELOAD_OTHER))
4416 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4417
4418 if (goal_alternative_matches[rld[i].opnum] >= 0)
4419 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4420 }
4421
4422 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4423 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4424 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4425
4426 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4427 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4428 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4429 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4430 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4431 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4432 This is complicated by the fact that a single operand can have more
4433 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4434 choose_reload_regs without affecting code quality, and cases that
4435 actually fail are extremely rare, so it turns out to be better to fix
4436 the problem here by not generating cases that choose_reload_regs will
4437 fail for. */
4438 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4439 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4440 a single operand.
4441 We can reduce the register pressure by exploiting that a
4442 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4443 does not conflict with any of them, if it is only used for the first of
4444 the RELOAD_FOR_X_ADDRESS reloads. */
4445 {
4446 int first_op_addr_num = -2;
4447 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4448 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4449 int need_change = 0;
4450 /* We use first_op_addr_num and the contents of the above arrays
4451 first as flags - -2 means no instance encountered, -1 means exactly
4452 one instance encountered.
4453 If more than one instance has been encountered, we store the reload
4454 number of the first reload of the kind in question; reload numbers
4455 are known to be non-negative. */
4456 for (i = 0; i < noperands; i++)
4457 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4458 for (i = n_reloads - 1; i >= 0; i--)
4459 {
4460 switch (rld[i].when_needed)
4461 {
4462 case RELOAD_FOR_OPERAND_ADDRESS:
4463 if (++first_op_addr_num >= 0)
4464 {
4465 first_op_addr_num = i;
4466 need_change = 1;
4467 }
4468 break;
4469 case RELOAD_FOR_INPUT_ADDRESS:
4470 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4471 {
4472 first_inpaddr_num[rld[i].opnum] = i;
4473 need_change = 1;
4474 }
4475 break;
4476 case RELOAD_FOR_OUTPUT_ADDRESS:
4477 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4478 {
4479 first_outpaddr_num[rld[i].opnum] = i;
4480 need_change = 1;
4481 }
4482 break;
4483 default:
4484 break;
4485 }
4486 }
4487
4488 if (need_change)
4489 {
4490 for (i = 0; i < n_reloads; i++)
4491 {
4492 int first_num;
4493 enum reload_type type;
4494
4495 switch (rld[i].when_needed)
4496 {
4497 case RELOAD_FOR_OPADDR_ADDR:
4498 first_num = first_op_addr_num;
4499 type = RELOAD_FOR_OPERAND_ADDRESS;
4500 break;
4501 case RELOAD_FOR_INPADDR_ADDRESS:
4502 first_num = first_inpaddr_num[rld[i].opnum];
4503 type = RELOAD_FOR_INPUT_ADDRESS;
4504 break;
4505 case RELOAD_FOR_OUTADDR_ADDRESS:
4506 first_num = first_outpaddr_num[rld[i].opnum];
4507 type = RELOAD_FOR_OUTPUT_ADDRESS;
4508 break;
4509 default:
4510 continue;
4511 }
4512 if (first_num < 0)
4513 continue;
4514 else if (i > first_num)
4515 rld[i].when_needed = type;
4516 else
4517 {
4518 /* Check if the only TYPE reload that uses reload I is
4519 reload FIRST_NUM. */
4520 for (j = n_reloads - 1; j > first_num; j--)
4521 {
4522 if (rld[j].when_needed == type
4523 && (rld[i].secondary_p
4524 ? rld[j].secondary_in_reload == i
4525 : reg_mentioned_p (rld[i].in, rld[j].in)))
4526 {
4527 rld[i].when_needed = type;
4528 break;
4529 }
4530 }
4531 }
4532 }
4533 }
4534 }
4535
4536 /* See if we have any reloads that are now allowed to be merged
4537 because we've changed when the reload is needed to
4538 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4539 check for the most common cases. */
4540
4541 for (i = 0; i < n_reloads; i++)
4542 if (rld[i].in != 0 && rld[i].out == 0
4543 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4544 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4545 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4546 for (j = 0; j < n_reloads; j++)
4547 if (i != j && rld[j].in != 0 && rld[j].out == 0
4548 && rld[j].when_needed == rld[i].when_needed
4549 && MATCHES (rld[i].in, rld[j].in)
4550 && rld[i].rclass == rld[j].rclass
4551 && !rld[i].nocombine && !rld[j].nocombine
4552 && rld[i].reg_rtx == rld[j].reg_rtx)
4553 {
4554 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4555 transfer_replacements (i, j);
4556 rld[j].in = 0;
4557 }
4558
4559 /* Compute reload_mode and reload_nregs. */
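 /* The reload register must be wide enough for both uses, so MODE is
    the wider of INMODE and OUTMODE (or OUTMODE alone when INMODE is
    VOIDmode), and NREGS is the number of hard registers of the chosen
    class that this mode occupies.  */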
4560 for (i = 0; i < n_reloads; i++)
4561 {
4562 rld[i].mode = rld[i].inmode;
4563 if (rld[i].mode == VOIDmode
4564 || partial_subreg_p (rld[i].mode, rld[i].outmode))
4565 rld[i].mode = rld[i].outmode;
4566
4567 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4568 }
4569
4570 /* Special case a simple move with an input reload and a
4571 destination of a hard reg, if the hard reg is ok, use it. */
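 /* For example, for (set (reg:SI 1 dx) (reg:SI 123)) where pseudo 123
    needs an input reload into GENERAL_REGS, dx itself can serve as the
    reload register, avoiding a separate copy.  (Illustrative register
    numbers and names.)  */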
4572 for (i = 0; i < n_reloads; i++)
4573 if (rld[i].when_needed == RELOAD_FOR_INPUT
4574 && GET_CODE (PATTERN (insn)) == SET
4575 && REG_P (SET_DEST (PATTERN (insn)))
4576 && (SET_SRC (PATTERN (insn)) == rld[i].in
4577 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4578 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4579 {
4580 rtx dest = SET_DEST (PATTERN (insn));
4581 unsigned int regno = REGNO (dest);
4582
4583 if (regno < FIRST_PSEUDO_REGISTER
4584 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4585 && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4586 {
4587 int nr = hard_regno_nregs (regno, rld[i].mode);
4588 int ok = 1, nri;
4589
4590 for (nri = 1; nri < nr; nri ++)
4591 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4592 {
4593 ok = 0;
4594 break;
4595 }
4596
4597 if (ok)
4598 rld[i].reg_rtx = dest;
4599 }
4600 }
4601
4602 return retval;
4603}
4604
4605/* Return true if alternative number ALTNUM in constraint-string
4606 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4607 MEM gives the reference if its address hasn't been fully reloaded,
4608 otherwise it is NULL. */
4609
4610static bool
4611alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4612 const char *constraint, int altnum)
4613{
4614 int c;
4615
4616 /* Skip alternatives before the one requested. */
4617 while (altnum > 0)
4618 {
4619 while (*constraint++ != ',')
4620 ;
4621 altnum--;
4622 }
4623 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4624 If one of them is present, this alternative accepts the result of
4625 passing a constant-pool reference through find_reloads_toplev.
4626
4627 The same is true of extra memory constraints if the address
4628 was reloaded into a register. However, the target may elect
4629 to disallow the original constant address, forcing it to be
4630 reloaded into a register instead. */
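 /* For example, with CONSTRAINT "r,m" and ALTNUM 1, only the "m"
    alternative is scanned; being a memory constraint, it accepts the
    reloaded constant-pool reference (provided MEM, when given,
    satisfies it).  (Illustrative constraint string.)  */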
4631 for (; (c = *constraint) && c != ',' && c != '#';
4632 constraint += CONSTRAINT_LEN (c, constraint))
4633 {
4634 enum constraint_num cn = lookup_constraint (constraint);
4635 if (insn_extra_memory_constraint (cn)
4636 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4637 return true;
4638 }
4639 return false;
4640}
4641
4642/* Scan X for memory references and scan the addresses for reloading.
4643 Also checks for references to "constant" regs that we want to eliminate
4644 and replaces them with the values they stand for.
4645 We may alter X destructively if it contains a reference to such.
4646 If X is just a constant reg, we return the equivalent value
4647 instead of X.
4648
4649 IND_LEVELS says how many levels of indirect addressing this machine
4650 supports.
4651
4652 OPNUM and TYPE identify the purpose of the reload.
4653
4654 IS_SET_DEST is true if X is the destination of a SET, which is not
4655 appropriate to be replaced by a constant.
4656
4657 INSN, if nonzero, is the insn in which we do the reload. It is used
4658 to determine if we may generate output reloads, and where to put USEs
4659 for pseudos that we have to replace with stack slots.
4660
4661 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4662 result of find_reloads_address. */
4663
4664static rtx
4665find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4666 int ind_levels, int is_set_dest, rtx_insn *insn,
4667 int *address_reloaded)
4668{
4669 RTX_CODE code = GET_CODE (x);
4670
4671 const char *fmt = GET_RTX_FORMAT (code);
4672 int i;
4673 int copied;
4674
4675 if (code == REG)
4676 {
4677 /* This code is duplicated for speed in find_reloads. */
4678 int regno = REGNO (x);
4679 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4680 x = reg_equiv_constant (regno);
4681#if 0
4682 /* This creates (subreg (mem...)) which would cause an unnecessary
4683 reload of the mem. */
4684 else if (reg_equiv_mem (regno) != 0)
4685 x = reg_equiv_mem (regno);
4686#endif
4687 else if (reg_equiv_memory_loc (regno)
4688 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4689 {
4690 rtx mem = make_memloc (x, regno);
4691 if (reg_equiv_address (regno)
4692 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4693 {
4694 /* If this is not a toplevel operand, find_reloads doesn't see
4695 this substitution. We have to emit a USE of the pseudo so
4696 that delete_output_reload can see it. */
4697 if (replace_reloads && recog_data.operand[opnum] != x)
4698 /* We mark the USE with QImode so that we recognize it
4699 as one that can be safely deleted at the end of
4700 reload. */
4701 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4702 QImode);
4703 x = mem;
4704 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4705 opnum, type, ind_levels, insn);
4706 if (!rtx_equal_p (x, mem))
4707 push_reg_equiv_alt_mem (regno, x);
4708 if (address_reloaded)
4709 *address_reloaded = i;
4710 }
4711 }
4712 return x;
4713 }
4714 if (code == MEM)
4715 {
4716 rtx tem = x;
4717
4718 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4719 opnum, type, ind_levels, insn);
4720 if (address_reloaded)
4721 *address_reloaded = i;
4722
4723 return tem;
4724 }
4725
4726 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4727 {
4728 /* Check for SUBREG containing a REG that's equivalent to a
4729 constant. If the constant has a known value, truncate it
4730 right now. Similarly if we are extracting a single-word of a
4731 multi-word constant. If the constant is symbolic, allow it
4732 to be substituted normally. push_reload will strip the
4733 subreg later. The constant must not be VOIDmode, because we
4734 will lose the mode of the register (this should never happen
4735 because one of the cases above should handle it). */
4736
4737 int regno = REGNO (SUBREG_REG (x));
4738 rtx tem;
4739
4740 if (regno >= FIRST_PSEUDO_REGISTER
4741 && reg_renumber[regno] < 0
4742 && reg_equiv_constant (regno) != 0)
4743 {
4744 tem =
4745 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4746 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4747 gcc_assert (tem);
4748 if (CONSTANT_P (tem)
4749 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4750 {
4751 tem = force_const_mem (GET_MODE (x), tem);
4752 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4753 &XEXP (tem, 0), opnum, type,
4754 ind_levels, insn);
4755 if (address_reloaded)
4756 *address_reloaded = i;
4757 }
4758 return tem;
4759 }
4760
4761 /* If the subreg contains a reg that will be converted to a mem,
4762 attempt to convert the whole subreg to a (narrower or wider)
4763 memory reference instead. If this succeeds, we're done --
4764 otherwise fall through to check whether the inner reg still
4765 needs address reloads anyway. */
4766
4767 if (regno >= FIRST_PSEUDO_REGISTER
4768 && reg_equiv_memory_loc (regno) != 0)
4769 {
4770 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4771 insn, address_reloaded);
4772 if (tem)
4773 return tem;
4774 }
4775 }
4776
4777 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4778 {
4779 if (fmt[i] == 'e')
4780 {
4781 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4782 ind_levels, is_set_dest, insn,
4783 address_reloaded);
4784 /* If we have replaced a reg with its equivalent memory loc -
4785 that can still be handled here e.g. if it's in a paradoxical
4786 subreg - we must make the change in a copy, rather than using
4787 a destructive change. This way, find_reloads can still elect
4788 not to do the change. */
4789 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4790 {
4791 x = shallow_copy_rtx (x);
4792 copied = 1;
4793 }
4794 XEXP (x, i) = new_part;
4795 }
4796 }
4797 return x;
4798}
4799
4800/* Return a mem ref for the memory equivalent of reg REGNO.
4801 This mem ref is not shared with anything. */
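/* Illustrative sketch: if the pseudo's equivalence is the stack slot
 (mem:SI (plus (reg fp) (const_int -16))), make_memloc returns a fresh,
 unshared MEM for that location in the mode of AD, after rerunning
 elimination (so the frame pointer may already have been replaced by the
 stack pointer plus an adjusted offset). The offset shown is made up for
 illustration. */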
4802
4803static rtx
4804make_memloc (rtx ad, int regno)
4805{
4806 /* We must rerun eliminate_regs, in case the elimination
4807 offsets have changed. */
4808 rtx tem
4809 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4810 0);
4811
4812 /* If TEM might contain a pseudo, we must copy it to avoid
4813 modifying it when we do the substitution for the reload. */
4814 if (rtx_varies_p (tem, 0))
4815 tem = copy_rtx (tem);
4816
4817 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4818 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4819
4820 /* Copy the result if it's still the same as the equivalence, to avoid
4821 modifying it when we do the substitution for the reload. */
4822 if (tem == reg_equiv_memory_loc (regno))
4823 tem = copy_rtx (tem);
4824 return tem;
4825}
4826
4827/* Returns true if AD could be turned into a valid memory reference
4828 to mode MODE in address space AS by reloading the part pointed to
4829 by PART into a register. */
4830
4831static bool
4832maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4833 addr_space_t as, rtx *part)
4834{
4835 bool retv;
4836 rtx tem = *part;
4837 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4838
4839 *part = reg;
4840 retv = memory_address_addr_space_p (mode, ad, as);
4841 *part = tem;
4842
4843 return retv;
4844}
4845
4846/* Record all reloads needed for handling memory address AD
4847 which appears in *LOC in a memory reference to mode MODE
4848 which itself is found in location *MEMREFLOC.
4849 Note that we take shortcuts assuming that no multi-reg machine mode
4850 occurs as part of an address.
4851
4852 OPNUM and TYPE specify the purpose of this reload.
4853
4854 IND_LEVELS says how many levels of indirect addressing this machine
4855 supports.
4856
4857 INSN, if nonzero, is the insn in which we do the reload. It is used
4858 to determine if we may generate output reloads, and where to put USEs
4859 for pseudos that we have to replace with stack slots.
4860
4861 Value is one if this address is reloaded or replaced as a whole; it is
4862 zero if the top level of this address was not reloaded or replaced, and
4863 it is -1 if it may or may not have been reloaded or replaced.
4864
4865 Note that there is no verification that the address will be valid after
4866 this routine does its work. Instead, we rely on the fact that the address
4867 was valid when reload started. So we need only undo things that reload
4868 could have broken. These are wrong register types, pseudos not allocated
4869 to a hard register, and frame pointer elimination. */
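/* Illustrative example: if elimination has turned a stack-slot address into
 (plus (reg sp) (const_int 40000)) and that displacement is out of range
 for MODE on the target, the code below reloads either just the
 displacement into an index register (when double_reg_address_ok permits a
 reg+reg address) or the whole sum into a base register. The particular
 constant is made up for illustration. */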
4870
4871static int
4872find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4873 rtx *loc, int opnum, enum reload_type type,
4874 int ind_levels, rtx_insn *insn)
4875{
4876 addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4877 : ADDR_SPACE_GENERIC;
4878 int regno;
4879 int removed_and = 0;
4880 int op_index;
4881 rtx tem;
4882
4883 /* If the address is a register, see if it is a legitimate address and
4884 reload if not. We first handle the cases where we need not reload
4885 or where we must reload in a non-standard way. */
4886
4887 if (REG_P (ad))
4888 {
4889 regno = REGNO (ad);
4890
4891 if (reg_equiv_constant (regno) != 0)
4892 {
4893 find_reloads_address_part (reg_equiv_constant (regno), loc,
4894 base_reg_class (mode, as, MEM,
4895 SCRATCH, insn),
4896 GET_MODE (ad), opnum, type, ind_levels);
4897 return 1;
4898 }
4899
4900 tem = reg_equiv_memory_loc (regno);
4901 if (tem != 0)
4902 {
4903 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4904 {
4905 tem = make_memloc (ad, regno);
4906 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4907 XEXP (tem, 0),
4908 MEM_ADDR_SPACE (tem)))
4909 {
4910 rtx orig = tem;
4911
4912 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4913 &XEXP (tem, 0), opnum,
4914 ADDR_TYPE (type), ind_levels, insn);
4915 if (!rtx_equal_p (tem, orig))
4916 push_reg_equiv_alt_mem (regno, tem);
4917 }
4918 /* We can avoid a reload if the register's equivalent memory
4919 expression is valid as an indirect memory address.
4920 But not all addresses are valid in a mem used as an indirect
4921 address: only reg or reg+constant. */
4922
4923 if (ind_levels > 0
4924 && strict_memory_address_addr_space_p (mode, tem, as)
4925 && (REG_P (XEXP (tem, 0))
4926 || (GET_CODE (XEXP (tem, 0)) == PLUS
4927 && REG_P (XEXP (XEXP (tem, 0), 0))
4928 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4929 {
4930 /* TEM is not the same as what we'll be replacing the
4931 pseudo with after reload, put a USE in front of INSN
4932 in the final reload pass. */
4933 if (replace_reloads
4934 && num_not_at_initial_offset
4935 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4936 {
4937 *loc = tem;
4938 /* We mark the USE with QImode so that we
4939 recognize it as one that can be safely
4940 deleted at the end of reload. */
4941 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4942 insn), QImode);
4943
4944 /* This doesn't really count as replacing the address
4945 as a whole, since it is still a memory access. */
4946 }
4947 return 0;
4948 }
4949 ad = tem;
4950 }
4951 }
4952
4953 /* The only remaining case where we can avoid a reload is if this is a
4954 hard register that is valid as a base register and which is not the
4955 subject of a CLOBBER in this insn. */
4956
4957 else if (regno < FIRST_PSEUDO_REGISTER
4958 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4959 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4960 return 0;
4961
4962 /* If we do not have one of the cases above, we must do the reload. */
4963 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4964 base_reg_class (mode, as, MEM, SCRATCH, insn),
4965 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4966 return 1;
4967 }
4968
4969 if (strict_memory_address_addr_space_p (mode, ad, as))
4970 {
4971 /* The address appears valid, so reloads are not needed.
4972 But the address may contain an eliminable register.
4973 This can happen because a machine with indirect addressing
4974 may consider a pseudo register by itself a valid address even when
4975 it has failed to get a hard reg.
4976 So do a tree-walk to find and eliminate all such regs. */
4977
4978 /* But first quickly dispose of a common case. */
4979 if (GET_CODE (ad) == PLUS
4980 && CONST_INT_P (XEXP (ad, 1))
4981 && REG_P (XEXP (ad, 0))
4982 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4983 return 0;
4984
4985 subst_reg_equivs_changed = 0;
4986 *loc = subst_reg_equivs (ad, insn);
4987
4988 if (! subst_reg_equivs_changed)
4989 return 0;
4990
4991 /* Check result for validity after substitution. */
4992 if (strict_memory_address_addr_space_p (mode, ad, as))
4993 return 0;
4994 }
4995
4996#ifdef LEGITIMIZE_RELOAD_ADDRESS
4997 do
4998 {
4999 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5000 {
5001 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5002 ind_levels, win);
5003 }
5004 break;
5005 win:
5006 *memrefloc = copy_rtx (*memrefloc);
5007 XEXP (*memrefloc, 0) = ad;
5008 move_replacements (&ad, &XEXP (*memrefloc, 0));
5009 return -1;
5010 }
5011 while (0);
5012#endif
5013
5014 /* The address is not valid. We have to figure out why. First see if
5015 we have an outer AND and remove it if so. Then analyze what's inside. */
5016
5017 if (GET_CODE (ad) == AND)
5018 {
5019 removed_and = 1;
5020 loc = &XEXP (ad, 0);
5021 ad = *loc;
5022 }
5023
5024 /* One possibility for why the address is invalid is that it is itself
5025 a MEM. This can happen when the frame pointer is being eliminated, a
5026 pseudo is not allocated to a hard register, and the offset between the
5027 frame and stack pointers is not its initial value. In that case the
5028 pseudo will have been replaced by a MEM referring to the
5029 stack pointer. */
5030 if (MEM_P (ad))
5031 {
5032 /* First ensure that the address in this MEM is valid. Then, unless
5033 indirect addresses are valid, reload the MEM into a register. */
5034 tem = ad;
5035 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5036 opnum, ADDR_TYPE (type),
5037 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5038
5039 /* If tem was changed, then we must create a new memory reference to
5040 hold it and store it back into memrefloc. */
5041 if (tem != ad && memrefloc)
5042 {
5043 *memrefloc = copy_rtx (*memrefloc);
5044 copy_replacements (tem, XEXP (*memrefloc, 0));
5045 loc = &XEXP (*memrefloc, 0);
5046 if (removed_and)
5047 loc = &XEXP (*loc, 0);
5048 }
5049
5050 /* Check the same cases as for indirect addresses above, except
5051 that here we can allow pseudos and a MEM, since they should have
5052 been taken care of above. */
5053
5054 if (ind_levels == 0
5055 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5056 || MEM_P (XEXP (tem, 0))
5057 || ! (REG_P (XEXP (tem, 0))
5058 || (GET_CODE (XEXP (tem, 0)) == PLUS
5059 && REG_P (XEXP (XEXP (tem, 0), 0))
5060 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5061 {
5062 /* Must use TEM here, not AD, since it is the one that will
5063 have any subexpressions reloaded, if needed. */
5064 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5065 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5066 VOIDmode, 0,
5067 0, opnum, type);
5068 return ! removed_and;
5069 }
5070 else
5071 return 0;
5072 }
5073
5074 /* If we have the address of a stack slot but it's not valid because the
5075 displacement is too large, compute the sum in a register.
5076 Handle all base registers here, not just fp/ap/sp, because on some
5077 targets (namely SH) we can also get too large displacements from
5078 big-endian corrections. */
5079 else if (GET_CODE (ad) == PLUS
5080 && REG_P (XEXP (ad, 0))
5081 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5082 && CONST_INT_P (XEXP (ad, 1))
5083 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5084 CONST_INT)
5085 /* Similarly, if we were to reload the base register and the
5086 mem+offset address is still invalid, then we want to reload
5087 the whole address, not just the base register. */
5088 || ! maybe_memory_address_addr_space_p
5089 (mode, ad, as, &(XEXP (ad, 0)))))
5090
5091 {
5092 /* Unshare the MEM rtx so we can safely alter it. */
5093 if (memrefloc)
5094 {
5095 *memrefloc = copy_rtx (*memrefloc);
5096 loc = &XEXP (*memrefloc, 0);
5097 if (removed_and)
5098 loc = &XEXP (*loc, 0);
5099 }
5100
5101 if (double_reg_address_ok[mode]
5102 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5103 PLUS, CONST_INT))
5104 {
5105 /* Unshare the sum as well. */
5106 *loc = ad = copy_rtx (ad);
5107
5108 /* Reload the displacement into an index reg.
5109 We assume the frame pointer or arg pointer is a base reg. */
5110 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5111 index_reg_class (insn), GET_MODE (ad), opnum,
5112 type, ind_levels);
5113 return 0;
5114 }
5115 else
5116 {
5117 /* If the sum of two regs is not necessarily valid,
5118 reload the sum into a base reg.
5119 That will at least work. */
5120 find_reloads_address_part (ad, loc,
5121 base_reg_class (mode, as, MEM,
5122 SCRATCH, insn),
5123 GET_MODE (ad), opnum, type, ind_levels);
5124 }
5125 return ! removed_and;
5126 }
5127
5128 /* If we have an indexed stack slot, there are three possible reasons why
5129 it might be invalid: The index might need to be reloaded, the address
5130 might have been made by frame pointer elimination and hence have a
5131 constant out of range, or both reasons might apply.
5132
5133 We can easily check for an index needing reload, but even if that is the
5134 case, we might also have an invalid constant. To avoid making the
5135 conservative assumption and requiring two reloads, we see if this address
5136 is valid when not interpreted strictly. If it is, the only problem is
5137 that the index needs a reload and find_reloads_address_1 will take care
5138 of it.
5139
5140 Handle all base registers here, not just fp/ap/sp, because on some
5141 targets (namely SPARC) we can also get invalid addresses from preventive
5142 subreg big-endian corrections made by find_reloads_toplev. We
5143 can also get expressions involving LO_SUM (rather than PLUS) from
5144 find_reloads_subreg_address.
5145
5146 If we decide to do something, it must be that `double_reg_address_ok'
5147 is true. We generate a reload of the base register + constant and
5148 rework the sum so that the reload register will be added to the index.
5149 This is safe because we know the address isn't shared.
5150
5151 We check for the base register as both the first and second operand of
5152 the innermost PLUS and/or LO_SUM. */
5153
5154 for (op_index = 0; op_index < 2; ++op_index)
5155 {
5156 rtx operand, addend;
5157 enum rtx_code inner_code;
5158
5159 if (GET_CODE (ad) != PLUS)
5160 continue;
5161
5162 inner_code = GET_CODE (XEXP (ad, 0));
5163 if (!(GET_CODE (ad) == PLUS
5164 && CONST_INT_P (XEXP (ad, 1))
5165 && (inner_code == PLUS || inner_code == LO_SUM)))
5166 continue;
5167
5168 operand = XEXP (XEXP (ad, 0), op_index);
5169 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5170 continue;
5171
5172 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5173
5174 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5175 GET_CODE (addend))
5176 || operand == frame_pointer_rtx
5177 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5178 && operand == hard_frame_pointer_rtx)
5179 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5180 && operand == arg_pointer_rtx)
5181 || operand == stack_pointer_rtx)
5182 && ! maybe_memory_address_addr_space_p
5183 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5184 {
5185 rtx offset_reg;
5186 enum reg_class cls;
5187
5188 offset_reg = plus_constant (GET_MODE (ad), operand,
5189 INTVAL (XEXP (ad, 1)));
5190
5191 /* Form the adjusted address. */
5192 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5193 ad = gen_rtx_PLUS (GET_MODE (ad),
5194 op_index == 0 ? offset_reg : addend,
5195 op_index == 0 ? addend : offset_reg);
5196 else
5197 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5198 op_index == 0 ? offset_reg : addend,
5199 op_index == 0 ? addend : offset_reg);
5200 *loc = ad;
5201
5202 cls = base_reg_class (mode, as, MEM, GET_CODE (addend), insn);
5203 find_reloads_address_part (XEXP (ad, op_index),
5204 &XEXP (ad, op_index), cls,
5205 GET_MODE (ad), opnum, type, ind_levels);
5206 find_reloads_address_1 (mode, as,
5207 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5208 GET_CODE (XEXP (ad, op_index)),
5209 &XEXP (ad, 1 - op_index), opnum,
5210 type, 0, insn);
5211
5212 return 0;
5213 }
5214 }
5215
5216 /* See if address becomes valid when an eliminable register
5217 in a sum is replaced. */
5218
5219 tem = ad;
5220 if (GET_CODE (ad) == PLUS)
5221 tem = subst_indexed_address (ad);
5222 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5223 {
5224 /* Ok, we win that way. Replace any additional eliminable
5225 registers. */
5226
5227 subst_reg_equivs_changed = 0;
5228 tem = subst_reg_equivs (tem, insn);
5229
5230 /* Make sure that didn't make the address invalid again. */
5231
5232 if (! subst_reg_equivs_changed
5233 || strict_memory_address_addr_space_p (mode, tem, as))
5234 {
5235 *loc = tem;
5236 return 0;
5237 }
5238 }
5239
5240 /* If constants aren't valid addresses, reload the constant address
5241 into a register. */
5242 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5243 {
5244 machine_mode address_mode = GET_MODE (ad);
5245 if (address_mode == VOIDmode)
5246 address_mode = targetm.addr_space.address_mode (as);
5247
5248 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5249 Unshare it so we can safely alter it. */
5250 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5251 && CONSTANT_POOL_ADDRESS_P (ad))
5252 {
5253 *memrefloc = copy_rtx (*memrefloc);
5254 loc = &XEXP (*memrefloc, 0);
5255 if (removed_and)
5256 loc = &XEXP (*loc, 0);
5257 }
5258
5259 find_reloads_address_part (ad, loc,
5260 base_reg_class (mode, as, MEM,
5261 SCRATCH, insn),
5262 address_mode, opnum, type, ind_levels);
5263 return ! removed_and;
5264 }
5265
5266 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5267 opnum, type, ind_levels, insn);
5268}
5269
5270/* Find all pseudo regs appearing in AD
5271 that are eliminable in favor of equivalent values
5272 and do not have hard regs; replace them by their equivalents.
5273 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5274 front of it for pseudos that we have to replace with stack slots. */
5275
5276static rtx
5277subst_reg_equivs (rtx ad, rtx_insn *insn)
5278{
5279 RTX_CODE code = GET_CODE (ad);
5280 int i;
5281 const char *fmt;
5282
5283 switch (code)
5284 {
5285 case HIGH:
5286 case CONST:
5287 CASE_CONST_ANY:
5288 case SYMBOL_REF:
5289 case LABEL_REF:
5290 case PC:
5291 return ad;
5292
5293 case REG:
5294 {
5295 int regno = REGNO (ad);
5296
5297 if (reg_equiv_constant (regno) != 0)
5298 {
5299 subst_reg_equivs_changed = 1;
5300 return reg_equiv_constant (regno);
5301 }
5302 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5303 {
5304 rtx mem = make_memloc (ad, regno);
5305 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5306 {
5307 subst_reg_equivs_changed = 1;
5308 /* We mark the USE with QImode so that we recognize it
5309 as one that can be safely deleted at the end of
5310 reload. */
5311 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5312 QImode);
5313 return mem;
5314 }
5315 }
5316 }
5317 return ad;
5318
5319 case PLUS:
5320 /* Quickly dispose of a common case. */
5321 if (XEXP (ad, 0) == frame_pointer_rtx
5322 && CONST_INT_P (XEXP (ad, 1)))
5323 return ad;
5324 break;
5325
5326 default:
5327 break;
5328 }
5329
5330 fmt = GET_RTX_FORMAT (code);
5331 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5332 if (fmt[i] == 'e')
5333 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5334 return ad;
5335}
5336
5337/* Compute the sum of X and Y, making canonicalizations assumed in an
5338 address, namely: sum constant integers, surround the sum of two
5339 constants with a CONST, put the constant as the second operand, and
5340 group the constant on the outermost sum.
5341
5342 This routine assumes both inputs are already in canonical form. */
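/* For example, form_sum (Pmode, (plus (reg R) (const_int 4)), (const_int 6))
 folds the constants and yields (plus (reg R) (const_int 10)), while the
 sum of two symbolic constants is wrapped in a CONST. (Illustrative; R
 stands for any register.) */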
5343
5344rtx
5345form_sum (machine_mode mode, rtx x, rtx y)
5346{
5347 rtx tem;
5348
5349 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5350 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5351
5352 if (CONST_INT_P (x))
5353 return plus_constant (mode, y, INTVAL (x));
5354 else if (CONST_INT_P (y))
5355 return plus_constant (mode, x, INTVAL (y));
5356 else if (CONSTANT_P (x))
5357 tem = x, x = y, y = tem;
5358
5359 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5360 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5361
5362 /* Note that if the operands of Y are specified in the opposite
5363 order in the recursive calls below, infinite recursion will occur. */
5364 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5365 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5366
5367 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5368 constant will have been placed second. */
5369 if (CONSTANT_P (x) && CONSTANT_P (y))
5370 {
5371 if (GET_CODE (x) == CONST)
5372 x = XEXP (x, 0);
5373 if (GET_CODE (y) == CONST)
5374 y = XEXP (y, 0);
5375
5376 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5377 }
5378
5379 return gen_rtx_PLUS (mode, x, y);
5380}
5381
5382/* If ADDR is a sum containing a pseudo register that should be
5383 replaced with a constant (from reg_equiv_constant),
5384 return the result of doing so, and also apply the associative
5385 law so that the result is more likely to be a valid address.
5386 (But it is not guaranteed to be one.)
5387
5388 Note that at most one register is replaced, even if more are
5389 replaceable. Also, we try to put the result into a canonical form
5390 so it is more likely to be a valid address.
5391
5392 In all other cases, return ADDR. */
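/* Illustrative example: if (reg 130) did not get a hard register and is
 known equivalent to (symbol_ref "x"), then (plus (reg 130) (const_int 8))
 becomes (const (plus (symbol_ref "x") (const_int 8))) via form_sum. The
 register number is made up for illustration. */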
5393
5394static rtx
5395subst_indexed_address (rtx addr)
5396{
5397 rtx op0 = 0, op1 = 0, op2 = 0;
5398 rtx tem;
5399 int regno;
5400
5401 if (GET_CODE (addr) == PLUS)
5402 {
5403 /* Try to find a register to replace. */
5404 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5405 if (REG_P (op0)
5406 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5407 && reg_renumber[regno] < 0
5408 && reg_equiv_constant (regno) != 0)
5409 op0 = reg_equiv_constant (regno);
5410 else if (REG_P (op1)
5411 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5412 && reg_renumber[regno] < 0
5413 && reg_equiv_constant (regno) != 0)
5414 op1 = reg_equiv_constant (regno);
5415 else if (GET_CODE (op0) == PLUS
5416 && (tem = subst_indexed_address (op0)) != op0)
5417 op0 = tem;
5418 else if (GET_CODE (op1) == PLUS
5419 && (tem = subst_indexed_address (op1)) != op1)
5420 op1 = tem;
5421 else
5422 return addr;
5423
5424 /* Pick out up to three things to add. */
5425 if (GET_CODE (op1) == PLUS)
5426 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5427 else if (GET_CODE (op0) == PLUS)
5428 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5429
5430 /* Compute the sum. */
5431 if (op2 != 0)
5432 op1 = form_sum (GET_MODE (addr), op1, op2);
5433 if (op1 != 0)
5434 op0 = form_sum (GET_MODE (addr), op0, op1);
5435
5436 return op0;
5437 }
5438 return addr;
5439}
5440
5441/* Update the REG_INC notes for an insn. It updates all REG_INC
5442 notes for the instruction which refer to REGNO so that they
5443 refer to the reload number instead.
5444
5445 INSN is the insn for which any REG_INC notes need updating.
5446
5447 REGNO is the register number which has been reloaded.
5448
5449 RELOADNUM is the reload number. */
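/* Roughly speaking: after reload the increment is performed on the reload
 register rather than on the original pseudo, so the REG_INC note's
 operand is queued in the replacements array (via push_replacement) and
 rewritten later by subst_reloads. */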
5450
5451static void
5452update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5453 int reloadnum ATTRIBUTE_UNUSED)
5454{
5455 if (!AUTO_INC_DEC)
5456 return;
5457
5458 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5459 if (REG_NOTE_KIND (link) == REG_INC
5460 && (int) REGNO (XEXP (link, 0)) == regno)
5461 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5462}
5463
5464/* Record the pseudo registers we must reload into hard registers in a
5465 subexpression of a would-be memory address, X referring to a value
5466 in mode MODE. (This function is not called if the address we find
5467 is strictly valid.)
5468
5469 CONTEXT = 1 means we are considering regs as index regs,
5470 = 0 means we are considering them as base regs.
5471 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5472 or an autoinc code.
5473 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5474 is the code of the index part of the address. Otherwise, pass SCRATCH
5475 for this argument.
5476 OPNUM and TYPE specify the purpose of any reloads made.
5477
5478 IND_LEVELS says how many levels of indirect addressing are
5479 supported at this point in the address.
5480
5481 INSN, if nonzero, is the insn in which we do the reload. It is used
5482 to determine if we may generate output reloads.
5483
5484 We return nonzero if X, as a whole, is reloaded or replaced. */
5485
5486/* Note that we take shortcuts assuming that no multi-reg machine mode
5487 occurs as part of an address.
5488 Also, this is not fully machine-customizable; it works for machines
5489 such as VAXen and 68000's and 32000's, but other possible machines
5490 could have addressing modes that this does not handle right.
5491 If you add push_reload calls here, you need to make sure gen_reload
5492 handles those cases gracefully. */
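/* Illustrative example: on a target where (plus (reg BASE) (mult (reg INDEX)
 (const_int 4))) is a valid address, the PLUS case below treats the MULT
 operand as an index (CONTEXT == 1) and the other operand as a base
 (CONTEXT == 0); a pseudo in either position that did not get a suitable
 hard register is pushed as a reload of the corresponding register class.
 Target-specific details vary. */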
5493
5494static int
5495find_reloads_address_1 (machine_mode mode, addr_space_t as,
5496 rtx x, int context,
5497 enum rtx_code outer_code, enum rtx_code index_code,
5498 rtx *loc, int opnum, enum reload_type type,
5499 int ind_levels, rtx_insn *insn)
5500{
5501#define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5502 ((CONTEXT) == 0 \
5503 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5504 : REGNO_OK_FOR_INDEX_P (REGNO))
5505
5506 enum reg_class context_reg_class;
5507 RTX_CODE code = GET_CODE (x);
5508 bool reloaded_inner_of_autoinc = false;
5509
5510 if (context == 1)
5511 context_reg_class = index_reg_class (insn);
5512 else
5513 context_reg_class = base_reg_class (mode, as, outer_code, index_code,
5514 insn);
5515
5516 switch (code)
5517 {
5518 case PLUS:
5519 {
5520 rtx orig_op0 = XEXP (x, 0);
5521 rtx orig_op1 = XEXP (x, 1);
5522 RTX_CODE code0 = GET_CODE (orig_op0);
5523 RTX_CODE code1 = GET_CODE (orig_op1);
5524 rtx op0 = orig_op0;
5525 rtx op1 = orig_op1;
5526
5527 if (GET_CODE (op0) == SUBREG)
5528 {
5529 op0 = SUBREG_REG (op0);
5530 code0 = GET_CODE (op0);
5531 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5532 op0 = gen_rtx_REG (word_mode,
5533 (REGNO (op0) +
5534 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5535 GET_MODE (SUBREG_REG (orig_op0)),
5536 SUBREG_BYTE (orig_op0),
5537 GET_MODE (orig_op0))));
5538 }
5539
5540 if (GET_CODE (op1) == SUBREG)
5541 {
5542 op1 = SUBREG_REG (op1);
5543 code1 = GET_CODE (op1);
5544 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5545 /* ??? Why is this given op1's mode and above for
5546 ??? op0 SUBREGs we use word_mode? */
5547 op1 = gen_rtx_REG (GET_MODE (op1),
5548 (REGNO (op1) +
5549 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5550 GET_MODE (SUBREG_REG (orig_op1)),
5551 SUBREG_BYTE (orig_op1),
5552 GET_MODE (orig_op1))));
5553 }
5554 /* A PLUS in the index register may be created only as a result of
5555 register rematerialization for expressions like &localvar*4. Reload it.
5556 It may be possible to combine the displacement on the outer level,
5557 but it is probably not worthwhile to do so. */
5558 if (context == 1)
5559 {
5560 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5561 opnum, ADDR_TYPE (type), ind_levels, insn);
5562 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5563 context_reg_class,
5564 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5565 return 1;
5566 }
5567
5568 if (code0 == MULT || code0 == ASHIFT
5569 || code0 == SIGN_EXTEND || code0 == TRUNCATE
5570 || code0 == ZERO_EXTEND || code1 == MEM)
5571 {
5572 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5573 &XEXP (x, 0), opnum, type, ind_levels,
5574 insn);
5575 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5576 &XEXP (x, 1), opnum, type, ind_levels,
5577 insn);
5578 }
5579
5580 else if (code1 == MULT || code1 == ASHIFT
5581 || code1 == SIGN_EXTEND || code1 == TRUNCATE
5582 || code1 == ZERO_EXTEND || code0 == MEM)
5583 {
5584 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5585 &XEXP (x, 0), opnum, type, ind_levels,
5586 insn);
5587 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5588 &XEXP (x, 1), opnum, type, ind_levels,
5589 insn);
5590 }
5591
5592 else if (code0 == CONST_INT || code0 == CONST
5593 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5594 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5595 &XEXP (x, 1), opnum, type, ind_levels,
5596 insn);
5597
5598 else if (code1 == CONST_INT || code1 == CONST
5599 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5600 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5601 &XEXP (x, 0), opnum, type, ind_levels,
5602 insn);
5603
5604 else if (code0 == REG && code1 == REG)
5605 {
5606 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5607 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5608 return 0;
5609 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5610 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5611 return 0;
5612 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5613 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5614 &XEXP (x, 1), opnum, type, ind_levels,
5615 insn);
5616 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5617 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5618 &XEXP (x, 0), opnum, type, ind_levels,
5619 insn);
5620 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5621 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5622 &XEXP (x, 0), opnum, type, ind_levels,
5623 insn);
5624 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5625 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5626 &XEXP (x, 1), opnum, type, ind_levels,
5627 insn);
5628 else
5629 {
5630 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5631 &XEXP (x, 0), opnum, type, ind_levels,
5632 insn);
5633 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5634 &XEXP (x, 1), opnum, type, ind_levels,
5635 insn);
5636 }
5637 }
5638
5639 else if (code0 == REG)
5640 {
5641 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5642 &XEXP (x, 0), opnum, type, ind_levels,
5643 insn);
5644 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5645 &XEXP (x, 1), opnum, type, ind_levels,
5646 insn);
5647 }
5648
5649 else if (code1 == REG)
5650 {
5651 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5652 &XEXP (x, 1), opnum, type, ind_levels,
5653 insn);
5654 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5655 &XEXP (x, 0), opnum, type, ind_levels,
5656 insn);
5657 }
5658 }
5659
5660 return 0;
5661
5662 case POST_MODIFY:
5663 case PRE_MODIFY:
5664 {
5665 rtx op0 = XEXP (x, 0);
5666 rtx op1 = XEXP (x, 1);
5667 enum rtx_code index_code;
5668 int regno;
5669 int reloadnum;
5670
5671 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5672 return 0;
5673
5674 /* Currently, we only support {PRE,POST}_MODIFY constructs
5675 where a base register is {inc,dec}remented by the contents
5676 of another register or by a constant value. Thus, these
5677 operands must match. */
5678 gcc_assert (op0 == XEXP (op1, 0));
5679
5680 /* Require index register (or constant). Let's just handle the
5681 register case in the meantime... If the target allows
5682 auto-modify by a constant then we could try replacing a pseudo
5683 register with its equivalent constant where applicable.
5684
5685 We also handle the case where the register was eliminated
5686 resulting in a PLUS subexpression.
5687
5688 If we later decide to reload the whole PRE_MODIFY or
5689 POST_MODIFY, inc_for_reload might clobber the reload register
5690 before reading the index. The index register might therefore
5691 need to live longer than a TYPE reload normally would, so be
5692 conservative and class it as RELOAD_OTHER. */
5693 if ((REG_P (XEXP (op1, 1))
5694 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5695 || GET_CODE (XEXP (op1, 1)) == PLUS)
5696 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5697 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5698 ind_levels, insn);
5699
5700 gcc_assert (REG_P (XEXP (op1, 0)));
5701
5702 regno = REGNO (XEXP (op1, 0));
5703 index_code = GET_CODE (XEXP (op1, 1));
5704
5705 /* A register that is incremented cannot be constant! */
5706 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5707 || reg_equiv_constant (regno) == 0);
5708
5709 /* Handle a register that is equivalent to a memory location
5710 which cannot be addressed directly. */
5711 if (reg_equiv_memory_loc (regno) != 0
5712 && (reg_equiv_address (regno) != 0
5713 || num_not_at_initial_offset))
5714 {
5715 rtx tem = make_memloc (XEXP (x, 0), regno);
5716
5717 if (reg_equiv_address (regno)
5718 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5719 {
5720 rtx orig = tem;
5721
5722 /* First reload the memory location's address.
5723 We can't use ADDR_TYPE (type) here, because we need to
5724 write back the value after reading it, hence we actually
5725 need two registers. */
5726 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5727 &XEXP (tem, 0), opnum,
5728 RELOAD_OTHER,
5729 ind_levels, insn);
5730
5731 if (!rtx_equal_p (tem, orig))
5732 push_reg_equiv_alt_mem (regno, tem);
5733
5734 /* Then reload the memory location into a base
5735 register. */
5736 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5737 &XEXP (op1, 0),
5738 base_reg_class (mode, as,
5739 code, index_code,
5740 insn),
5741 GET_MODE (x), GET_MODE (x), 0,
5742 0, opnum, RELOAD_OTHER);
5743
5744 update_auto_inc_notes (this_insn, regno, reloadnum);
5745 return 0;
5746 }
5747 }
5748
5749 if (reg_renumber[regno] >= 0)
5750 regno = reg_renumber[regno];
5751
5752 /* We require a base register here... */
5753 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5754 {
5755 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5756 &XEXP (op1, 0), &XEXP (x, 0),
5757 base_reg_class (mode, as,
5758 code, index_code,
5759 insn),
5760 GET_MODE (x), GET_MODE (x), 0, 0,
5761 opnum, RELOAD_OTHER);
5762
5763 update_auto_inc_notes (this_insn, regno, reloadnum);
5764 return 0;
5765 }
5766 }
5767 return 0;
5768
5769 case POST_INC:
5770 case POST_DEC:
5771 case PRE_INC:
5772 case PRE_DEC:
5773 if (REG_P (XEXP (x, 0)))
5774 {
5775 int regno = REGNO (XEXP (x, 0));
5776 int value = 0;
5777 rtx x_orig = x;
5778
5779 /* A register that is incremented cannot be constant! */
5780 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5781 || reg_equiv_constant (regno) == 0);
5782
5783 /* Handle a register that is equivalent to a memory location
5784 which cannot be addressed directly. */
5785 if (reg_equiv_memory_loc (regno) != 0
5786 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5787 {
5788 rtx tem = make_memloc (XEXP (x, 0), regno);
5789 if (reg_equiv_address (regno)
5790 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5791 {
5792 rtx orig = tem;
5793
5794 /* First reload the memory location's address.
5795 We can't use ADDR_TYPE (type) here, because we need to
5796 write back the value after reading it, hence we actually
5797 need two registers. */
5798 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5799 &XEXP (tem, 0), opnum, type,
5800 ind_levels, insn);
5801 reloaded_inner_of_autoinc = true;
5802 if (!rtx_equal_p (tem, orig))
5803 push_reg_equiv_alt_mem (regno, tem);
5804 /* Put this inside a new increment-expression. */
5805 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5806 /* Proceed to reload that, as if it contained a register. */
5807 }
5808 }
5809
5810 /* If we have a hard register that is ok in this incdec context,
5811 don't make a reload. If the register isn't suitable for
5812 autoincdec, we can reload it. But if an autoincrement of a
5813 register that we have just verified to be acceptable here is
5814 still not "valid" in the surrounding context, it must be that
5815 no autoincrement is "valid" at all. If that is true and
5816 something made an autoincrement anyway, this must be a special
5817 context where one is allowed. (For example, a "push" instruction.)
5818 We can't improve this address, so leave it alone. */
5819
5820 /* Otherwise, reload the autoincrement into a suitable hard reg
5821 and record how much to increment by. */
5822
5823 if (reg_renumber[regno] >= 0)
5824 regno = reg_renumber[regno];
5825 if (regno >= FIRST_PSEUDO_REGISTER
5826 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5827 index_code))
5828 {
5829 int reloadnum;
5830
5831 /* If we can output the register afterwards, do so, this
5832 saves the extra update.
5833 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5834 CALL_INSN.
5835 But don't do this if we cannot directly address the
5836 memory location, since this will make it harder to
5837 reuse address reloads, and increases register pressure.
5838 Also don't do this if we can probably update x directly. */
5839 rtx equiv = (MEM_P (XEXP (x, 0))
5840 ? XEXP (x, 0)
5841 : reg_equiv_mem (regno));
5842 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5843 if (insn && NONJUMP_INSN_P (insn)
5844 && (regno < FIRST_PSEUDO_REGISTER
5845 || (equiv
5846 && memory_operand (equiv, GET_MODE (equiv))
5847 && ! (icode != CODE_FOR_nothing
5848 && insn_operand_matches (icode, 0, equiv)
5849 && insn_operand_matches (icode, 1, equiv))))
5850 /* Using RELOAD_OTHER means we emit this and the reload we
5851 made earlier in the wrong order. */
5852 && !reloaded_inner_of_autoinc)
5853 {
5854 /* We use the original pseudo for loc, so that
5855 emit_reload_insns() knows which pseudo this
5856 reload refers to and updates the pseudo rtx, not
5857 its equivalent memory location, as well as the
5858 corresponding entry in reg_last_reload_reg. */
5859 loc = &XEXP (x_orig, 0);
5860 x = XEXP (x, 0);
5861 reloadnum
5862 = push_reload (x, x, loc, loc,
5863 context_reg_class,
5864 GET_MODE (x), GET_MODE (x), 0, 0,
5865 opnum, RELOAD_OTHER);
5866 }
5867 else
5868 {
5869 reloadnum
5870 = push_reload (x, x, loc, (rtx*) 0,
5871 context_reg_class,
5872 GET_MODE (x), GET_MODE (x), 0, 0,
5873 opnum, type);
5874 rld[reloadnum].inc
5875 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5876
5877 value = 1;
5878 }
5879
5880 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5881 reloadnum);
5882 }
5883 return value;
5884 }
5885 return 0;
5886
5887 case TRUNCATE:
5888 case SIGN_EXTEND:
5889 case ZERO_EXTEND:
5890 /* Look for parts to reload in the inner expression and reload them
5891 too, in addition to this operation. Reloading all inner parts in
5892 addition to this one shouldn't be necessary, but at this point,
5893 we don't know if we can possibly omit any part that *can* be
5894 reloaded. Targets that are better off reloading just either part
5895 (or perhaps even a different part of an outer expression), should
5896 define LEGITIMIZE_RELOAD_ADDRESS. */
5897 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5898 context, code, SCRATCH, &XEXP (x, 0), opnum,
5899 type, ind_levels, insn);
5900 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5901 context_reg_class,
5902 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5903 return 1;
5904
5905 case MEM:
5906 /* This is probably the result of a substitution, by eliminate_regs, of
5907 an equivalent address for a pseudo that was not allocated to a hard
5908 register. Verify that the specified address is valid and reload it
5909 into a register.
5910
5911 Since we know we are going to reload this item, don't decrement for
5912 the indirection level.
5913
5914 Note that this is actually conservative: it would be slightly more
5915 efficient to use the value of SPILL_INDIRECT_LEVELS from
5916 reload1.cc here. */
5917
5918 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5919 opnum, ADDR_TYPE (type), ind_levels, insn);
5920 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5921 context_reg_class,
5922 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5923 return 1;
5924
5925 case REG:
5926 {
5927 int regno = REGNO (x);
5928
5929 if (reg_equiv_constant (regno) != 0)
5930 {
5931 find_reloads_address_part (reg_equiv_constant (regno), loc,
5932 context_reg_class,
5933 GET_MODE (x), opnum, type, ind_levels);
5934 return 1;
5935 }
5936
5937#if 0 /* This might screw code in reload1.cc to delete prior output-reload
5938 that feeds this insn. */
5939 if (reg_equiv_mem (regno) != 0)
5940 {
5941 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5942 context_reg_class,
5943 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5944 return 1;
5945 }
5946#endif
5947
5948 if (reg_equiv_memory_loc (regno)
5949 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5950 {
5951 rtx tem = make_memloc (x, regno);
5952 if (reg_equiv_address (regno) != 0
5953 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5954 {
5955 x = tem;
5956 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5957 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5958 ind_levels, insn);
5959 if (!rtx_equal_p (x, tem))
5960 push_reg_equiv_alt_mem (regno, x);
5961 }
5962 }
5963
5964 if (reg_renumber[regno] >= 0)
5965 regno = reg_renumber[regno];
5966
5967 if (regno >= FIRST_PSEUDO_REGISTER
5968 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5969 index_code))
5970 {
5971 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5972 context_reg_class,
5973 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5974 return 1;
5975 }
5976
5977 /* If a register appearing in an address is the subject of a CLOBBER
5978 in this insn, reload it into some other register to be safe.
5979 The CLOBBER is supposed to make the register unavailable
5980 from before this insn to after it. */
5981 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5982 {
5983 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5984 context_reg_class,
5985 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5986 return 1;
5987 }
5988 }
5989 return 0;
5990
5991 case SUBREG:
5992 if (REG_P (SUBREG_REG (x)))
5993 {
5994 /* If this is a SUBREG of a hard register and the resulting register
5995 is of the wrong class, reload the whole SUBREG. This avoids
5996 needless copies if SUBREG_REG is multi-word. */
5997 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5998 {
5999 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6000
6001 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6002 index_code))
6003 {
6004 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6005 context_reg_class,
6006 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6007 return 1;
6008 }
6009 }
6010 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6011 is larger than the class size, then reload the whole SUBREG. */
6012 else
6013 {
6014 enum reg_class rclass = context_reg_class;
6015 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6016 > reg_class_size[(int) rclass])
6017 {
6018 /* If the inner register will be replaced by a memory
6019 reference, we can do this only if we can replace the
6020 whole subreg by a (narrower) memory reference. If
6021 this is not possible, fall through and reload just
6022 the inner register (including address reloads). */
6023 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6024 {
6025 rtx tem = find_reloads_subreg_address (x, opnum,
6026 ADDR_TYPE (type),
6027 ind_levels, insn,
6028 NULL);
6029 if (tem)
6030 {
6031 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6032 GET_MODE (tem), VOIDmode, 0, 0,
6033 opnum, type);
6034 return 1;
6035 }
6036 }
6037 else
6038 {
6039 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6040 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6041 return 1;
6042 }
6043 }
6044 }
6045 }
6046 break;
6047
6048 default:
6049 break;
6050 }
6051
6052 {
6053 const char *fmt = GET_RTX_FORMAT (code);
6054 int i;
6055
6056 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6057 {
6058 if (fmt[i] == 'e')
6059 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6060 we get here. */
6061 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6062 code, SCRATCH, &XEXP (x, i),
6063 opnum, type, ind_levels, insn);
6064 }
6065 }
6066
6067#undef REG_OK_FOR_CONTEXT
6068 return 0;
6069}
6070
6071/* X, which is found at *LOC, is a part of an address that needs to be
6072 reloaded into a register of class RCLASS. If X is a constant, or if
6073 X is a PLUS that contains a constant, check that the constant is a
6074 legitimate operand and that we are supposed to be able to load
6075 it into the register.
6076
6077 If not, force the constant into memory and reload the MEM instead.
6078
6079 MODE is the mode to use, in case X is an integer constant.
6080
6081 OPNUM and TYPE describe the purpose of any reloads made.
6082
6083 IND_LEVELS says how many levels of indirect addressing this machine
6084 supports. */
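/* For example, if X is a CONST_INT that the target cannot load directly
 into registers of RCLASS (say, an out-of-range immediate), it is forced
 into the constant pool below and the resulting constant-pool MEM is
 reloaded instead; the MEM's own address is then processed by
 find_reloads_address. The target specifics are illustrative. */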
6085
6086static void
6087find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6088 machine_mode mode, int opnum,
6089 enum reload_type type, int ind_levels)
6090{
6091 if (CONSTANT_P (x)
6092 && (!targetm.legitimate_constant_p (mode, x)
6093 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6094 {
6095 x = force_const_mem (mode, x);
6096 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6097 opnum, type, ind_levels, 0);
6098 }
6099
6100 else if (GET_CODE (x) == PLUS
6101 && CONSTANT_P (XEXP (x, 1))
6102 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6103 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6104 == NO_REGS))
6105 {
6106 rtx tem;
6107
6108 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6109 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6110 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6111 opnum, type, ind_levels, 0);
6112 }
6113
6114 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6115 mode, VOIDmode, 0, 0, opnum, type);
6116}
6117
6118/* X, a subreg of a pseudo, is a part of an address that needs to be
6119 reloaded, and the pseudo is equivalent to a memory location.
6120
6121 Attempt to replace the whole subreg by a (possibly narrower or wider)
6122 memory reference. If this is possible, return this new memory
6123 reference, and push all required address reloads. Otherwise,
6124 return NULL.
6125
6126 OPNUM and TYPE identify the purpose of the reload.
6127
6128 IND_LEVELS says how many levels of indirect addressing are
6129 supported at this point in the address.
6130
6131 INSN, if nonzero, is the insn in which we do the reload. It is used
6132 to determine where to put USEs for pseudos that we have to replace with
6133 stack slots. */
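/* Illustrative example: if (reg:SI 120) lives in a stack slot, then
 (subreg:HI (reg:SI 120) 2) can be replaced by an HImode MEM at the
 correspondingly adjusted offset within that slot (simplify_subreg handles
 the byte offset, including endianness), after which only the new MEM's
 address may still need reloading. Register number and modes are made up
 for illustration. */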
6134
6135static rtx
6136find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6137 int ind_levels, rtx_insn *insn,
6138 int *address_reloaded)
6139{
6140 machine_mode outer_mode = GET_MODE (x);
6141 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6142 int regno = REGNO (SUBREG_REG (x));
6143 int reloaded = 0;
6144 rtx tem, orig;
6145 poly_int64 offset;
6146
6147 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6148
6149 /* We cannot replace the subreg with a modified memory reference if:
6150
6151 - we have a paradoxical subreg that implicitly acts as a zero or
6152 sign extension operation due to LOAD_EXTEND_OP;
6153
6154 - we have a subreg that is implicitly supposed to act on the full
6155 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6156
6157 - the address of the equivalent memory location is mode-dependent; or
6158
6159 - we have a paradoxical subreg and the resulting memory is not
6160 sufficiently aligned to allow access in the wider mode.
6161
6162 In addition, we choose not to perform the replacement for *any*
6163 paradoxical subreg, even if it were possible in principle. This
6164 is to avoid generating wider memory references than necessary.
6165
6166 This corresponds to how previous versions of reload used to handle
6167 paradoxical subregs where no address reload was required. */
6168
6169 if (paradoxical_subreg_p (x))
6170 return NULL;
6171
6172 if (WORD_REGISTER_OPERATIONS
6173 && partial_subreg_p (outer_mode, inner_mode)
6174 && known_equal_after_align_down (GET_MODE_SIZE (outer_mode) - 1,
6175 GET_MODE_SIZE (inner_mode) - 1,
6176 UNITS_PER_WORD))
6177 return NULL;
6178
6179 /* Since we don't attempt to handle paradoxical subregs, we can just
6180 call into simplify_subreg, which will handle all remaining checks
6181 for us. */
6182 orig = make_memloc (SUBREG_REG (x), regno);
6183 offset = SUBREG_BYTE (x);
6184 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6185 if (!tem || !MEM_P (tem))
6186 return NULL;
6187
6188 /* Now push all required address reloads, if any. */
6189 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6190 XEXP (tem, 0), &XEXP (tem, 0),
6191 opnum, type, ind_levels, insn);
6192 /* ??? Do we need to handle nonzero offsets somehow? */
6193 if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6194 push_reg_equiv_alt_mem (regno, tem);
6195
6196 /* For some processors an address may be valid in the original mode but
6197 not in a smaller mode. For example, ARM accepts a scaled index register
6198 in SImode but not in HImode. Note that this is only a problem if the
6199 address in reg_equiv_mem is already invalid in the new mode; other
6200 cases would be fixed by find_reloads_address as usual.
6201
6202 ??? We attempt to handle such cases here by doing an additional reload
6203 of the full address after the usual processing by find_reloads_address.
6204 Note that this may not work in the general case, but it seems to cover
6205 the cases where this situation currently occurs. A more general fix
6206 might be to reload the *value* instead of the address, but this would
6207 not be expected by the callers of this routine as-is.
6208
6209 If find_reloads_address already completely replaced the address, there
6210 is nothing further to do. */
6211 if (reloaded == 0
6212 && reg_equiv_mem (regno) != 0
6213 && !strict_memory_address_addr_space_p
6214 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6215 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6216 {
6217 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6218 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6219 MEM, SCRATCH, insn),
6220 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6221 reloaded = 1;
6222 }
6223
6224 /* If this is not a toplevel operand, find_reloads doesn't see this
6225 substitution. We have to emit a USE of the pseudo so that
6226 delete_output_reload can see it. */
6227 if (replace_reloads && recog_data.operand[opnum] != x)
6228 /* We mark the USE with QImode so that we recognize it as one that
6229 can be safely deleted at the end of reload. */
6230 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6231 QImode);
6232
6233 if (address_reloaded)
6234 *address_reloaded = reloaded;
6235
6236 return tem;
6237}
6238
6239/* Substitute into the current INSN the registers into which we have reloaded
6240 the things that need reloading. The array `replacements'
6241 contains the locations of all pointers that must be changed
6242 and says what to replace them with.
6243
6244 All substitutions are made in place, directly within INSN. */
6245
6246void
6247subst_reloads (rtx_insn *insn)
6248{
6249 int i;
6250
6251 for (i = 0; i < n_replacements; i++)
6252 {
6253 struct replacement *r = &replacements[i];
6254 rtx reloadreg = rld[r->what].reg_rtx;
6255 if (reloadreg)
6256 {
6257#ifdef DEBUG_RELOAD
6258 /* This checking takes a very long time on some platforms
6259 causing the gcc.c-torture/compile/limits-fnargs.c test
6260 to time out during testing. See PR 31850.
6261
6262 Internal consistency test. Check that we don't modify
6263 anything in the equivalence arrays. Whenever something from
6264 those arrays needs to be reloaded, it must be unshared before
6265 being substituted into; the equivalence must not be modified.
6266 Otherwise, if the equivalence is used after that, it will
6267 have been modified, and the thing substituted (probably a
6268 register) is likely overwritten and not a usable equivalence. */
6269 int check_regno;
6270
6271 for (check_regno = 0; check_regno < max_regno; check_regno++)
6272 {
6273#define CHECK_MODF(ARRAY) \
6274 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6275 || !loc_mentioned_in_p (r->where, \
6276 (*reg_equivs)[check_regno].ARRAY))
6277
6278 CHECK_MODF (constant);
6279 CHECK_MODF (memory_loc);
6280 CHECK_MODF (address);
6281 CHECK_MODF (mem);
6282#undef CHECK_MODF
6283 }
6284#endif /* DEBUG_RELOAD */
6285
6286 /* If we're replacing a LABEL_REF with a register, there must
6287 already be an indication (to e.g. flow) which label this
6288 register refers to. */
6289 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6290 || !JUMP_P (insn)
6291 || find_reg_note (insn,
6292 REG_LABEL_OPERAND,
6293 XEXP (*r->where, 0))
6294 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6295
6296 /* Encapsulate RELOADREG so its machine mode matches what
6297 used to be there. Note that gen_lowpart_common will
6298 do the wrong thing if RELOADREG is multi-word. RELOADREG
6299 will always be a REG here. */
6300 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6301 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6302
6303 *r->where = reloadreg;
6304 }
6305 /* If reload got no reg and isn't optional, something's wrong. */
6306 else
6307 gcc_assert (rld[r->what].optional);
6308 }
6309}
6310
6311/* Make a copy of any replacements being done into X and move those
6312 copies to locations in Y, a copy of X. */
6313
6314void
6315copy_replacements (rtx x, rtx y)
6316{
6317 copy_replacements_1 (&x, &y, n_replacements);
6318}
6319
6320static void
6321copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6322{
6323 int i, j;
6324 rtx x, y;
6325 struct replacement *r;
6326 enum rtx_code code;
6327 const char *fmt;
6328
6329 for (j = 0; j < orig_replacements; j++)
6330 if (replacements[j].where == px)
6331 {
6332 r = &replacements[n_replacements++];
6333 r->where = py;
6334 r->what = replacements[j].what;
6335 r->mode = replacements[j].mode;
6336 }
6337
6338 x = *px;
6339 y = *py;
6340 code = GET_CODE (x);
6341 fmt = GET_RTX_FORMAT (code);
6342
6343 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6344 {
6345 if (fmt[i] == 'e')
6346 copy_replacements_1 (px: &XEXP (x, i), py: &XEXP (y, i), orig_replacements);
6347 else if (fmt[i] == 'E')
6348 for (j = XVECLEN (x, i); --j >= 0; )
6349 copy_replacements_1 (px: &XVECEXP (x, i, j), py: &XVECEXP (y, i, j),
6350 orig_replacements);
6351 }
6352}
6353
6354/* Change any replacements being done to *X to be done to *Y. */
6355
6356void
6357move_replacements (rtx *x, rtx *y)
6358{
6359 int i;
6360
6361 for (i = 0; i < n_replacements; i++)
6362 if (replacements[i].where == x)
6363 replacements[i].where = y;
6364}
6365
6366/* If LOC was scheduled to be replaced by something, return the replacement.
6367 Otherwise, return *LOC. */
6368
6369rtx
6370find_replacement (rtx *loc)
6371{
6372 struct replacement *r;
6373
6374 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6375 {
6376 rtx reloadreg = rld[r->what].reg_rtx;
6377
6378 if (reloadreg && r->where == loc)
6379 {
6380 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6381 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6382
6383 return reloadreg;
6384 }
6385 else if (reloadreg && GET_CODE (*loc) == SUBREG
6386 && r->where == &SUBREG_REG (*loc))
6387 {
6388 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6389 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6390
6391 return simplify_gen_subreg (GET_MODE (*loc), op: reloadreg,
6392 GET_MODE (SUBREG_REG (*loc)),
6393 SUBREG_BYTE (*loc));
6394 }
6395 }
6396
6397 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6398 what's inside and make a new rtl if so. */
6399 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6400 || GET_CODE (*loc) == MULT)
6401 {
6402 rtx x = find_replacement (loc: &XEXP (*loc, 0));
6403 rtx y = find_replacement (loc: &XEXP (*loc, 1));
6404
6405 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6406 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6407 }
6408
6409 return *loc;
6410}
6411
6412/* Return nonzero if register in range [REGNO, ENDREGNO)
6413 appears either explicitly or implicitly in X
6414 other than being stored into (except for earlyclobber operands).
6415
6416 References contained within the substructure at LOC do not count.
6417 LOC may be zero, meaning don't ignore anything.
6418
6419 This is similar to refers_to_regno_p in rtlanal.cc except that we
6420 look at equivalences for pseudos that didn't get hard registers. */
6421
6422static int
6423refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6424 rtx x, rtx *loc)
6425{
6426 int i;
6427 unsigned int r;
6428 RTX_CODE code;
6429 const char *fmt;
6430
6431 if (x == 0)
6432 return 0;
6433
6434 repeat:
6435 code = GET_CODE (x);
6436
6437 switch (code)
6438 {
6439 case REG:
6440 r = REGNO (x);
6441
6442 /* If this is a pseudo, a hard register must not have been allocated.
6443 X must therefore either be a constant or be in memory. */
6444 if (r >= FIRST_PSEUDO_REGISTER)
6445 {
6446 if (reg_equiv_memory_loc (r))
6447 return refers_to_regno_for_reload_p (regno, endregno,
6448 reg_equiv_memory_loc (r),
6449 loc: (rtx*) 0);
6450
6451 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6452 return 0;
6453 }
6454
6455 return endregno > r && regno < END_REGNO (x);
6456
6457 case SUBREG:
6458 /* If this is a SUBREG of a hard reg, we can see exactly which
6459 registers are being modified. Otherwise, handle normally. */
6460 if (REG_P (SUBREG_REG (x))
6461 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6462 {
6463 unsigned int inner_regno = subreg_regno (x);
6464 unsigned int inner_endregno
6465 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6466 ? subreg_nregs (x) : 1);
6467
6468 return endregno > inner_regno && regno < inner_endregno;
6469 }
6470 break;
6471
6472 case CLOBBER:
6473 case SET:
6474 if (&SET_DEST (x) != loc
6475 /* Note setting a SUBREG counts as referring to the REG it is in for
6476 a pseudo but not for hard registers since we can
6477 treat each word individually. */
6478 && ((GET_CODE (SET_DEST (x)) == SUBREG
6479 && loc != &SUBREG_REG (SET_DEST (x))
6480 && REG_P (SUBREG_REG (SET_DEST (x)))
6481 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6482 && refers_to_regno_for_reload_p (regno, endregno,
6483 SUBREG_REG (SET_DEST (x)),
6484 loc))
6485 /* If the output is an earlyclobber operand, this is
6486 a conflict. */
6487 || ((!REG_P (SET_DEST (x))
6488 || earlyclobber_operand_p (SET_DEST (x)))
6489 && refers_to_regno_for_reload_p (regno, endregno,
6490 SET_DEST (x), loc))))
6491 return 1;
6492
6493 if (code == CLOBBER || loc == &SET_SRC (x))
6494 return 0;
6495 x = SET_SRC (x);
6496 goto repeat;
6497
6498 default:
6499 break;
6500 }
6501
6502 /* X does not match, so try its subexpressions. */
6503
6504 fmt = GET_RTX_FORMAT (code);
6505 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6506 {
6507 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6508 {
6509 if (i == 0)
6510 {
6511 x = XEXP (x, 0);
6512 goto repeat;
6513 }
6514 else
6515 if (refers_to_regno_for_reload_p (regno, endregno,
6516 XEXP (x, i), loc))
6517 return 1;
6518 }
6519 else if (fmt[i] == 'E')
6520 {
6521 int j;
6522 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6523 if (loc != &XVECEXP (x, i, j)
6524 && refers_to_regno_for_reload_p (regno, endregno,
6525 XVECEXP (x, i, j), loc))
6526 return 1;
6527 }
6528 }
6529 return 0;
6530}
6531
6532/* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6533 we check if any register number in X conflicts with the relevant register
6534 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6535 contains a MEM (we don't bother checking for memory addresses that can't
6536 conflict because we expect this to be a rare case.
6537
6538 This function is similar to reg_overlap_mentioned_p in rtlanal.cc except
6539 that we look at equivalences for pseudos that didn't get hard registers. */
6540
6541int
6542reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6543{
6544 int regno, endregno;
6545
6546 /* Overly conservative. */
6547 if (GET_CODE (x) == STRICT_LOW_PART
6548 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6549 x = XEXP (x, 0);
6550
6551 /* If either argument is a constant, then modifying X cannot affect IN. */
6552 if (CONSTANT_P (x) || CONSTANT_P (in))
6553 return 0;
6554 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6555 return refers_to_mem_for_reload_p (in);
6556 else if (GET_CODE (x) == SUBREG)
6557 {
6558 regno = REGNO (SUBREG_REG (x));
6559 if (regno < FIRST_PSEUDO_REGISTER)
6560 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6561 GET_MODE (SUBREG_REG (x)),
6562 SUBREG_BYTE (x),
6563 GET_MODE (x));
6564 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6565 ? subreg_nregs (x) : 1);
6566
6567 return refers_to_regno_for_reload_p (regno, endregno, x: in, loc: (rtx*) 0);
6568 }
6569 else if (REG_P (x))
6570 {
6571 regno = REGNO (x);
6572
6573 /* If this is a pseudo, it must not have been assigned a hard register.
6574 Therefore, it must either be in memory or be a constant. */
6575
6576 if (regno >= FIRST_PSEUDO_REGISTER)
6577 {
6578 if (reg_equiv_memory_loc (regno))
6579 return refers_to_mem_for_reload_p (in);
6580 gcc_assert (reg_equiv_constant (regno));
6581 return 0;
6582 }
6583
6584 endregno = END_REGNO (x);
6585
6586 return refers_to_regno_for_reload_p (regno, endregno, x: in, loc: (rtx*) 0);
6587 }
6588 else if (MEM_P (x))
6589 return refers_to_mem_for_reload_p (in);
6590 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC)
6591 return reg_mentioned_p (x, in);
6592 else
6593 {
6594 gcc_assert (GET_CODE (x) == PLUS);
6595
6596 /* We actually want to know if X is mentioned somewhere inside IN.
6597 We must not say that (plus (sp) (const_int 124)) is in
6598 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6599 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6600 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6601 while (MEM_P (in))
6602 in = XEXP (in, 0);
6603 if (REG_P (in))
6604 return 0;
6605 else if (GET_CODE (in) == PLUS)
6606 return (rtx_equal_p (x, in)
6607 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6608 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6609 else
6610 return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6611 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6612 }
6613}
6614
6615/* Return nonzero if anything in X contains a MEM. Look also for pseudo
6616 registers. */
6617
6618static int
6619refers_to_mem_for_reload_p (rtx x)
6620{
6621 const char *fmt;
6622 int i;
6623
6624 if (MEM_P (x))
6625 return 1;
6626
6627 if (REG_P (x))
6628 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6629 && reg_equiv_memory_loc (REGNO (x)));
6630
6631 fmt = GET_RTX_FORMAT (GET_CODE (x));
6632 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6633 if (fmt[i] == 'e'
6634 && (MEM_P (XEXP (x, i))
6635 || refers_to_mem_for_reload_p (XEXP (x, i))))
6636 return 1;
6637
6638 return 0;
6639}
6640
6641/* Check the insns before INSN to see if there is a suitable register
6642 containing the same value as GOAL.
6643 If OTHER is -1, look for a register in class RCLASS.
6644 Otherwise, just see if register number OTHER shares GOAL's value.
6645
6646 Return an rtx for the register found, or zero if none is found.
6647
6648 If RELOAD_REG_P is (short *)1,
6649 we reject any hard reg that appears in reload_reg_rtx
6650 because such a hard reg is also needed coming into this insn.
6651
6652 If RELOAD_REG_P is any other nonzero value,
6653 it is a vector indexed by hard reg number
6654 and we reject any hard reg whose element in the vector is nonnegative
6655 as well as any that appears in reload_reg_rtx.
6656
6657 If GOAL is zero, then GOALREG is a register number; we look
6658 for an equivalent for that register.
6659
6660 MODE is the machine mode of the value we want an equivalence for.
6661 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6662
6663 This function is used by jump.cc as well as in the reload pass.
6664
6665 If GOAL is the sum of the stack pointer and a constant, we treat it
6666 as if it were a constant except that sp is required to be unchanging. */
6667
6668rtx
6669find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6670 short *reload_reg_p, int goalreg, machine_mode mode)
6671{
6672 rtx_insn *p = insn;
6673 rtx goaltry, valtry, value;
6674 rtx_insn *where;
6675 rtx pat;
6676 int regno = -1;
6677 int valueno;
6678 int goal_mem = 0;
6679 int goal_const = 0;
6680 int goal_mem_addr_varies = 0;
6681 int need_stable_sp = 0;
6682 int nregs;
6683 int valuenregs;
6684 int num = 0;
6685
6686 if (goal == 0)
6687 regno = goalreg;
6688 else if (REG_P (goal))
6689 regno = REGNO (goal);
6690 else if (MEM_P (goal))
6691 {
6692 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6693 if (MEM_VOLATILE_P (goal))
6694 return 0;
6695 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6696 return 0;
6697 /* An address with side effects must be reexecuted. */
6698 switch (code)
6699 {
6700 case POST_INC:
6701 case PRE_INC:
6702 case POST_DEC:
6703 case PRE_DEC:
6704 case POST_MODIFY:
6705 case PRE_MODIFY:
6706 return 0;
6707 default:
6708 break;
6709 }
6710 goal_mem = 1;
6711 }
6712 else if (CONSTANT_P (goal))
6713 goal_const = 1;
6714 else if (GET_CODE (goal) == PLUS
6715 && XEXP (goal, 0) == stack_pointer_rtx
6716 && CONSTANT_P (XEXP (goal, 1)))
6717 goal_const = need_stable_sp = 1;
6718 else if (GET_CODE (goal) == PLUS
6719 && XEXP (goal, 0) == frame_pointer_rtx
6720 && CONSTANT_P (XEXP (goal, 1)))
6721 goal_const = 1;
6722 else
6723 return 0;
6724
6725 num = 0;
6726 /* Scan insns back from INSN, looking for one that copies
6727 a value into or out of GOAL.
6728 Stop and give up if we reach a label. */
6729
6730 while (1)
6731 {
6732 p = PREV_INSN (insn: p);
6733 if (p && DEBUG_INSN_P (p))
6734 continue;
6735 num++;
6736 if (p == 0 || LABEL_P (p)
6737 || num > param_max_reload_search_insns)
6738 return 0;
6739
6740 /* Don't reuse register contents from before a setjmp-type
6741 function call; on the second return (from the longjmp) it
6742 might have been clobbered by a later reuse. It doesn't
6743 seem worthwhile to actually go and see if it is actually
6744 reused even if that information would be readily available;
6745 just don't reuse it across the setjmp call. */
6746 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6747 return 0;
6748
6749 if (NONJUMP_INSN_P (p)
6750 /* If we don't want spill regs ... */
6751 && (! (reload_reg_p != 0
6752 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6753 /* ... then ignore insns introduced by reload; they aren't
6754 useful and can cause results in reload_as_needed to be
6755 different from what they were when calculating the need for
6756 spills. If we notice an input-reload insn here, we will
6757 reject it below, but it might hide a usable equivalent.
6758 That makes bad code. It may even fail: perhaps no reg was
6759 spilled for this insn because it was assumed we would find
6760 that equivalent. */
6761 || INSN_UID (insn: p) < reload_first_uid))
6762 {
6763 rtx tem;
6764 pat = single_set (insn: p);
6765
6766 /* First check for something that sets some reg equal to GOAL. */
6767 if (pat != 0
6768 && ((regno >= 0
6769 && true_regnum (SET_SRC (pat)) == regno
6770 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6771 ||
6772 (regno >= 0
6773 && true_regnum (SET_DEST (pat)) == regno
6774 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6775 ||
6776 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6777 /* When looking for stack pointer + const,
6778 make sure we don't use a stack adjust. */
6779 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), in: goal)
6780 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6781 || (goal_mem
6782 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6783 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6784 || (goal_mem
6785 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6786 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6787 /* If we are looking for a constant,
6788 and something equivalent to that constant was copied
6789 into a reg, we can use that reg. */
6790 || (goal_const && REG_NOTES (p) != 0
6791 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6792 && ((rtx_equal_p (XEXP (tem, 0), goal)
6793 && (valueno
6794 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6795 || (REG_P (SET_DEST (pat))
6796 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6797 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6798 && CONST_INT_P (goal)
6799 && (goaltry = operand_subword (XEXP (tem, 0), 0,
6800 0, VOIDmode)) != 0
6801 && rtx_equal_p (goal, goaltry)
6802 && (valtry
6803 = operand_subword (SET_DEST (pat), 0, 0,
6804 VOIDmode))
6805 && (valueno = true_regnum (valtry)) >= 0)))
6806 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6807 NULL_RTX))
6808 && REG_P (SET_DEST (pat))
6809 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6810 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6811 && CONST_INT_P (goal)
6812 && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6813 VOIDmode)) != 0
6814 && rtx_equal_p (goal, goaltry)
6815 && (valtry
6816 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6817 && (valueno = true_regnum (valtry)) >= 0)))
6818 {
6819 if (other >= 0)
6820 {
6821 if (valueno != other)
6822 continue;
6823 }
6824 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6825 continue;
6826 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6827 mode, regno: valueno))
6828 continue;
6829 value = valtry;
6830 where = p;
6831 break;
6832 }
6833 }
6834 }
6835
6836 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6837 (or copying VALUE into GOAL, if GOAL is also a register).
6838 Now verify that VALUE is really valid. */
6839
6840 /* VALUENO is the register number of VALUE; a hard register. */
6841
6842 /* Don't try to re-use something that is killed in this insn. We want
6843 to be able to trust REG_UNUSED notes. */
6844 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6845 return 0;
6846
6847 /* If we propose to get the value from the stack pointer or if GOAL is
6848 a MEM based on the stack pointer, we need a stable SP. */
6849 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6850 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6851 in: goal)))
6852 need_stable_sp = 1;
6853
6854 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6855 if (GET_MODE (value) != mode)
6856 return 0;
6857
6858 /* Reject VALUE if it was loaded from GOAL
6859 and is also a register that appears in the address of GOAL. */
6860
6861 if (goal_mem && value == SET_DEST (single_set (where))
6862 && refers_to_regno_for_reload_p (regno: valueno, endregno: end_hard_regno (mode, regno: valueno),
6863 x: goal, loc: (rtx*) 0))
6864 return 0;
6865
6866 /* Reject registers that overlap GOAL. */
6867
6868 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6869 nregs = hard_regno_nregs (regno, mode);
6870 else
6871 nregs = 1;
6872 valuenregs = hard_regno_nregs (regno: valueno, mode);
6873
6874 if (!goal_mem && !goal_const
6875 && regno + nregs > valueno && regno < valueno + valuenregs)
6876 return 0;
6877
6878 /* Reject VALUE if it is one of the regs reserved for reloads.
6879 Reload1 knows how to reuse them anyway, and it would get
6880 confused if we allocated one without its knowledge.
6881 (Now that insns introduced by reload are ignored above,
6882 this case shouldn't happen, but I'm not positive.) */
6883
6884 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6885 {
6886 int i;
6887 for (i = 0; i < valuenregs; ++i)
6888 if (reload_reg_p[valueno + i] >= 0)
6889 return 0;
6890 }
6891
6892 /* Reject VALUE if it is a register being used for an input reload
6893 even if it is not one of those reserved. */
6894
6895 if (reload_reg_p != 0)
6896 {
6897 int i;
6898 for (i = 0; i < n_reloads; i++)
6899 if (rld[i].reg_rtx != 0
6900 && rld[i].in
6901 && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6902 && (int) END_REGNO (x: rld[i].reg_rtx) > valueno)
6903 return 0;
6904 }
6905
6906 if (goal_mem)
6907 /* We must treat frame pointer as varying here,
6908 since it can vary--in a nonlocal goto as generated by expand_goto. */
6909 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6910
6911 /* Now verify that the values of GOAL and VALUE remain unaltered
6912 until INSN is reached. */
6913
6914 p = insn;
6915 while (1)
6916 {
6917 p = PREV_INSN (insn: p);
6918 if (p == where)
6919 return value;
6920
6921 /* Don't trust the conversion past a function call
6922 if either of the two is in a call-clobbered register, or memory. */
6923 if (CALL_P (p))
6924 {
6925 if (goal_mem || need_stable_sp)
6926 return 0;
6927
6928 function_abi callee_abi = insn_callee_abi (p);
6929 if (regno >= 0
6930 && regno < FIRST_PSEUDO_REGISTER
6931 && callee_abi.clobbers_reg_p (mode, regno))
6932 return 0;
6933
6934 if (valueno >= 0
6935 && valueno < FIRST_PSEUDO_REGISTER
6936 && callee_abi.clobbers_reg_p (mode, regno: valueno))
6937 return 0;
6938 }
6939
6940 if (INSN_P (p))
6941 {
6942 pat = PATTERN (insn: p);
6943
6944 /* Watch out for unspec_volatile, and volatile asms. */
6945 if (volatile_insn_p (pat))
6946 return 0;
6947
6948 /* If this insn P stores in either GOAL or VALUE, return 0.
6949 If GOAL is a memory ref and this insn writes memory, return 0.
6950 If GOAL is a memory ref and its address is not constant,
6951 and this insn P changes a register used in GOAL, return 0. */
6952
6953 if (GET_CODE (pat) == COND_EXEC)
6954 pat = COND_EXEC_CODE (pat);
6955 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6956 {
6957 rtx dest = SET_DEST (pat);
6958 while (GET_CODE (dest) == SUBREG
6959 || GET_CODE (dest) == ZERO_EXTRACT
6960 || GET_CODE (dest) == STRICT_LOW_PART)
6961 dest = XEXP (dest, 0);
6962 if (REG_P (dest))
6963 {
6964 int xregno = REGNO (dest);
6965 int end_xregno = END_REGNO (x: dest);
6966 if (xregno < regno + nregs && end_xregno > regno)
6967 return 0;
6968 if (xregno < valueno + valuenregs
6969 && end_xregno > valueno)
6970 return 0;
6971 if (goal_mem_addr_varies
6972 && reg_overlap_mentioned_for_reload_p (x: dest, in: goal))
6973 return 0;
6974 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6975 return 0;
6976 }
6977 else if (goal_mem && MEM_P (dest)
6978 && ! push_operand (dest, GET_MODE (dest)))
6979 return 0;
6980 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6981 && reg_equiv_memory_loc (regno) != 0)
6982 return 0;
6983 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6984 return 0;
6985 }
6986 else if (GET_CODE (pat) == PARALLEL)
6987 {
6988 int i;
6989 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6990 {
6991 rtx v1 = XVECEXP (pat, 0, i);
6992 if (GET_CODE (v1) == COND_EXEC)
6993 v1 = COND_EXEC_CODE (v1);
6994 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6995 {
6996 rtx dest = SET_DEST (v1);
6997 while (GET_CODE (dest) == SUBREG
6998 || GET_CODE (dest) == ZERO_EXTRACT
6999 || GET_CODE (dest) == STRICT_LOW_PART)
7000 dest = XEXP (dest, 0);
7001 if (REG_P (dest))
7002 {
7003 int xregno = REGNO (dest);
7004 int end_xregno = END_REGNO (x: dest);
7005 if (xregno < regno + nregs
7006 && end_xregno > regno)
7007 return 0;
7008 if (xregno < valueno + valuenregs
7009 && end_xregno > valueno)
7010 return 0;
7011 if (goal_mem_addr_varies
7012 && reg_overlap_mentioned_for_reload_p (x: dest,
7013 in: goal))
7014 return 0;
7015 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7016 return 0;
7017 }
7018 else if (goal_mem && MEM_P (dest)
7019 && ! push_operand (dest, GET_MODE (dest)))
7020 return 0;
7021 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7022 && reg_equiv_memory_loc (regno) != 0)
7023 return 0;
7024 else if (need_stable_sp
7025 && push_operand (dest, GET_MODE (dest)))
7026 return 0;
7027 }
7028 }
7029 }
7030
7031 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7032 {
7033 rtx link;
7034
7035 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7036 link = XEXP (link, 1))
7037 {
7038 pat = XEXP (link, 0);
7039 if (GET_CODE (pat) == CLOBBER)
7040 {
7041 rtx dest = SET_DEST (pat);
7042
7043 if (REG_P (dest))
7044 {
7045 int xregno = REGNO (dest);
7046 int end_xregno = END_REGNO (x: dest);
7047
7048 if (xregno < regno + nregs
7049 && end_xregno > regno)
7050 return 0;
7051 else if (xregno < valueno + valuenregs
7052 && end_xregno > valueno)
7053 return 0;
7054 else if (goal_mem_addr_varies
7055 && reg_overlap_mentioned_for_reload_p (x: dest,
7056 in: goal))
7057 return 0;
7058 }
7059
7060 else if (goal_mem && MEM_P (dest)
7061 && ! push_operand (dest, GET_MODE (dest)))
7062 return 0;
7063 else if (need_stable_sp
7064 && push_operand (dest, GET_MODE (dest)))
7065 return 0;
7066 }
7067 }
7068 }
7069
7070#if AUTO_INC_DEC
7071 /* If this insn auto-increments or auto-decrements
7072 either regno or valueno, return 0 now.
7073 If GOAL is a memory ref and its address is not constant,
7074 and this insn P increments a register used in GOAL, return 0. */
7075 {
7076 rtx link;
7077
7078 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7079 if (REG_NOTE_KIND (link) == REG_INC
7080 && REG_P (XEXP (link, 0)))
7081 {
7082 int incno = REGNO (XEXP (link, 0));
7083 if (incno < regno + nregs && incno >= regno)
7084 return 0;
7085 if (incno < valueno + valuenregs && incno >= valueno)
7086 return 0;
7087 if (goal_mem_addr_varies
7088 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7089 goal))
7090 return 0;
7091 }
7092 }
7093#endif
7094 }
7095 }
7096}
7097
7098/* Find a place where INCED appears in an increment or decrement operator
7099 within X, and return the amount INCED is incremented or decremented by.
7100 The value is always positive. */
7101
7102static poly_int64
7103find_inc_amount (rtx x, rtx inced)
7104{
7105 enum rtx_code code = GET_CODE (x);
7106 const char *fmt;
7107 int i;
7108
7109 if (code == MEM)
7110 {
7111 rtx addr = XEXP (x, 0);
7112 if ((GET_CODE (addr) == PRE_DEC
7113 || GET_CODE (addr) == POST_DEC
7114 || GET_CODE (addr) == PRE_INC
7115 || GET_CODE (addr) == POST_INC)
7116 && XEXP (addr, 0) == inced)
7117 return GET_MODE_SIZE (GET_MODE (x));
7118 else if ((GET_CODE (addr) == PRE_MODIFY
7119 || GET_CODE (addr) == POST_MODIFY)
7120 && GET_CODE (XEXP (addr, 1)) == PLUS
7121 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7122 && XEXP (addr, 0) == inced
7123 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7124 {
7125 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7126 return i < 0 ? -i : i;
7127 }
7128 }
7129
7130 fmt = GET_RTX_FORMAT (code);
7131 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7132 {
7133 if (fmt[i] == 'e')
7134 {
7135 poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7136 if (maybe_ne (a: tem, b: 0))
7137 return tem;
7138 }
7139 if (fmt[i] == 'E')
7140 {
7141 int j;
7142 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7143 {
7144 poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7145 if (maybe_ne (a: tem, b: 0))
7146 return tem;
7147 }
7148 }
7149 }
7150
7151 return 0;
7152}
7153
7154/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7155 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7156
7157static int
7158reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7159 rtx insn)
7160{
7161 rtx link;
7162
7163 if (!AUTO_INC_DEC)
7164 return 0;
7165
7166 gcc_assert (insn);
7167
7168 if (! INSN_P (insn))
7169 return 0;
7170
7171 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7172 if (REG_NOTE_KIND (link) == REG_INC)
7173 {
7174 unsigned int test = (int) REGNO (XEXP (link, 0));
7175 if (test >= regno && test < endregno)
7176 return 1;
7177 }
7178 return 0;
7179}
7180
7181/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7182 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7183 REG_INC. REGNO must refer to a hard register. */
7184
7185int
7186regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7187 int sets)
7188{
7189 /* regno must be a hard register. */
7190 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7191
7192 unsigned int endregno = end_hard_regno (mode, regno);
7193
7194 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7195 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7196 && REG_P (XEXP (PATTERN (insn), 0)))
7197 {
7198 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7199
7200 return test >= regno && test < endregno;
7201 }
7202
7203 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7204 return 1;
7205
7206 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7207 {
7208 int i = XVECLEN (PATTERN (insn), 0) - 1;
7209
7210 for (; i >= 0; i--)
7211 {
7212 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7213 if ((GET_CODE (elt) == CLOBBER
7214 || (sets == 1 && GET_CODE (elt) == SET))
7215 && REG_P (XEXP (elt, 0)))
7216 {
7217 unsigned int test = REGNO (XEXP (elt, 0));
7218
7219 if (test >= regno && test < endregno)
7220 return 1;
7221 }
7222 if (sets == 2
7223 && reg_inc_found_and_valid_p (regno, endregno, insn: elt))
7224 return 1;
7225 }
7226 }
7227
7228 return 0;
7229}
7230
7231/* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7232rtx
7233reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7234{
7235 int regno;
7236
7237 if (GET_MODE (reloadreg) == mode)
7238 return reloadreg;
7239
7240 regno = REGNO (reloadreg);
7241
7242 if (REG_WORDS_BIG_ENDIAN)
7243 regno += ((int) REG_NREGS (reloadreg)
7244 - (int) hard_regno_nregs (regno, mode));
7245
7246 return gen_rtx_REG (mode, regno);
7247}
7248
7249static const char *const reload_when_needed_name[] =
7250{
7251 "RELOAD_FOR_INPUT",
7252 "RELOAD_FOR_OUTPUT",
7253 "RELOAD_FOR_INSN",
7254 "RELOAD_FOR_INPUT_ADDRESS",
7255 "RELOAD_FOR_INPADDR_ADDRESS",
7256 "RELOAD_FOR_OUTPUT_ADDRESS",
7257 "RELOAD_FOR_OUTADDR_ADDRESS",
7258 "RELOAD_FOR_OPERAND_ADDRESS",
7259 "RELOAD_FOR_OPADDR_ADDR",
7260 "RELOAD_OTHER",
7261 "RELOAD_FOR_OTHER_ADDRESS"
7262};
7263
7264/* These functions are used to print the variables set by 'find_reloads' */
7265
7266DEBUG_FUNCTION void
7267debug_reload_to_stream (FILE *f)
7268{
7269 int r;
7270 const char *prefix;
7271
7272 if (! f)
7273 f = stderr;
7274 for (r = 0; r < n_reloads; r++)
7275 {
7276 fprintf (stream: f, format: "Reload %d: ", r);
7277
7278 if (rld[r].in != 0)
7279 {
7280 fprintf (stream: f, format: "reload_in (%s) = ",
7281 GET_MODE_NAME (rld[r].inmode));
7282 print_inline_rtx (f, rld[r].in, 24);
7283 fprintf (stream: f, format: "\n\t");
7284 }
7285
7286 if (rld[r].out != 0)
7287 {
7288 fprintf (stream: f, format: "reload_out (%s) = ",
7289 GET_MODE_NAME (rld[r].outmode));
7290 print_inline_rtx (f, rld[r].out, 24);
7291 fprintf (stream: f, format: "\n\t");
7292 }
7293
7294 fprintf (stream: f, format: "%s, ", reg_class_names[(int) rld[r].rclass]);
7295
7296 fprintf (stream: f, format: "%s (opnum = %d)",
7297 reload_when_needed_name[(int) rld[r].when_needed],
7298 rld[r].opnum);
7299
7300 if (rld[r].optional)
7301 fprintf (stream: f, format: ", optional");
7302
7303 if (rld[r].nongroup)
7304 fprintf (stream: f, format: ", nongroup");
7305
7306 if (maybe_ne (a: rld[r].inc, b: 0))
7307 {
7308 fprintf (stream: f, format: ", inc by ");
7309 print_dec (value: rld[r].inc, file: f, sgn: SIGNED);
7310 }
7311
7312 if (rld[r].nocombine)
7313 fprintf (stream: f, format: ", can't combine");
7314
7315 if (rld[r].secondary_p)
7316 fprintf (stream: f, format: ", secondary_reload_p");
7317
7318 if (rld[r].in_reg != 0)
7319 {
7320 fprintf (stream: f, format: "\n\treload_in_reg: ");
7321 print_inline_rtx (f, rld[r].in_reg, 24);
7322 }
7323
7324 if (rld[r].out_reg != 0)
7325 {
7326 fprintf (stream: f, format: "\n\treload_out_reg: ");
7327 print_inline_rtx (f, rld[r].out_reg, 24);
7328 }
7329
7330 if (rld[r].reg_rtx != 0)
7331 {
7332 fprintf (stream: f, format: "\n\treload_reg_rtx: ");
7333 print_inline_rtx (f, rld[r].reg_rtx, 24);
7334 }
7335
7336 prefix = "\n\t";
7337 if (rld[r].secondary_in_reload != -1)
7338 {
7339 fprintf (stream: f, format: "%ssecondary_in_reload = %d",
7340 prefix, rld[r].secondary_in_reload);
7341 prefix = ", ";
7342 }
7343
7344 if (rld[r].secondary_out_reload != -1)
7345 fprintf (stream: f, format: "%ssecondary_out_reload = %d\n",
7346 prefix, rld[r].secondary_out_reload);
7347
7348 prefix = "\n\t";
7349 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7350 {
7351 fprintf (stream: f, format: "%ssecondary_in_icode = %s", prefix,
7352 insn_data[rld[r].secondary_in_icode].name);
7353 prefix = ", ";
7354 }
7355
7356 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7357 fprintf (stream: f, format: "%ssecondary_out_icode = %s", prefix,
7358 insn_data[rld[r].secondary_out_icode].name);
7359
7360 fprintf (stream: f, format: "\n");
7361 }
7362}
7363
7364DEBUG_FUNCTION void
7365debug_reload (void)
7366{
7367 debug_reload_to_stream (stderr);
7368}
7369

source code of gcc/reload.cc