1/* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2025 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* This file contains subroutines used only from the file reload1.cc.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
681 happens every time find_reloads is called.
692 happens only when REPLACE is 1, which is only when
70actually doing the reloads, not when just counting them.
71
72Using a reload register for several reloads in one insn:
73
74When an insn has reloads, it is considered as having three parts:
75the input reloads, the insn itself after reloading, and the output reloads.
76Reloads of values used in memory addresses are often needed for only one part.
77
78When this is so, reload_when_needed records which part needs the reload.
79Two reloads for different parts of the insn can share the same reload
80register.
81
82When a reload is used for addresses in multiple parts, or when it is
83an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84a register with any other reload. */
85
86#define REG_OK_STRICT
87
88/* We do not enable this with CHECKING_P, since it is awfully slow. */
89#undef DEBUG_RELOAD
90
91#include "config.h"
92#include "system.h"
93#include "coretypes.h"
94#include "backend.h"
95#include "target.h"
96#include "rtl.h"
97#include "tree.h"
98#include "df.h"
99#include "memmodel.h"
100#include "tm_p.h"
101#include "optabs.h"
102#include "regs.h"
103#include "ira.h"
104#include "recog.h"
105#include "rtl-error.h"
106#include "reload.h"
107#include "addresses.h"
108#include "function-abi.h"
109
110/* True if X is a constant that can be forced into the constant pool.
111 MODE is the mode of the operand, or VOIDmode if not known. */
112#define CONST_POOL_OK_P(MODE, X) \
113 ((MODE) != VOIDmode \
114 && CONSTANT_P (X) \
115 && GET_CODE (X) != HIGH \
116 && !targetm.cannot_force_const_mem (MODE, X))
117
118/* True if C is a non-empty register class that has too few registers
119 to be safely used as a reload target class. */
120
121static inline bool
122small_register_class_p (reg_class_t rclass)
123{
124 return (reg_class_size [(int) rclass] == 1
125 || (reg_class_size [(int) rclass] >= 1
126 && targetm.class_likely_spilled_p (rclass)));
127}
128
129
130/* All reloads of the current insn are recorded here. See reload.h for
131 comments. */
132int n_reloads;
133struct reload rld[MAX_RELOADS];
134
135/* All the "earlyclobber" operands of the current insn
136 are recorded here. */
137int n_earlyclobbers;
138rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139
140int reload_n_operands;
141
142/* Replacing reloads.
143
144 If `replace_reloads' is nonzero, then as each reload is recorded
145 an entry is made for it in the table `replacements'.
146 Then later `subst_reloads' can look through that table and
147 perform all the replacements needed. */
148
149/* Nonzero means record the places to replace. */
150static int replace_reloads;
151
152/* Each replacement is recorded with a structure like this. */
153struct replacement
154{
155 rtx *where; /* Location to store in */
156 int what; /* which reload this is for */
157 machine_mode mode; /* mode it must have */
158};
159
160static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161
162/* Number of replacements currently recorded. */
163static int n_replacements;
164
165/* Used to track what is modified by an operand. */
166struct decomposition
167{
168 int reg_flag; /* Nonzero if referencing a register. */
169 int safe; /* Nonzero if this can't conflict with anything. */
170 rtx base; /* Base address for MEM. */
171 poly_int64 start; /* Starting offset or register number. */
172 poly_int64 end; /* Ending offset or register number. */
173};
174
175/* Save MEMs needed to copy from one class of registers to another. One MEM
176 is used per mode, but normally only one or two modes are ever used.
177
178 We keep two versions, before and after register elimination. The one
179 after register elimination is record separately for each operand. This
180 is done in case the address is not valid to be sure that we separately
181 reload each. */
182
183static rtx secondary_memlocs[NUM_MACHINE_MODES];
184static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
185static int secondary_memlocs_elim_used = 0;
186
187/* The instruction we are doing reloads for;
188 so we can test whether a register dies in it. */
189static rtx_insn *this_insn;
190
191/* Nonzero if this instruction is a user-specified asm with operands. */
192static int this_insn_is_asm;
193
194/* If hard_regs_live_known is nonzero,
195 we can tell which hard regs are currently live,
196 at least enough to succeed in choosing dummy reloads. */
197static int hard_regs_live_known;
198
199/* Indexed by hard reg number,
200 element is nonnegative if hard reg has been spilled.
201 This vector is passed to `find_reloads' as an argument
202 and is not changed here. */
203static short *static_reload_reg_p;
204
205/* Set to 1 in subst_reg_equivs if it changes anything. */
206static int subst_reg_equivs_changed;
207
208/* On return from push_reload, holds the reload-number for the OUT
209 operand, which can be different for that from the input operand. */
210static int output_reloadnum;
211
212 /* Compare two RTX's. */
213#define MATCHES(x, y) \
214 (x == y || (x != 0 && (REG_P (x) \
215 ? REG_P (y) && REGNO (x) == REGNO (y) \
216 : rtx_equal_p (x, y) && ! side_effects_p (x))))
217
218 /* Indicates if two reloads purposes are for similar enough things that we
219 can merge their reloads. */
220#define MERGABLE_RELOADS(when1, when2, op1, op2) \
221 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
222 || ((when1) == (when2) && (op1) == (op2)) \
223 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
224 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
225 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
226 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
227 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
228
229 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
230#define MERGE_TO_OTHER(when1, when2, op1, op2) \
231 ((when1) != (when2) \
232 || ! ((op1) == (op2) \
233 || (when1) == RELOAD_FOR_INPUT \
234 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
235 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
236
237 /* If we are going to reload an address, compute the reload type to
238 use. */
239#define ADDR_TYPE(type) \
240 ((type) == RELOAD_FOR_INPUT_ADDRESS \
241 ? RELOAD_FOR_INPADDR_ADDRESS \
242 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
243 ? RELOAD_FOR_OUTADDR_ADDRESS \
244 : (type)))
245
246static int push_secondary_reload (int, rtx, int, int, enum reg_class,
247 machine_mode, enum reload_type,
248 enum insn_code *, secondary_reload_info *);
249static enum reg_class find_valid_class (machine_mode, machine_mode,
250 int, unsigned int);
251static void push_replacement (rtx *, int, machine_mode);
252static void dup_replacements (rtx *, rtx *);
253static void combine_reloads (void);
254static int find_reusable_reload (rtx *, rtx, enum reg_class,
255 enum reload_type, int, int);
256static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
257 machine_mode, reg_class_t, int, int);
258static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
259static struct decomposition decompose (rtx);
260static int immune_p (rtx, rtx, struct decomposition);
261static bool alternative_allows_const_pool_ref (rtx, const char *, int);
262static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
263 rtx_insn *, int *);
264static rtx make_memloc (rtx, int);
265static bool maybe_memory_address_addr_space_p (machine_mode, rtx,
266 addr_space_t, rtx *);
267static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
268 int, enum reload_type, int, rtx_insn *);
269static rtx subst_reg_equivs (rtx, rtx_insn *);
270static rtx subst_indexed_address (rtx);
271static void update_auto_inc_notes (rtx_insn *, int, int);
272static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
273 enum rtx_code, enum rtx_code, rtx *,
274 int, enum reload_type,int, rtx_insn *);
275static void find_reloads_address_part (rtx, rtx *, enum reg_class,
276 machine_mode, int,
277 enum reload_type, int);
278static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
279 int, rtx_insn *, int *);
280static void copy_replacements_1 (rtx *, rtx *, int);
281static poly_int64 find_inc_amount (rtx, rtx);
282static int refers_to_mem_for_reload_p (rtx);
283static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
284 rtx, rtx *);
285
286/* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
287 list yet. */
288
289static void
290push_reg_equiv_alt_mem (int regno, rtx mem)
291{
292 rtx it;
293
294 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
295 if (rtx_equal_p (XEXP (it, 0), mem))
296 return;
297
298 reg_equiv_alt_mem_list (regno)
299 = alloc_EXPR_LIST (REG_EQUIV, mem,
300 reg_equiv_alt_mem_list (regno));
301}
302
303/* Determine if any secondary reloads are needed for loading (if IN_P is
304 nonzero) or storing (if IN_P is zero) X to or from a reload register of
305 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
306 are needed, push them.
307
308 Return the reload number of the secondary reload we made, or -1 if
309 we didn't need one. *PICODE is set to the insn_code to use if we do
310 need a secondary reload. */
311
312static int
313push_secondary_reload (int in_p, rtx x, int opnum, int optional,
314 enum reg_class reload_class,
315 machine_mode reload_mode, enum reload_type type,
316 enum insn_code *picode, secondary_reload_info *prev_sri)
317{
318 enum reg_class rclass = NO_REGS;
319 enum reg_class scratch_class;
320 machine_mode mode = reload_mode;
321 enum insn_code icode = CODE_FOR_nothing;
322 enum insn_code t_icode = CODE_FOR_nothing;
323 enum reload_type secondary_type;
324 int s_reload, t_reload = -1;
325 const char *scratch_constraint;
326 secondary_reload_info sri;
327
328 if (type == RELOAD_FOR_INPUT_ADDRESS
329 || type == RELOAD_FOR_OUTPUT_ADDRESS
330 || type == RELOAD_FOR_INPADDR_ADDRESS
331 || type == RELOAD_FOR_OUTADDR_ADDRESS)
332 secondary_type = type;
333 else
334 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
335
336 *picode = CODE_FOR_nothing;
337
338 /* If X is a paradoxical SUBREG, use the inner value to determine both the
339 mode and object being reloaded. */
340 if (paradoxical_subreg_p (x))
341 {
342 x = SUBREG_REG (x);
343 reload_mode = GET_MODE (x);
344 }
345
346 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
347 is still a pseudo-register by now, it *must* have an equivalent MEM
348 but we don't want to assume that), use that equivalent when seeing if
349 a secondary reload is needed since whether or not a reload is needed
350 might be sensitive to the form of the MEM. */
351
352 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
353 && reg_equiv_mem (REGNO (x)))
354 x = reg_equiv_mem (REGNO (x));
355
356 sri.icode = CODE_FOR_nothing;
357 sri.prev_sri = prev_sri;
358 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
359 reload_mode, &sri);
360 icode = (enum insn_code) sri.icode;
361
362 /* If we don't need any secondary registers, done. */
363 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
364 return -1;
365
366 if (rclass != NO_REGS)
367 t_reload = push_secondary_reload (in_p, x, opnum, optional, reload_class: rclass,
368 reload_mode, type, picode: &t_icode, prev_sri: &sri);
369
370 /* If we will be using an insn, the secondary reload is for a
371 scratch register. */
372
373 if (icode != CODE_FOR_nothing)
374 {
375 /* If IN_P is nonzero, the reload register will be the output in
376 operand 0. If IN_P is zero, the reload register will be the input
377 in operand 1. Outputs should have an initial "=", which we must
378 skip. */
379
380 /* ??? It would be useful to be able to handle only two, or more than
381 three, operands, but for now we can only handle the case of having
382 exactly three: output, input and one temp/scratch. */
383 gcc_assert (insn_data[(int) icode].n_operands == 3);
384
385 /* ??? We currently have no way to represent a reload that needs
386 an icode to reload from an intermediate tertiary reload register.
387 We should probably have a new field in struct reload to tag a
388 chain of scratch operand reloads onto. */
389 gcc_assert (rclass == NO_REGS);
390
391 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
392 gcc_assert (*scratch_constraint == '=');
393 scratch_constraint++;
394 if (*scratch_constraint == '&')
395 scratch_constraint++;
396 scratch_class = (reg_class_for_constraint
397 (c: lookup_constraint (p: scratch_constraint)));
398
399 rclass = scratch_class;
400 mode = insn_data[(int) icode].operand[2].mode;
401 }
402
403 /* This case isn't valid, so fail. Reload is allowed to use the same
404 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
405 in the case of a secondary register, we actually need two different
406 registers for correct code. We fail here to prevent the possibility of
407 silently generating incorrect code later.
408
409 The convention is that secondary input reloads are valid only if the
410 secondary_class is different from class. If you have such a case, you
411 cannot use secondary reloads, you must work around the problem some
412 other way.
413
414 Allow this when a reload_in/out pattern is being used. I.e. assume
415 that the generated code handles this case. */
416
417 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
418 || t_icode != CODE_FOR_nothing);
419
420 /* See if we can reuse an existing secondary reload. */
421 for (s_reload = 0; s_reload < n_reloads; s_reload++)
422 if (rld[s_reload].secondary_p
423 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
424 || reg_class_subset_p (rld[s_reload].rclass, rclass))
425 && ((in_p && rld[s_reload].inmode == mode)
426 || (! in_p && rld[s_reload].outmode == mode))
427 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
428 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
429 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
430 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
431 && (small_register_class_p (rclass)
432 || targetm.small_register_classes_for_mode_p (VOIDmode))
433 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
434 opnum, rld[s_reload].opnum))
435 {
436 if (in_p)
437 rld[s_reload].inmode = mode;
438 if (! in_p)
439 rld[s_reload].outmode = mode;
440
441 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
442 rld[s_reload].rclass = rclass;
443
444 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
445 rld[s_reload].optional &= optional;
446 rld[s_reload].secondary_p = 1;
447 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
448 opnum, rld[s_reload].opnum))
449 rld[s_reload].when_needed = RELOAD_OTHER;
450
451 break;
452 }
453
454 if (s_reload == n_reloads)
455 {
456 /* If we need a memory location to copy between the two reload regs,
457 set it up now. Note that we do the input case before making
458 the reload and the output case after. This is due to the
459 way reloads are output. */
460
461 if (in_p && icode == CODE_FOR_nothing
462 && targetm.secondary_memory_needed (mode, rclass, reload_class))
463 {
464 get_secondary_mem (x, reload_mode, opnum, type);
465
466 /* We may have just added new reloads. Make sure we add
467 the new reload at the end. */
468 s_reload = n_reloads;
469 }
470
471 /* We need to make a new secondary reload for this register class. */
472 rld[s_reload].in = rld[s_reload].out = 0;
473 rld[s_reload].rclass = rclass;
474
475 rld[s_reload].inmode = in_p ? mode : VOIDmode;
476 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
477 rld[s_reload].reg_rtx = 0;
478 rld[s_reload].optional = optional;
479 rld[s_reload].inc = 0;
480 /* Maybe we could combine these, but it seems too tricky. */
481 rld[s_reload].nocombine = 1;
482 rld[s_reload].in_reg = 0;
483 rld[s_reload].out_reg = 0;
484 rld[s_reload].opnum = opnum;
485 rld[s_reload].when_needed = secondary_type;
486 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
487 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
488 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
489 rld[s_reload].secondary_out_icode
490 = ! in_p ? t_icode : CODE_FOR_nothing;
491 rld[s_reload].secondary_p = 1;
492
493 n_reloads++;
494
495 if (! in_p && icode == CODE_FOR_nothing
496 && targetm.secondary_memory_needed (mode, reload_class, rclass))
497 get_secondary_mem (x, mode, opnum, type);
498 }
499
500 *picode = icode;
501 return s_reload;
502}
503
504/* If a secondary reload is needed, return its class. If both an intermediate
505 register and a scratch register is needed, we return the class of the
506 intermediate register. */
507reg_class_t
508secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
509 rtx x)
510{
511 enum insn_code icode;
512 secondary_reload_info sri;
513
514 sri.icode = CODE_FOR_nothing;
515 sri.prev_sri = NULL;
516 rclass
517 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
518 icode = (enum insn_code) sri.icode;
519
520 /* If there are no secondary reloads at all, we return NO_REGS.
521 If an intermediate register is needed, we return its class. */
522 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
523 return rclass;
524
525 /* No intermediate register is needed, but we have a special reload
526 pattern, which we assume for now needs a scratch register. */
527 return scratch_reload_class (icode);
528}
529
530/* ICODE is the insn_code of a reload pattern. Check that it has exactly
531 three operands, verify that operand 2 is an output operand, and return
532 its register class.
533 ??? We'd like to be able to handle any pattern with at least 2 operands,
534 for zero or more scratch registers, but that needs more infrastructure. */
535enum reg_class
536scratch_reload_class (enum insn_code icode)
537{
538 const char *scratch_constraint;
539 enum reg_class rclass;
540
541 gcc_assert (insn_data[(int) icode].n_operands == 3);
542 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
543 gcc_assert (*scratch_constraint == '=');
544 scratch_constraint++;
545 if (*scratch_constraint == '&')
546 scratch_constraint++;
547 rclass = reg_class_for_constraint (c: lookup_constraint (p: scratch_constraint));
548 gcc_assert (rclass != NO_REGS);
549 return rclass;
550}
551
552/* Return a memory location that will be used to copy X in mode MODE.
553 If we haven't already made a location for this mode in this insn,
554 call find_reloads_address on the location being returned. */
555
556rtx
557get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
558 int opnum, enum reload_type type)
559{
560 rtx loc;
561 int mem_valid;
562
563 /* By default, if MODE is narrower than a word, widen it to a word.
564 This is required because most machines that require these memory
565 locations do not support short load and stores from all registers
566 (e.g., FP registers). */
567
568 mode = targetm.secondary_memory_needed_mode (mode);
569
570 /* If we already have made a MEM for this operand in MODE, return it. */
571 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
572 return secondary_memlocs_elim[(int) mode][opnum];
573
574 /* If this is the first time we've tried to get a MEM for this mode,
575 allocate a new one. `something_changed' in reload will get set
576 by noticing that the frame size has changed. */
577
578 if (secondary_memlocs[(int) mode] == 0)
579 {
580#ifdef SECONDARY_MEMORY_NEEDED_RTX
581 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
582#else
583 secondary_memlocs[(int) mode]
584 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
585#endif
586 }
587
588 /* Get a version of the address doing any eliminations needed. If that
589 didn't give us a new MEM, make a new one if it isn't valid. */
590
591 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
592 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
593 MEM_ADDR_SPACE (loc));
594
595 if (! mem_valid && loc == secondary_memlocs[(int) mode])
596 loc = copy_rtx (loc);
597
598 /* The only time the call below will do anything is if the stack
599 offset is too large. In that case IND_LEVELS doesn't matter, so we
600 can just pass a zero. Adjust the type to be the address of the
601 corresponding object. If the address was valid, save the eliminated
602 address. If it wasn't valid, we need to make a reload each time, so
603 don't save it. */
604
605 if (! mem_valid)
606 {
607 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
608 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
609 : RELOAD_OTHER);
610
611 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
612 opnum, type, 0, 0);
613 }
614
615 secondary_memlocs_elim[(int) mode][opnum] = loc;
616 if (secondary_memlocs_elim_used <= (int)mode)
617 secondary_memlocs_elim_used = (int)mode + 1;
618 return loc;
619}
620
621/* Clear any secondary memory locations we've made. */
622
623void
624clear_secondary_mem (void)
625{
626 memset (s: secondary_memlocs, c: 0, n: sizeof secondary_memlocs);
627}
628
629
630/* Find the largest class which has at least one register valid in
631 mode INNER, and which for every such register, that register number
632 plus N is also valid in OUTER (if in range) and is cheap to move
633 into REGNO. Such a class must exist. */
634
635static enum reg_class
636find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
637 machine_mode inner ATTRIBUTE_UNUSED, int n,
638 unsigned int dest_regno ATTRIBUTE_UNUSED)
639{
640 int best_cost = -1;
641 int rclass;
642 int regno;
643 enum reg_class best_class = NO_REGS;
644 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
645 unsigned int best_size = 0;
646 int cost;
647
648 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
649 {
650 int bad = 0;
651 int good = 0;
652 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
653 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], bit: regno))
654 {
655 if (targetm.hard_regno_mode_ok (regno, inner))
656 {
657 good = 1;
658 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], bit: regno + n)
659 && !targetm.hard_regno_mode_ok (regno + n, outer))
660 bad = 1;
661 }
662 }
663
664 if (bad || !good)
665 continue;
666 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
667
668 if ((reg_class_size[rclass] > best_size
669 && (best_cost < 0 || best_cost >= cost))
670 || best_cost > cost)
671 {
672 best_class = (enum reg_class) rclass;
673 best_size = reg_class_size[rclass];
674 best_cost = register_move_cost (outer, (enum reg_class) rclass,
675 dest_class);
676 }
677 }
678
679 gcc_assert (best_size != 0);
680
681 return best_class;
682}
683
684/* We are trying to reload a subreg of something that is not a register.
685 Find the largest class which contains only registers valid in
686 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
687 which we would eventually like to obtain the object. */
688
689static enum reg_class
690find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 machine_mode mode ATTRIBUTE_UNUSED,
692 enum reg_class dest_class ATTRIBUTE_UNUSED)
693{
694 int best_cost = -1;
695 int rclass;
696 int regno;
697 enum reg_class best_class = NO_REGS;
698 unsigned int best_size = 0;
699 int cost;
700
701 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
702 {
703 unsigned int computed_rclass_size = 0;
704
705 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
706 {
707 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 && targetm.hard_regno_mode_ok (regno, mode))
709 computed_rclass_size++;
710 }
711
712 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
713
714 if ((computed_rclass_size > best_size
715 && (best_cost < 0 || best_cost >= cost))
716 || best_cost > cost)
717 {
718 best_class = (enum reg_class) rclass;
719 best_size = computed_rclass_size;
720 best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 dest_class);
722 }
723 }
724
725 gcc_assert (best_size != 0);
726
727#ifdef LIMIT_RELOAD_CLASS
728 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729#endif
730 return best_class;
731}
732
733/* Return the number of a previously made reload that can be combined with
734 a new one, or n_reloads if none of the existing reloads can be used.
735 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
736 push_reload, they determine the kind of the new reload that we try to
737 combine. P_IN points to the corresponding value of IN, which can be
738 modified by this function.
739 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
740
741static int
742find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
743 enum reload_type type, int opnum, int dont_share)
744{
745 rtx in = *p_in;
746 int i;
747 /* We can't merge two reloads if the output of either one is
748 earlyclobbered. */
749
750 if (earlyclobber_operand_p (out))
751 return n_reloads;
752
753 /* We can use an existing reload if the class is right
754 and at least one of IN and OUT is a match
755 and the other is at worst neutral.
756 (A zero compared against anything is neutral.)
757
758 For targets with small register classes, don't use existing reloads
759 unless they are for the same thing since that can cause us to need
760 more reload registers than we otherwise would. */
761
762 for (i = 0; i < n_reloads; i++)
763 if ((reg_class_subset_p (rclass, rld[i].rclass)
764 || reg_class_subset_p (rld[i].rclass, rclass))
765 /* If the existing reload has a register, it must fit our class. */
766 && (rld[i].reg_rtx == 0
767 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
768 bit: true_regnum (rld[i].reg_rtx)))
769 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
770 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
771 || (out != 0 && MATCHES (rld[i].out, out)
772 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
773 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
774 && (small_register_class_p (rclass)
775 || targetm.small_register_classes_for_mode_p (VOIDmode))
776 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
777 return i;
778
779 /* Reloading a plain reg for input can match a reload to postincrement
780 that reg, since the postincrement's value is the right value.
781 Likewise, it can match a preincrement reload, since we regard
782 the preincrementation as happening before any ref in this insn
783 to that register. */
784 for (i = 0; i < n_reloads; i++)
785 if ((reg_class_subset_p (rclass, rld[i].rclass)
786 || reg_class_subset_p (rld[i].rclass, rclass))
787 /* If the existing reload has a register, it must fit our
788 class. */
789 && (rld[i].reg_rtx == 0
790 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
791 bit: true_regnum (rld[i].reg_rtx)))
792 && out == 0 && rld[i].out == 0 && rld[i].in != 0
793 && ((REG_P (in)
794 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
795 && MATCHES (XEXP (rld[i].in, 0), in))
796 || (REG_P (rld[i].in)
797 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
798 && MATCHES (XEXP (in, 0), rld[i].in)))
799 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 && (small_register_class_p (rclass)
801 || targetm.small_register_classes_for_mode_p (VOIDmode))
802 && MERGABLE_RELOADS (type, rld[i].when_needed,
803 opnum, rld[i].opnum))
804 {
805 /* Make sure reload_in ultimately has the increment,
806 not the plain register. */
807 if (REG_P (in))
808 *p_in = rld[i].in;
809 return i;
810 }
811 return n_reloads;
812}
813
814/* Return true if:
815
816 (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
817 of a multiword value; and
818
819 (b) the number of *words* in REG does not match the number of *registers*
820 in REG. */
821
822static bool
823complex_word_subreg_p (machine_mode outer_mode, rtx reg)
824{
825 machine_mode inner_mode = GET_MODE (reg);
826 poly_uint64 reg_words = REG_NREGS (reg) * UNITS_PER_WORD;
827 return (known_le (GET_MODE_SIZE (outer_mode), UNITS_PER_WORD)
828 && maybe_gt (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
829 && !known_equal_after_align_up (a: GET_MODE_SIZE (mode: inner_mode),
830 b: reg_words, UNITS_PER_WORD));
831}
832
833/* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834 expression. MODE is the mode that X will be used in. OUTPUT is true if
835 the function is invoked for the output part of an enclosing reload. */
836
837static bool
838reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
839{
840 rtx inner;
841
842 /* Only SUBREGs are problematical. */
843 if (GET_CODE (x) != SUBREG)
844 return false;
845
846 inner = SUBREG_REG (x);
847
848 /* If INNER is a constant or PLUS, then INNER will need reloading. */
849 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
850 return true;
851
852 /* If INNER is not a hard register, then INNER will not need reloading. */
853 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
854 return false;
855
856 /* If INNER is not ok for MODE, then INNER will need reloading. */
857 if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
858 return true;
859
860 /* If this is for an output, and the outer part is a word or smaller,
861 INNER is larger than a word and the number of registers in INNER is
862 not the same as the number of words in INNER, then INNER will need
863 reloading (with an in-out reload). */
864 return output && complex_word_subreg_p (outer_mode: mode, reg: inner);
865}
866
867/* Return nonzero if IN can be reloaded into REGNO with mode MODE without
868 requiring an extra reload register. The caller has already found that
869 IN contains some reference to REGNO, so check that we can produce the
870 new value in a single step. E.g. if we have
871 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
872 instruction that adds one to a register, this should succeed.
873 However, if we have something like
874 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
875 needs to be loaded into a register first, we need a separate reload
876 register.
877 Such PLUS reloads are generated by find_reload_address_part.
878 The out-of-range PLUS expressions are usually introduced in the instruction
879 patterns by register elimination and substituting pseudos without a home
880 by their function-invariant equivalences. */
881static int
882can_reload_into (rtx in, int regno, machine_mode mode)
883{
884 rtx dst;
885 rtx_insn *test_insn;
886 int r = 0;
887 struct recog_data_d save_recog_data;
888
889 /* For matching constraints, we often get notional input reloads where
890 we want to use the original register as the reload register. I.e.
891 technically this is a non-optional input-output reload, but IN is
892 already a valid register, and has been chosen as the reload register.
893 Speed this up, since it trivially works. */
894 if (REG_P (in))
895 return 1;
896
897 /* To test MEMs properly, we'd have to take into account all the reloads
898 that are already scheduled, which can become quite complicated.
899 And since we've already handled address reloads for this MEM, it
900 should always succeed anyway. */
901 if (MEM_P (in))
902 return 1;
903
904 /* If we can make a simple SET insn that does the job, everything should
905 be fine. */
906 dst = gen_rtx_REG (mode, regno);
907 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
908 save_recog_data = recog_data;
909 if (recog_memoized (insn: test_insn) >= 0)
910 {
911 extract_insn (test_insn);
912 r = constrain_operands (1, get_enabled_alternatives (test_insn));
913 }
914 recog_data = save_recog_data;
915 return r;
916}
917
918/* Record one reload that needs to be performed.
919 IN is an rtx saying where the data are to be found before this instruction.
920 OUT says where they must be stored after the instruction.
921 (IN is zero for data not read, and OUT is zero for data not written.)
922 INLOC and OUTLOC point to the places in the instructions where
923 IN and OUT were found.
924 If IN and OUT are both nonzero, it means the same register must be used
925 to reload both IN and OUT.
926
927 RCLASS is a register class required for the reloaded data.
928 INMODE is the machine mode that the instruction requires
929 for the reg that replaces IN and OUTMODE is likewise for OUT.
930
931 If IN is zero, then OUT's location and mode should be passed as
932 INLOC and INMODE.
933
934 STRICT_LOW is the 1 if there is a containing STRICT_LOW_PART rtx.
935
936 OPTIONAL nonzero means this reload does not need to be performed:
937 it can be discarded if that is more convenient.
938
939 OPNUM and TYPE say what the purpose of this reload is.
940
941 The return value is the reload-number for this reload.
942
943 If both IN and OUT are nonzero, in some rare cases we might
944 want to make two separate reloads. (Actually we never do this now.)
945 Therefore, the reload-number for OUT is stored in
946 output_reloadnum when we return; the return value applies to IN.
947 Usually (presently always), when IN and OUT are nonzero,
948 the two reload-numbers are equal, but the caller should be careful to
949 distinguish them. */
950
951int
952push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
953 enum reg_class rclass, machine_mode inmode,
954 machine_mode outmode, int strict_low, int optional,
955 int opnum, enum reload_type type)
956{
957 int i;
958 int dont_share = 0;
959 int dont_remove_subreg = 0;
960#ifdef LIMIT_RELOAD_CLASS
961 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
962#endif
963 int secondary_in_reload = -1, secondary_out_reload = -1;
964 enum insn_code secondary_in_icode = CODE_FOR_nothing;
965 enum insn_code secondary_out_icode = CODE_FOR_nothing;
966 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
967 subreg_in_class = NO_REGS;
968
969 /* INMODE and/or OUTMODE could be VOIDmode if no mode
970 has been specified for the operand. In that case,
971 use the operand's mode as the mode to reload. */
972 if (inmode == VOIDmode && in != 0)
973 inmode = GET_MODE (in);
974 if (outmode == VOIDmode && out != 0)
975 outmode = GET_MODE (out);
976
977 /* If find_reloads and friends until now missed to replace a pseudo
978 with a constant of reg_equiv_constant something went wrong
979 beforehand.
980 Note that it can't simply be done here if we missed it earlier
981 since the constant might need to be pushed into the literal pool
982 and the resulting memref would probably need further
983 reloading. */
984 if (in != 0 && REG_P (in))
985 {
986 int regno = REGNO (in);
987
988 gcc_assert (regno < FIRST_PSEUDO_REGISTER
989 || reg_renumber[regno] >= 0
990 || reg_equiv_constant (regno) == NULL_RTX);
991 }
992
993 /* reg_equiv_constant only contains constants which are obviously
994 not appropriate as destination. So if we would need to replace
995 the destination pseudo with a constant we are in real
996 trouble. */
997 if (out != 0 && REG_P (out))
998 {
999 int regno = REGNO (out);
1000
1001 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1002 || reg_renumber[regno] >= 0
1003 || reg_equiv_constant (regno) == NULL_RTX);
1004 }
1005
1006 /* If we have a read-write operand with an address side-effect,
1007 change either IN or OUT so the side-effect happens only once. */
1008 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1009 switch (GET_CODE (XEXP (in, 0)))
1010 {
1011 case POST_INC: case POST_DEC: case POST_MODIFY:
1012 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1013 break;
1014
1015 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1016 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1017 break;
1018
1019 default:
1020 break;
1021 }
1022
1023 /* If we are reloading a (SUBREG constant ...), really reload just the
1024 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1025 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1026 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1027 register is a pseudo, also reload the inside expression.
1028 For machines that extend byte loads, do this for any SUBREG of a pseudo
1029 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1030 M2 is an integral mode that gets extended when loaded.
1031 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1032 where either M1 is not valid for R or M2 is wider than a word but we
1033 only need one register to store an M2-sized quantity in R.
1034 (However, if OUT is nonzero, we need to reload the reg *and*
1035 the subreg, so do nothing here, and let following statement handle it.)
1036
1037 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1038 we can't handle it here because CONST_INT does not indicate a mode.
1039
1040 Similarly, we must reload the inside expression if we have a
1041 STRICT_LOW_PART (presumably, in == out in this case).
1042
1043 Also reload the inner expression if it does not require a secondary
1044 reload but the SUBREG does.
1045
1046 Also reload the inner expression if it is a register that is in
1047 the class whose registers cannot be referenced in a different size
1048 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1049 cannot reload just the inside since we might end up with the wrong
1050 register class. But if it is inside a STRICT_LOW_PART, we have
1051 no choice, so we hope we do get the right register class there.
1052
1053 Finally, reload the inner expression if it is a pseudo that will
1054 become a MEM and the MEM has a mode-dependent address, as in that
1055 case we obviously cannot change the mode of the MEM to that of the
1056 containing SUBREG as that would change the interpretation of the
1057 address. */
1058
1059 scalar_int_mode inner_mode;
1060 if (in != 0 && GET_CODE (in) == SUBREG
1061 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1062 inmode, rclass)
1063 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1064 && (strict_low
1065 || (subreg_lowpart_p (in)
1066 && (CONSTANT_P (SUBREG_REG (in))
1067 || GET_CODE (SUBREG_REG (in)) == PLUS
1068 || (((REG_P (SUBREG_REG (in))
1069 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1070 || MEM_P (SUBREG_REG (in)))
1071 && (paradoxical_subreg_p (outermode: inmode,
1072 GET_MODE (SUBREG_REG (in)))
1073 || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
1074 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG
1075 (in)),
1076 result: &inner_mode)
1077 && GET_MODE_SIZE (mode: inner_mode) <= UNITS_PER_WORD
1078 && paradoxical_subreg_p (outermode: inmode, innermode: inner_mode)
1079 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1080 || (WORD_REGISTER_OPERATIONS
1081 && partial_subreg_p (outermode: inmode,
1082 GET_MODE (SUBREG_REG (in)))
1083 && (known_equal_after_align_down
1084 (a: GET_MODE_SIZE (mode: inmode) - 1,
1085 b: GET_MODE_SIZE (GET_MODE (SUBREG_REG
1086 (in))) - 1,
1087 UNITS_PER_WORD)))))
1088 || (REG_P (SUBREG_REG (in))
1089 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1090 /* The case where out is nonzero
1091 is handled differently in the following statement. */
1092 && (out == 0 || subreg_lowpart_p (in))
1093 && (complex_word_subreg_p (outer_mode: inmode, SUBREG_REG (in))
1094 || !targetm.hard_regno_mode_ok (subreg_regno (in),
1095 inmode)))
1096 || (secondary_reload_class (in_p: 1, rclass, mode: inmode, x: in) != NO_REGS
1097 && (secondary_reload_class (in_p: 1, rclass,
1098 GET_MODE (SUBREG_REG (in)),
1099 SUBREG_REG (in))
1100 == NO_REGS))
1101 || (REG_P (SUBREG_REG (in))
1102 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1103 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1104 GET_MODE (SUBREG_REG (in)),
1105 inmode))))
1106 || (REG_P (SUBREG_REG (in))
1107 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER
1108 && reg_equiv_mem (REGNO (SUBREG_REG (in)))
1109 && (mode_dependent_address_p
1110 (XEXP (reg_equiv_mem (REGNO (SUBREG_REG (in))), 0),
1111 MEM_ADDR_SPACE (reg_equiv_mem (REGNO (SUBREG_REG (in)))))))))
1112 {
1113#ifdef LIMIT_RELOAD_CLASS
1114 in_subreg_loc = inloc;
1115#endif
1116 inloc = &SUBREG_REG (in);
1117 in = *inloc;
1118
1119 if (!WORD_REGISTER_OPERATIONS
1120 && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1121 && MEM_P (in))
1122 /* This is supposed to happen only for paradoxical subregs made by
1123 combine.cc. (SUBREG (MEM)) isn't supposed to occur other ways. */
1124 gcc_assert (known_le (GET_MODE_SIZE (GET_MODE (in)),
1125 GET_MODE_SIZE (inmode)));
1126
1127 inmode = GET_MODE (in);
1128 }
1129
1130 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1131 where M1 is not valid for R if it was not handled by the code above.
1132
1133 Similar issue for (SUBREG constant ...) if it was not handled by the
1134 code above. This can happen if SUBREG_BYTE != 0.
1135
1136 However, we must reload the inner reg *as well as* the subreg in
1137 that case. */
1138
1139 if (in != 0 && reload_inner_reg_of_subreg (x: in, mode: inmode, output: false))
1140 {
1141 if (REG_P (SUBREG_REG (in)))
1142 subreg_in_class
1143 = find_valid_class (outer: inmode, GET_MODE (SUBREG_REG (in)),
1144 n: subreg_regno_offset (REGNO (SUBREG_REG (in)),
1145 GET_MODE (SUBREG_REG (in)),
1146 SUBREG_BYTE (in),
1147 GET_MODE (in)),
1148 REGNO (SUBREG_REG (in)));
1149 else if (CONSTANT_P (SUBREG_REG (in))
1150 || GET_CODE (SUBREG_REG (in)) == PLUS)
1151 subreg_in_class = find_valid_class_1 (outer: inmode,
1152 GET_MODE (SUBREG_REG (in)),
1153 dest_class: rclass);
1154
1155 /* This relies on the fact that emit_reload_insns outputs the
1156 instructions for input reloads of type RELOAD_OTHER in the same
1157 order as the reloads. Thus if the outer reload is also of type
1158 RELOAD_OTHER, we are guaranteed that this inner reload will be
1159 output before the outer reload. */
1160 push_reload (SUBREG_REG (in), NULL_RTX, inloc: &SUBREG_REG (in), outloc: (rtx *) 0,
1161 rclass: subreg_in_class, VOIDmode, VOIDmode, strict_low: 0, optional: 0, opnum, type);
1162 dont_remove_subreg = 1;
1163 }
1164
1165 /* Similarly for paradoxical and problematical SUBREGs on the output.
1166 Note that there is no reason we need worry about the previous value
1167 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1168 entitled to clobber it all (except in the case of a word mode subreg
1169 or of a STRICT_LOW_PART, in that latter case the constraint should
1170 label it input-output.) */
1171 if (out != 0 && GET_CODE (out) == SUBREG
1172 && (subreg_lowpart_p (out) || strict_low)
1173 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1174 outmode, rclass)
1175 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1176 && (CONSTANT_P (SUBREG_REG (out))
1177 || strict_low
1178 || (((REG_P (SUBREG_REG (out))
1179 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1180 || MEM_P (SUBREG_REG (out)))
1181 && (paradoxical_subreg_p (outermode: outmode, GET_MODE (SUBREG_REG (out)))
1182 || (WORD_REGISTER_OPERATIONS
1183 && partial_subreg_p (outermode: outmode, GET_MODE (SUBREG_REG (out)))
1184 && (known_equal_after_align_down
1185 (a: GET_MODE_SIZE (mode: outmode) - 1,
1186 b: GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1,
1187 UNITS_PER_WORD)))))
1188 || (REG_P (SUBREG_REG (out))
1189 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1190 /* The case of a word mode subreg
1191 is handled differently in the following statement. */
1192 && ! (known_le (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1193 && maybe_gt (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))),
1194 UNITS_PER_WORD))
1195 && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1196 || (secondary_reload_class (in_p: 0, rclass, mode: outmode, x: out) != NO_REGS
1197 && (secondary_reload_class (in_p: 0, rclass, GET_MODE (SUBREG_REG (out)),
1198 SUBREG_REG (out))
1199 == NO_REGS))
1200 || (REG_P (SUBREG_REG (out))
1201 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1202 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1203 GET_MODE (SUBREG_REG (out)),
1204 outmode))))
1205 {
1206#ifdef LIMIT_RELOAD_CLASS
1207 out_subreg_loc = outloc;
1208#endif
1209 outloc = &SUBREG_REG (out);
1210 out = *outloc;
1211 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1212 || known_le (GET_MODE_SIZE (GET_MODE (out)),
1213 GET_MODE_SIZE (outmode)));
1214 outmode = GET_MODE (out);
1215 }
1216
1217 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1218 where either M1 is not valid for R or M2 is wider than a word but we
1219 only need one register to store an M2-sized quantity in R.
1220
1221 However, we must reload the inner reg *as well as* the subreg in
1222 that case and the inner reg is an in-out reload. */
1223
1224 if (out != 0 && reload_inner_reg_of_subreg (x: out, mode: outmode, output: true))
1225 {
1226 enum reg_class in_out_class
1227 = find_valid_class (outer: outmode, GET_MODE (SUBREG_REG (out)),
1228 n: subreg_regno_offset (REGNO (SUBREG_REG (out)),
1229 GET_MODE (SUBREG_REG (out)),
1230 SUBREG_BYTE (out),
1231 GET_MODE (out)),
1232 REGNO (SUBREG_REG (out)));
1233
1234 /* This relies on the fact that emit_reload_insns outputs the
1235 instructions for output reloads of type RELOAD_OTHER in reverse
1236 order of the reloads. Thus if the outer reload is also of type
1237 RELOAD_OTHER, we are guaranteed that this inner reload will be
1238 output after the outer reload. */
1239 push_reload (SUBREG_REG (out), SUBREG_REG (out), inloc: &SUBREG_REG (out),
1240 outloc: &SUBREG_REG (out), rclass: in_out_class, VOIDmode, VOIDmode,
1241 strict_low: 0, optional: 0, opnum, type: RELOAD_OTHER);
1242 dont_remove_subreg = 1;
1243 }
1244
1245 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1246 if (in != 0 && out != 0 && MEM_P (out)
1247 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1248 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1249 dont_share = 1;
1250
1251 /* If IN is a SUBREG of a hard register, make a new REG. This
1252 simplifies some of the cases below. */
1253
1254 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1255 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1256 && ! dont_remove_subreg)
1257 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1258
1259 /* Similarly for OUT. */
1260 if (out != 0 && GET_CODE (out) == SUBREG
1261 && REG_P (SUBREG_REG (out))
1262 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1263 && ! dont_remove_subreg)
1264 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1265
1266 /* Narrow down the class of register wanted if that is
1267 desirable on this machine for efficiency. */
1268 {
1269 reg_class_t preferred_class = rclass;
1270
1271 if (in != 0)
1272 preferred_class = targetm.preferred_reload_class (in, rclass);
1273
1274 /* Output reloads may need analogous treatment, different in detail. */
1275 if (out != 0)
1276 preferred_class
1277 = targetm.preferred_output_reload_class (out, preferred_class);
1278
1279 /* Discard what the target said if we cannot do it. */
1280 if (preferred_class != NO_REGS
1281 || (optional && type == RELOAD_FOR_OUTPUT))
1282 rclass = (enum reg_class) preferred_class;
1283 }
1284
1285 /* Make sure we use a class that can handle the actual pseudo
1286 inside any subreg. For example, on the 386, QImode regs
1287 can appear within SImode subregs. Although GENERAL_REGS
1288 can handle SImode, QImode needs a smaller class. */
1289#ifdef LIMIT_RELOAD_CLASS
1290 if (in_subreg_loc)
1291 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1292 else if (in != 0 && GET_CODE (in) == SUBREG)
1293 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1294
1295 if (out_subreg_loc)
1296 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1297 if (out != 0 && GET_CODE (out) == SUBREG)
1298 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1299#endif
1300
1301 /* Verify that this class is at least possible for the mode that
1302 is specified. */
1303 if (this_insn_is_asm)
1304 {
1305 machine_mode mode;
1306 if (paradoxical_subreg_p (outermode: inmode, innermode: outmode))
1307 mode = inmode;
1308 else
1309 mode = outmode;
1310 if (mode == VOIDmode)
1311 {
1312 error_for_asm (this_insn, "cannot reload integer constant "
1313 "operand in %<asm%>");
1314 mode = word_mode;
1315 if (in != 0)
1316 inmode = word_mode;
1317 if (out != 0)
1318 outmode = word_mode;
1319 }
1320 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1321 if (targetm.hard_regno_mode_ok (i, mode)
1322 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, regno: i))
1323 break;
1324 if (i == FIRST_PSEUDO_REGISTER)
1325 {
1326 error_for_asm (this_insn, "impossible register constraint "
1327 "in %<asm%>");
1328 /* Avoid further trouble with this insn. */
1329 PATTERN (insn: this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1330 /* We used to continue here setting class to ALL_REGS, but it triggers
1331 sanity check on i386 for:
1332 void foo(long double d)
1333 {
1334 asm("" :: "a" (d));
1335 }
1336 Returning zero here ought to be safe as we take care in
1337 find_reloads to not process the reloads when instruction was
1338 replaced by USE. */
1339
1340 return 0;
1341 }
1342 }
1343
1344 /* Optional output reloads are always OK even if we have no register class,
1345 since the function of these reloads is only to have spill_reg_store etc.
1346 set, so that the storing insn can be deleted later. */
1347 gcc_assert (rclass != NO_REGS
1348 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1349
1350 i = find_reusable_reload (p_in: &in, out, rclass, type, opnum, dont_share);
1351
1352 if (i == n_reloads)
1353 {
1354 /* See if we need a secondary reload register to move between CLASS
1355 and IN or CLASS and OUT. Get the icode and push any required reloads
1356 needed for each of them if so. */
1357
1358 if (in != 0)
1359 secondary_in_reload
1360 = push_secondary_reload (in_p: 1, x: in, opnum, optional, reload_class: rclass, reload_mode: inmode, type,
1361 picode: &secondary_in_icode, NULL);
1362 if (out != 0 && GET_CODE (out) != SCRATCH)
1363 secondary_out_reload
1364 = push_secondary_reload (in_p: 0, x: out, opnum, optional, reload_class: rclass, reload_mode: outmode,
1365 type, picode: &secondary_out_icode, NULL);
1366
1367 /* We found no existing reload suitable for re-use.
1368 So add an additional reload. */
1369
1370 if (subreg_in_class == NO_REGS
1371 && in != 0
1372 && (REG_P (in)
1373 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1374 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1375 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1376 /* If a memory location is needed for the copy, make one. */
1377 if (subreg_in_class != NO_REGS
1378 && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1379 get_secondary_mem (x: in, mode: inmode, opnum, type);
1380
1381 i = n_reloads;
1382 rld[i].in = in;
1383 rld[i].out = out;
1384 rld[i].rclass = rclass;
1385 rld[i].inmode = inmode;
1386 rld[i].outmode = outmode;
1387 rld[i].reg_rtx = 0;
1388 rld[i].optional = optional;
1389 rld[i].inc = 0;
1390 rld[i].nocombine = 0;
1391 rld[i].in_reg = inloc ? *inloc : 0;
1392 rld[i].out_reg = outloc ? *outloc : 0;
1393 rld[i].opnum = opnum;
1394 rld[i].when_needed = type;
1395 rld[i].secondary_in_reload = secondary_in_reload;
1396 rld[i].secondary_out_reload = secondary_out_reload;
1397 rld[i].secondary_in_icode = secondary_in_icode;
1398 rld[i].secondary_out_icode = secondary_out_icode;
1399 rld[i].secondary_p = 0;
1400
1401 n_reloads++;
1402
1403 if (out != 0
1404 && (REG_P (out)
1405 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1406 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1407 && (targetm.secondary_memory_needed
1408 (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1409 get_secondary_mem (x: out, mode: outmode, opnum, type);
1410 }
1411 else
1412 {
1413 /* We are reusing an existing reload,
1414 but we may have additional information for it.
1415 For example, we may now have both IN and OUT
1416 while the old one may have just one of them. */
1417
1418 /* The modes can be different. If they are, we want to reload in
1419 the larger mode, so that the value is valid for both modes. */
1420 if (inmode != VOIDmode
1421 && partial_subreg_p (outermode: rld[i].inmode, innermode: inmode))
1422 rld[i].inmode = inmode;
1423 if (outmode != VOIDmode
1424 && partial_subreg_p (outermode: rld[i].outmode, innermode: outmode))
1425 rld[i].outmode = outmode;
1426 if (in != 0)
1427 {
1428 rtx in_reg = inloc ? *inloc : 0;
1429 /* If we merge reloads for two distinct rtl expressions that
1430 are identical in content, there might be duplicate address
1431 reloads. Remove the extra set now, so that if we later find
1432 that we can inherit this reload, we can get rid of the
1433 address reloads altogether.
1434
1435 Do not do this if both reloads are optional since the result
1436 would be an optional reload which could potentially leave
1437 unresolved address replacements.
1438
1439 It is not sufficient to call transfer_replacements since
1440 choose_reload_regs will remove the replacements for address
1441 reloads of inherited reloads which results in the same
1442 problem. */
1443 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1444 && ! (rld[i].optional && optional))
1445 {
1446 /* We must keep the address reload with the lower operand
1447 number alive. */
1448 if (opnum > rld[i].opnum)
1449 {
1450 remove_address_replacements (in_rtx: in);
1451 in = rld[i].in;
1452 in_reg = rld[i].in_reg;
1453 }
1454 else
1455 remove_address_replacements (in_rtx: rld[i].in);
1456 }
1457	  /* When emitting reloads we don't look only at the in- and outmode,
1458	     but also directly at the operands (in and out).
1459	     So we can't simply overwrite them with whatever we have found
1460	     for this (to-be-merged) reload; we have to "merge" that too.
1461 Reusing another reload already verified that we deal with the
1462 same operands, just possibly in different modes. So we
1463 overwrite the operands only when the new mode is larger.
1464 See also PR33613. */
1465 if (!rld[i].in
1466 || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1467 rld[i].in = in;
1468 if (!rld[i].in_reg
1469 || (in_reg
1470 && partial_subreg_p (GET_MODE (rld[i].in_reg),
1471 GET_MODE (in_reg))))
1472 rld[i].in_reg = in_reg;
1473 }
1474 if (out != 0)
1475 {
1476 if (!rld[i].out
1477 || (out
1478 && partial_subreg_p (GET_MODE (rld[i].out),
1479 GET_MODE (out))))
1480 rld[i].out = out;
1481 if (outloc
1482 && (!rld[i].out_reg
1483 || partial_subreg_p (GET_MODE (rld[i].out_reg),
1484 GET_MODE (*outloc))))
1485 rld[i].out_reg = *outloc;
1486 }
1487 if (reg_class_subset_p (rclass, rld[i].rclass))
1488 rld[i].rclass = rclass;
1489 rld[i].optional &= optional;
1490 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1491 opnum, rld[i].opnum))
1492 rld[i].when_needed = RELOAD_OTHER;
1493 rld[i].opnum = MIN (rld[i].opnum, opnum);
1494 }
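  /* At this point 'i' indexes the reload we will use, whether it was
     pushed as a new entry above or merged with an existing one.  */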
1495
1496 /* If the ostensible rtx being reloaded differs from the rtx found
1497 in the location to substitute, this reload is not safe to combine
1498 because we cannot reliably tell whether it appears in the insn. */
1499
1500 if (in != 0 && in != *inloc)
1501 rld[i].nocombine = 1;
1502
1503 /* If we will replace IN and OUT with the reload-reg,
1504 record where they are located so that substitution need
1505 not do a tree walk. */
1506
1507 if (replace_reloads)
1508 {
1509 if (inloc != 0)
1510 {
1511 struct replacement *r = &replacements[n_replacements++];
1512 r->what = i;
1513 r->where = inloc;
1514 r->mode = inmode;
1515 }
1516 if (outloc != 0 && outloc != inloc)
1517 {
1518 struct replacement *r = &replacements[n_replacements++];
1519 r->what = i;
1520 r->where = outloc;
1521 r->mode = outmode;
1522 }
1523 }
1524
1525 /* If this reload is just being introduced and it has both
1526 an incoming quantity and an outgoing quantity that are
1527 supposed to be made to match, see if either one of the two
1528 can serve as the place to reload into.
1529
1530 If one of them is acceptable, set rld[i].reg_rtx
1531 to that one. */
1532
1533 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1534 {
1535 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1536 inmode, outmode,
1537 rld[i].rclass, i,
1538 earlyclobber_operand_p (out));
1539
1540 /* If the outgoing register already contains the same value
1541 as the incoming one, we can dispense with loading it.
1542 The easiest way to tell the caller that is to give a phony
1543 value for the incoming operand (same as outgoing one). */
1544 if (rld[i].reg_rtx == out
1545 && (REG_P (in) || CONSTANT_P (in))
1546 && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1547 static_reload_reg_p, i, inmode) != 0)
1548 rld[i].in = out;
1549 }
1550
1551 /* If this is an input reload and the operand contains a register that
1552 dies in this insn and is used nowhere else, see if it is the right class
1553 to be used for this reload. Use it if so. (This occurs most commonly
1554 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1555 this if it is also an output reload that mentions the register unless
1556 the output is a SUBREG that clobbers an entire register.
1557
1558 Note that the operand might be one of the spill regs, if it is a
1559 pseudo reg and we are in a block where spilling has not taken place.
1560 But if there is no spilling in this block, that is OK.
1561 An explicitly used hard reg cannot be a spill reg. */
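  /* Illustrative example: if the input operand is (reg:HI 3), hard reg 3
     dies in this insn, is of the right class and is mentioned nowhere else,
     reg 3 itself may serve as the reload register.  */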
1562
1563 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1564 {
1565 rtx note;
1566 int regno;
1567 machine_mode rel_mode = inmode;
1568
1569       if (out && partial_subreg_p (rel_mode, outmode))
1570 rel_mode = outmode;
1571
1572 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1573 if (REG_NOTE_KIND (note) == REG_DEAD
1574 && REG_P (XEXP (note, 0))
1575 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1576 && reg_mentioned_p (XEXP (note, 0), in)
1577 /* Check that a former pseudo is valid; see find_dummy_reload. */
1578 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1579 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1580 ORIGINAL_REGNO (XEXP (note, 0)))
1581 && REG_NREGS (XEXP (note, 0)) == 1))
1582 && ! refers_to_regno_for_reload_p (regno,
1583					       end_hard_regno (rel_mode,
1584							       regno),
1585					       PATTERN (this_insn), inloc)
1586 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1587 /* If this is also an output reload, IN cannot be used as
1588 the reload register if it is set in this insn unless IN
1589 is also OUT. */
1590 && (out == 0 || in == out
1591 || ! hard_reg_set_here_p (regno,
1592					 end_hard_regno (rel_mode, regno),
1593					 PATTERN (this_insn)))
1594 /* ??? Why is this code so different from the previous?
1595 Is there any simple coherent way to describe the two together?
1596	       What's going on here?  */
1597 && (in != out
1598 || (GET_CODE (in) == SUBREG
1599 && (known_equal_after_align_up
1600		       (GET_MODE_SIZE (GET_MODE (in)),
1601			GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))),
1602 UNITS_PER_WORD))))
1603 /* Make sure the operand fits in the reg that dies. */
1604 && known_le (GET_MODE_SIZE (rel_mode),
1605 GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1606 && targetm.hard_regno_mode_ok (regno, inmode)
1607 && targetm.hard_regno_mode_ok (regno, outmode))
1608 {
1609 unsigned int offs;
1610 unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1611 hard_regno_nregs (regno, outmode));
1612
1613 for (offs = 0; offs < nregs; offs++)
1614 if (fixed_regs[regno + offs]
1615 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1616				       regno + offs))
1617 break;
1618
1619 if (offs == nregs
1620 && (! (refers_to_regno_for_reload_p
1621		   (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1622		  || can_reload_into (in, regno, inmode)))
1623 {
1624 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1625 break;
1626 }
1627 }
1628 }
1629
1630 if (out)
1631 output_reloadnum = i;
1632
1633 return i;
1634}
1635
1636/* Record an additional place we must replace a value
1637 for which we have already recorded a reload.
1638 RELOADNUM is the value returned by push_reload
1639 when the reload was recorded.
1640 This is used in insn patterns that use match_dup. */
1641
1642static void
1643push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1644{
1645 if (replace_reloads)
1646 {
1647 struct replacement *r = &replacements[n_replacements++];
1648 r->what = reloadnum;
1649 r->where = loc;
1650 r->mode = mode;
1651 }
1652}
1653
1654/* Duplicate any replacement we have recorded to apply at
1655 location ORIG_LOC to also be performed at DUP_LOC.
1656 This is used in insn patterns that use match_dup. */
1657
1658static void
1659dup_replacements (rtx *dup_loc, rtx *orig_loc)
1660{
1661 int i, n = n_replacements;
1662
1663 for (i = 0; i < n; i++)
1664 {
1665 struct replacement *r = &replacements[i];
1666 if (r->where == orig_loc)
1667	push_replacement (dup_loc, r->what, r->mode);
1668 }
1669}
1670
1671/* Transfer all replacements that used to be in reload FROM to be in
1672 reload TO. */
1673
1674void
1675transfer_replacements (int to, int from)
1676{
1677 int i;
1678
1679 for (i = 0; i < n_replacements; i++)
1680 if (replacements[i].what == from)
1681 replacements[i].what = to;
1682}
1683
1684/* IN_RTX is the value loaded by a reload that we now decided to inherit,
1685 or a subpart of it. If we have any replacements registered for IN_RTX,
1686 cancel the reloads that were supposed to load them.
1687 Return nonzero if we canceled any reloads. */
1688int
1689remove_address_replacements (rtx in_rtx)
1690{
1691 int i, j;
1692 char reload_flags[MAX_RELOADS];
1693 int something_changed = 0;
1694
1695   memset (reload_flags, 0, sizeof reload_flags);
1696 for (i = 0, j = 0; i < n_replacements; i++)
1697 {
1698 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1699 reload_flags[replacements[i].what] |= 1;
1700 else
1701 {
1702 replacements[j++] = replacements[i];
1703 reload_flags[replacements[i].what] |= 2;
1704 }
1705 }
1706 /* Note that the following store must be done before the recursive calls. */
1707 n_replacements = j;
1708
1709 for (i = n_reloads - 1; i >= 0; i--)
1710 {
1711 if (reload_flags[i] == 1)
1712 {
1713	  deallocate_reload_reg (i);
1714	  remove_address_replacements (rld[i].in);
1715 rld[i].in = 0;
1716 something_changed = 1;
1717 }
1718 }
1719 return something_changed;
1720}
1721
1722/* If there is only one output reload, and it is not for an earlyclobber
1723 operand, try to combine it with a (logically unrelated) input reload
1724 to reduce the number of reload registers needed.
1725
1726 This is safe if the input reload does not appear in
1727 the value being output-reloaded, because this implies
1728 it is not needed any more once the original insn completes.
1729
1730    If that doesn't work, see if we can use any of the registers that
1731 die in this insn as a reload register. We can if it is of the right
1732 class and does not appear in the value being output-reloaded. */
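/* Illustrative example: in (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
   where reg 100 needs an output reload and reg 101 an input reload, one
   reload register can serve both, since the input value is no longer needed
   once the insn has executed and reg 101 does not appear in the value being
   output-reloaded.  */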
1733
1734static void
1735combine_reloads (void)
1736{
1737 int i, regno;
1738 int output_reload = -1;
1739 int secondary_out = -1;
1740 rtx note;
1741
1742 /* Find the output reload; return unless there is exactly one
1743 and that one is mandatory. */
1744
1745 for (i = 0; i < n_reloads; i++)
1746 if (rld[i].out != 0)
1747 {
1748 if (output_reload >= 0)
1749 return;
1750 output_reload = i;
1751 }
1752
1753 if (output_reload < 0 || rld[output_reload].optional)
1754 return;
1755
1756 /* An input-output reload isn't combinable. */
1757
1758 if (rld[output_reload].in != 0)
1759 return;
1760
1761 /* If this reload is for an earlyclobber operand, we can't do anything. */
1762 if (earlyclobber_operand_p (rld[output_reload].out))
1763 return;
1764
1765 /* If there is a reload for part of the address of this operand, we would
1766 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1767 its life to the point where doing this combine would not lower the
1768 number of spill registers needed. */
1769 for (i = 0; i < n_reloads; i++)
1770 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1771 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1772 && rld[i].opnum == rld[output_reload].opnum)
1773 return;
1774
1775 /* Check each input reload; can we combine it? */
1776
1777 for (i = 0; i < n_reloads; i++)
1778 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1779 /* Life span of this reload must not extend past main insn. */
1780 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1781 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1782 && rld[i].when_needed != RELOAD_OTHER
1783 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1784 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1785 [(int) rld[output_reload].outmode])
1786 && known_eq (rld[i].inc, 0)
1787 && rld[i].reg_rtx == 0
1788 /* Don't combine two reloads with different secondary
1789 memory locations. */
1790 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1791 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1792 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1793 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1794 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1795 ? (rld[i].rclass == rld[output_reload].rclass)
1796 : (reg_class_subset_p (rld[i].rclass,
1797 rld[output_reload].rclass)
1798 || reg_class_subset_p (rld[output_reload].rclass,
1799 rld[i].rclass)))
1800 && (MATCHES (rld[i].in, rld[output_reload].out)
1801 /* Args reversed because the first arg seems to be
1802 the one that we imagine being modified
1803 while the second is the one that might be affected. */
1804 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1805 rld[i].in)
1806 /* However, if the input is a register that appears inside
1807 the output, then we also can't share.
1808 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1809 If the same reload reg is used for both reg 69 and the
1810 result to be stored in memory, then that result
1811 will clobber the address of the memory ref. */
1812 && ! (REG_P (rld[i].in)
1813 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1814 rld[output_reload].out))))
1815	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1816					 rld[i].when_needed != RELOAD_FOR_INPUT)
1817 && (reg_class_size[(int) rld[i].rclass]
1818 || targetm.small_register_classes_for_mode_p (VOIDmode))
1819 /* We will allow making things slightly worse by combining an
1820 input and an output, but no worse than that. */
1821 && (rld[i].when_needed == RELOAD_FOR_INPUT
1822 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1823 {
1824 int j;
1825
1826 /* We have found a reload to combine with! */
1827 rld[i].out = rld[output_reload].out;
1828 rld[i].out_reg = rld[output_reload].out_reg;
1829 rld[i].outmode = rld[output_reload].outmode;
1830 /* Mark the old output reload as inoperative. */
1831 rld[output_reload].out = 0;
1832 /* The combined reload is needed for the entire insn. */
1833 rld[i].when_needed = RELOAD_OTHER;
1834 /* If the output reload had a secondary reload, copy it. */
1835 if (rld[output_reload].secondary_out_reload != -1)
1836 {
1837 rld[i].secondary_out_reload
1838 = rld[output_reload].secondary_out_reload;
1839 rld[i].secondary_out_icode
1840 = rld[output_reload].secondary_out_icode;
1841 }
1842
1843 /* Copy any secondary MEM. */
1844 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1845 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1846 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1847 /* If required, minimize the register class. */
1848 if (reg_class_subset_p (rld[output_reload].rclass,
1849 rld[i].rclass))
1850 rld[i].rclass = rld[output_reload].rclass;
1851
1852 /* Transfer all replacements from the old reload to the combined. */
1853 for (j = 0; j < n_replacements; j++)
1854 if (replacements[j].what == output_reload)
1855 replacements[j].what = i;
1856
1857 return;
1858 }
1859
1860 /* If this insn has only one operand that is modified or written (assumed
1861 to be the first), it must be the one corresponding to this reload. It
1862 is safe to use anything that dies in this insn for that output provided
1863 that it does not occur in the output (we already know it isn't an
1864    earlyclobber).  If this is an asm insn, give up.  */
1865
1866 if (INSN_CODE (this_insn) == -1)
1867 return;
1868
1869 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1870 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1871 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1872 return;
1873
1874 /* See if some hard register that dies in this insn and is not used in
1875 the output is the right class. Only works if the register we pick
1876 up can fully hold our output reload. */
1877 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1878 if (REG_NOTE_KIND (note) == REG_DEAD
1879 && REG_P (XEXP (note, 0))
1880 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1881 rld[output_reload].out)
1882 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1883 && targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1884 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1885			      regno)
1886	&& (hard_regno_nregs (regno, rld[output_reload].outmode)
1887 <= REG_NREGS (XEXP (note, 0)))
1888 /* Ensure that a secondary or tertiary reload for this output
1889 won't want this register. */
1890 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1891 || (!(TEST_HARD_REG_BIT
1892	      (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1893 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1894 || !(TEST_HARD_REG_BIT
1895 (reg_class_contents[(int) rld[secondary_out].rclass],
1896		     regno)))))
1897 && !fixed_regs[regno]
1898 /* Check that a former pseudo is valid; see find_dummy_reload. */
1899 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1900 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1901 ORIGINAL_REGNO (XEXP (note, 0)))
1902 && REG_NREGS (XEXP (note, 0)) == 1)))
1903 {
1904 rld[output_reload].reg_rtx
1905 = gen_rtx_REG (rld[output_reload].outmode, regno);
1906 return;
1907 }
1908}
1909
1910/* Try to find a reload register for an in-out reload (expressions IN and OUT).
1911 See if one of IN and OUT is a register that may be used;
1912 this is desirable since a spill-register won't be needed.
1913 If so, return the register rtx that proves acceptable.
1914
1915 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1916 RCLASS is the register class required for the reload.
1917
1918 If FOR_REAL is >= 0, it is the number of the reload,
1919 and in some cases when it can be discovered that OUT doesn't need
1920 to be computed, clear out rld[FOR_REAL].out.
1921
1922 If FOR_REAL is -1, this should not be done, because this call
1923 is just to see if a register can be found, not to find and install it.
1924
1925 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1926 puts an additional constraint on being able to use IN for OUT since
1927 IN must not appear elsewhere in the insn (it is assumed that IN itself
1928 is safe from the earlyclobber). */
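/* Illustrative example: for an in-out reload with IN = (reg:SI 100) and
   OUT = (reg:SI 2), where hard reg 2 belongs to RCLASS and is not otherwise
   referenced in the insn, OUT itself can be returned, so no separate spill
   register is needed.  */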
1929
1930static rtx
1931find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1932 machine_mode inmode, machine_mode outmode,
1933 reg_class_t rclass, int for_real, int earlyclobber)
1934{
1935 rtx in = real_in;
1936 rtx out = real_out;
1937 int in_offset = 0;
1938 int out_offset = 0;
1939 rtx value = 0;
1940
1941 /* If operands exceed a word, we can't use either of them
1942 unless they have the same size. */
1943   if (maybe_ne (GET_MODE_SIZE (outmode), GET_MODE_SIZE (inmode))
1944 && (maybe_gt (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1945 || maybe_gt (GET_MODE_SIZE (inmode), UNITS_PER_WORD)))
1946 return 0;
1947
1948 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1949 respectively refers to a hard register. */
1950
1951 /* Find the inside of any subregs. */
1952 while (GET_CODE (out) == SUBREG)
1953 {
1954 if (REG_P (SUBREG_REG (out))
1955 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1956 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1957 GET_MODE (SUBREG_REG (out)),
1958 SUBREG_BYTE (out),
1959 GET_MODE (out));
1960 out = SUBREG_REG (out);
1961 }
1962 while (GET_CODE (in) == SUBREG)
1963 {
1964 if (REG_P (SUBREG_REG (in))
1965 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1966 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1967 GET_MODE (SUBREG_REG (in)),
1968 SUBREG_BYTE (in),
1969 GET_MODE (in));
1970 in = SUBREG_REG (in);
1971 }
1972
1973 /* Narrow down the reg class, the same way push_reload will;
1974 otherwise we might find a dummy now, but push_reload won't. */
1975 {
1976 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1977 if (preferred_class != NO_REGS)
1978 rclass = (enum reg_class) preferred_class;
1979 }
1980
1981 /* See if OUT will do. */
1982 if (REG_P (out)
1983 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1984 {
1985 unsigned int regno = REGNO (out) + out_offset;
1986       unsigned int nwords = hard_regno_nregs (regno, outmode);
1987 rtx saved_rtx;
1988
1989 /* When we consider whether the insn uses OUT,
1990 ignore references within IN. They don't prevent us
1991 from copying IN into OUT, because those refs would
1992 move into the insn that reloads IN.
1993
1994 However, we only ignore IN in its role as this reload.
1995 If the insn uses IN elsewhere and it contains OUT,
1996 that counts. We can't be sure it's the "same" operand
1997 so it might not go through this reload.
1998
1999 We also need to avoid using OUT if it, or part of it, is a
2000 fixed register. Modifying such registers, even transiently,
2001 may have undefined effects on the machine, such as modifying
2002 the stack pointer. */
2003 saved_rtx = *inloc;
2004 *inloc = const0_rtx;
2005
2006 if (regno < FIRST_PSEUDO_REGISTER
2007 && targetm.hard_regno_mode_ok (regno, outmode)
2008 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2009					    PATTERN (this_insn), outloc))
2010 {
2011 unsigned int i;
2012
2013 for (i = 0; i < nwords; i++)
2014 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2015				   regno + i)
2016 || fixed_regs[regno + i])
2017 break;
2018
2019 if (i == nwords)
2020 {
2021 if (REG_P (real_out))
2022 value = real_out;
2023 else
2024 value = gen_rtx_REG (outmode, regno);
2025 }
2026 }
2027
2028 *inloc = saved_rtx;
2029 }
2030
2031 /* Consider using IN if OUT was not acceptable
2032 or if OUT dies in this insn (like the quotient in a divmod insn).
2033      We can't use IN unless it dies in this insn,
2034 which means we must know accurately which hard regs are live.
2035 Also, the result can't go in IN if IN is used within OUT,
2036 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2037 if (hard_regs_live_known
2038 && REG_P (in)
2039 && REGNO (in) < FIRST_PSEUDO_REGISTER
2040 && (value == 0
2041 || find_reg_note (this_insn, REG_UNUSED, real_out))
2042 && find_reg_note (this_insn, REG_DEAD, real_in)
2043 && !fixed_regs[REGNO (in)]
2044 && targetm.hard_regno_mode_ok (REGNO (in),
2045 /* The only case where out and real_out
2046 might have different modes is where
2047 real_out is a subreg, and in that
2048 case, out has a real mode. */
2049 (GET_MODE (out) != VOIDmode
2050 ? GET_MODE (out) : outmode))
2051 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2052 /* However only do this if we can be sure that this input
2053 operand doesn't correspond with an uninitialized pseudo.
2054 global can assign some hardreg to it that is the same as
2055 the one assigned to a different, also live pseudo (as it
2056 can ignore the conflict). We must never introduce writes
2057 to such hardregs, as they would clobber the other live
2058 pseudo. See PR 20973. */
2059 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2060 ORIGINAL_REGNO (in))
2061 /* Similarly, only do this if we can be sure that the death
2062 note is still valid. global can assign some hardreg to
2063 the pseudo referenced in the note and simultaneously a
2064 subword of this hardreg to a different, also live pseudo,
2065 because only another subword of the hardreg is actually
2066 used in the insn. This cannot happen if the pseudo has
2067 been assigned exactly one hardreg. See PR 33732. */
2068 && REG_NREGS (in) == 1)))
2069 {
2070 unsigned int regno = REGNO (in) + in_offset;
2071       unsigned int nwords = hard_regno_nregs (regno, inmode);
2072
2073 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2074 && ! hard_reg_set_here_p (regno, regno + nwords,
2075				     PATTERN (this_insn))
2076 && (! earlyclobber
2077 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2078						 PATTERN (this_insn), inloc)))
2079 {
2080 unsigned int i;
2081
2082 for (i = 0; i < nwords; i++)
2083 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2084				   regno + i))
2085 break;
2086
2087 if (i == nwords)
2088 {
2089 /* If we were going to use OUT as the reload reg
2090 and changed our mind, it means OUT is a dummy that
2091 dies here. So don't bother copying value to it. */
2092 if (for_real >= 0 && value == real_out)
2093 rld[for_real].out = 0;
2094 if (REG_P (real_in))
2095 value = real_in;
2096 else
2097 value = gen_rtx_REG (inmode, regno);
2098 }
2099 }
2100 }
2101
2102 return value;
2103}
2104
2105/* This page contains subroutines used mainly for determining
2106 whether the IN or an OUT of a reload can serve as the
2107 reload register. */
2108
2109/* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2110
2111int
2112earlyclobber_operand_p (rtx x)
2113{
2114 int i;
2115
2116 for (i = 0; i < n_earlyclobbers; i++)
2117 if (reload_earlyclobbers[i] == x)
2118 return 1;
2119
2120 return 0;
2121}
2122
2123/* Return 1 if expression X alters a hard reg in the range
2124 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2125 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2126 X should be the body of an instruction. */
2127
2128static int
2129hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2130{
2131 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2132 {
2133 rtx op0 = SET_DEST (x);
2134
2135 while (GET_CODE (op0) == SUBREG)
2136 op0 = SUBREG_REG (op0);
2137 if (REG_P (op0))
2138 {
2139 unsigned int r = REGNO (op0);
2140
2141 /* See if this reg overlaps range under consideration. */
2142 if (r < end_regno
2143	      && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2144 return 1;
2145 }
2146 }
2147 else if (GET_CODE (x) == PARALLEL)
2148 {
2149 int i = XVECLEN (x, 0) - 1;
2150
2151 for (; i >= 0; i--)
2152 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2153 return 1;
2154 }
2155
2156 return 0;
2157}
2158
2159/* Return true if ADDR is a valid memory address for mode MODE
2160 in address space AS, and check that each pseudo reg has the
2161 proper kind of hard reg. */
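/* Note that the target hook call below passes 1 for its strictness argument,
   so pseudo registers in ADDR are acceptable only if they have already been
   assigned suitable hard registers.  */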
2162
2163bool
2164strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2165 rtx addr, addr_space_t as, code_helper)
2166{
2167#ifdef GO_IF_LEGITIMATE_ADDRESS
2168 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2169 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2170 return false;
2171
2172 win:
2173 return true;
2174#else
2175 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as,
2176 ERROR_MARK);
2177#endif
2178}
2179
2180/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2181 if they are the same hard reg, and has special hacks for
2182 autoincrement and autodecrement.
2183 This is specifically intended for find_reloads to use
2184 in determining whether two operands match.
2185 X is the operand whose number is the lower of the two.
2186
2187 The value is 2 if Y contains a pre-increment that matches
2188 a non-incrementing address in X. */
2189
2190/* ??? To be completely correct, we should arrange to pass
2191 for X the output operand and for Y the input operand.
2192 For now, we assume that the output operand has the lower number
2193 because that is natural in (SET output (... input ...)). */
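/* Illustrative example: (reg:SI 3) matches (subreg:SI (reg:DI 3) 0) on
   targets where that subreg resolves to hard register 3, even though
   rtx_equal_p would reject the pair.  */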
2194
2195int
2196operands_match_p (rtx x, rtx y)
2197{
2198 int i;
2199 RTX_CODE code = GET_CODE (x);
2200 const char *fmt;
2201 int success_2;
2202
2203 if (x == y)
2204 return 1;
2205 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2206 && (REG_P (y) || (GET_CODE (y) == SUBREG
2207 && REG_P (SUBREG_REG (y)))))
2208 {
2209 int j;
2210
2211 if (code == SUBREG)
2212 {
2213 i = REGNO (SUBREG_REG (x));
2214 if (i >= FIRST_PSEUDO_REGISTER
2215 || simplify_subreg_regno (REGNO (SUBREG_REG (x)),
2216 GET_MODE (SUBREG_REG (x)),
2217 SUBREG_BYTE (x),
2218 GET_MODE (x)) == -1)
2219 goto slow;
2220 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2221 GET_MODE (SUBREG_REG (x)),
2222 SUBREG_BYTE (x),
2223 GET_MODE (x));
2224 }
2225 else
2226 i = REGNO (x);
2227
2228 if (GET_CODE (y) == SUBREG)
2229 {
2230 j = REGNO (SUBREG_REG (y));
2231 if (j >= FIRST_PSEUDO_REGISTER
2232 || simplify_subreg_regno (REGNO (SUBREG_REG (y)),
2233 GET_MODE (SUBREG_REG (y)),
2234 SUBREG_BYTE (y),
2235 GET_MODE (y)) == -1)
2236 goto slow;
2237 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2238 GET_MODE (SUBREG_REG (y)),
2239 SUBREG_BYTE (y),
2240 GET_MODE (y));
2241 }
2242 else
2243 j = REGNO (y);
2244
2245 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2246 multiple hard register group of scalar integer registers, so that
2247 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2248 register. */
2249 scalar_int_mode xmode;
2250 if (REG_WORDS_BIG_ENDIAN
2251	  && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2252	  && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2253	  && i < FIRST_PSEUDO_REGISTER)
2254	i += hard_regno_nregs (i, xmode) - 1;
2255 scalar_int_mode ymode;
2256 if (REG_WORDS_BIG_ENDIAN
2257	  && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2258	  && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2259	  && j < FIRST_PSEUDO_REGISTER)
2260	j += hard_regno_nregs (j, ymode) - 1;
2261
2262 return i == j;
2263 }
2264 /* If two operands must match, because they are really a single
2265 operand of an assembler insn, then two postincrements are invalid
2266 because the assembler insn would increment only once.
2267 On the other hand, a postincrement matches ordinary indexing
2268 if the postincrement is the output operand. */
2269 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2270 return operands_match_p (XEXP (x, 0), y);
2271 /* Two preincrements are invalid
2272 because the assembler insn would increment only once.
2273 On the other hand, a preincrement matches ordinary indexing
2274 if the preincrement is the input operand.
2275 In this case, return 2, since some callers need to do special
2276 things when this happens. */
2277 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2278 || GET_CODE (y) == PRE_MODIFY)
2279 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2280
2281 slow:
2282
2283 /* Now we have disposed of all the cases in which different rtx codes
2284 can match. */
2285 if (code != GET_CODE (y))
2286 return 0;
2287
2288 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2289 if (GET_MODE (x) != GET_MODE (y))
2290 return 0;
2291
2292 /* MEMs referring to different address space are not equivalent. */
2293 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2294 return 0;
2295
2296 switch (code)
2297 {
2298 CASE_CONST_UNIQUE:
2299 return 0;
2300
2301 case CONST_VECTOR:
2302 if (!same_vector_encodings_p (x, y))
2303 return false;
2304 break;
2305
2306 case LABEL_REF:
2307       return label_ref_label (x) == label_ref_label (y);
2308 case SYMBOL_REF:
2309 return XSTR (x, 0) == XSTR (y, 0);
2310
2311 default:
2312 break;
2313 }
2314
2315 /* Compare the elements. If any pair of corresponding elements
2316      fail to match, return 0 for the whole thing.  */
2317
2318 success_2 = 0;
2319 fmt = GET_RTX_FORMAT (code);
2320 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2321 {
2322 int val, j;
2323 switch (fmt[i])
2324 {
2325 case 'w':
2326 if (XWINT (x, i) != XWINT (y, i))
2327 return 0;
2328 break;
2329
2330 case 'i':
2331 if (XINT (x, i) != XINT (y, i))
2332 return 0;
2333 break;
2334
2335 case 'L':
2336 if (XLOC (x, i) != XLOC (y, i))
2337 return 0;
2338 break;
2339
2340 case 'p':
2341 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2342 return 0;
2343 break;
2344
2345 case 'e':
2346 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2347 if (val == 0)
2348 return 0;
2349 /* If any subexpression returns 2,
2350 we should return 2 if we are successful. */
2351 if (val == 2)
2352 success_2 = 1;
2353 break;
2354
2355 case '0':
2356 break;
2357
2358 case 'E':
2359 if (XVECLEN (x, i) != XVECLEN (y, i))
2360 return 0;
2361 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2362 {
2363 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2364 if (val == 0)
2365 return 0;
2366 if (val == 2)
2367 success_2 = 1;
2368 }
2369 break;
2370
2371 /* It is believed that rtx's at this level will never
2372 contain anything but integers and other rtx's,
2373 except for within LABEL_REFs and SYMBOL_REFs. */
2374 default:
2375 gcc_unreachable ();
2376 }
2377 }
2378 return 1 + success_2;
2379}
2380
2381/* Describe the range of registers or memory referenced by X.
2382 If X is a register, set REG_FLAG and put the first register
2383 number into START and the last plus one into END.
2384 If X is a memory reference, put a base address into BASE
2385 and a range of integer offsets into START and END.
2386 If X is pushing on the stack, we can assume it causes no trouble,
2387 so we set the SAFE field. */
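/* Illustrative example: (mem:SI (plus:SI (reg sp) (const_int 8))) decomposes
   into base = (reg sp), start = 8, end = 12 (assuming 4-byte SImode).  */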
2388
2389static struct decomposition
2390decompose (rtx x)
2391{
2392 struct decomposition val;
2393 int all_const = 0, regno;
2394
2395   memset (&val, 0, sizeof (val));
2396
2397 switch (GET_CODE (x))
2398 {
2399 case MEM:
2400 {
2401 rtx base = NULL_RTX, offset = 0;
2402 rtx addr = XEXP (x, 0);
2403
2404 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2405 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2406 {
2407 val.base = XEXP (addr, 0);
2408 val.start = -GET_MODE_SIZE (GET_MODE (x));
2409 val.end = GET_MODE_SIZE (GET_MODE (x));
2410 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2411 return val;
2412 }
2413
2414 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2415 {
2416 if (GET_CODE (XEXP (addr, 1)) == PLUS
2417 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2418 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2419 {
2420 val.base = XEXP (addr, 0);
2421 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2422 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2423 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2424 return val;
2425 }
2426 }
2427
2428 if (GET_CODE (addr) == CONST)
2429 {
2430 addr = XEXP (addr, 0);
2431 all_const = 1;
2432 }
2433 if (GET_CODE (addr) == PLUS)
2434 {
2435 if (CONSTANT_P (XEXP (addr, 0)))
2436 {
2437 base = XEXP (addr, 1);
2438 offset = XEXP (addr, 0);
2439 }
2440 else if (CONSTANT_P (XEXP (addr, 1)))
2441 {
2442 base = XEXP (addr, 0);
2443 offset = XEXP (addr, 1);
2444 }
2445 }
2446
2447 if (offset == 0)
2448 {
2449 base = addr;
2450 offset = const0_rtx;
2451 }
2452 if (GET_CODE (offset) == CONST)
2453 offset = XEXP (offset, 0);
2454 if (GET_CODE (offset) == PLUS)
2455 {
2456 if (CONST_INT_P (XEXP (offset, 0)))
2457 {
2458 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2459 offset = XEXP (offset, 0);
2460 }
2461 else if (CONST_INT_P (XEXP (offset, 1)))
2462 {
2463 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2464 offset = XEXP (offset, 1);
2465 }
2466 else
2467 {
2468 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2469 offset = const0_rtx;
2470 }
2471 }
2472 else if (!CONST_INT_P (offset))
2473 {
2474 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2475 offset = const0_rtx;
2476 }
2477
2478 if (all_const && GET_CODE (base) == PLUS)
2479 base = gen_rtx_CONST (GET_MODE (base), base);
2480
2481 gcc_assert (CONST_INT_P (offset));
2482
2483 val.start = INTVAL (offset);
2484 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2485 val.base = base;
2486 }
2487 break;
2488
2489 case REG:
2490 val.reg_flag = 1;
2491 regno = true_regnum (x);
2492 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2493 {
2494 /* A pseudo with no hard reg. */
2495 val.start = REGNO (x);
2496 val.end = val.start + 1;
2497 }
2498 else
2499 {
2500 /* A hard reg. */
2501 val.start = regno;
2502 val.end = end_hard_regno (GET_MODE (x), regno);
2503 }
2504 break;
2505
2506 case SUBREG:
2507 if (!REG_P (SUBREG_REG (x)))
2508 /* This could be more precise, but it's good enough. */
2509 return decompose (SUBREG_REG (x));
2510 regno = true_regnum (x);
2511 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2512 return decompose (SUBREG_REG (x));
2513
2514 /* A hard reg. */
2515 val.reg_flag = 1;
2516 val.start = regno;
2517 val.end = regno + subreg_nregs (x);
2518 break;
2519
2520 case SCRATCH:
2521 /* This hasn't been assigned yet, so it can't conflict yet. */
2522 val.safe = 1;
2523 break;
2524
2525 default:
2526 gcc_assert (CONSTANT_P (x));
2527 val.safe = 1;
2528 break;
2529 }
2530 return val;
2531}
2532
2533/* Return 1 if altering Y will not modify the value of X.
2534 Y is also described by YDATA, which should be decompose (Y). */
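/* Illustrative example: a store to (mem:SI (plus (reg sp) (const_int 16)))
   cannot alter (mem:SI (plus (reg sp) (const_int 32))), because the byte
   ranges [16,20) and [32,36) obtained from the decompositions do not
   overlap.  */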
2535
2536static int
2537immune_p (rtx x, rtx y, struct decomposition ydata)
2538{
2539 struct decomposition xdata;
2540
2541 if (ydata.reg_flag)
2542 /* In this case the decomposition structure contains register
2543 numbers rather than byte offsets. */
2544 return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2545 ydata.end.to_constant (),
2546 x, (rtx *) 0);
2547 if (ydata.safe)
2548 return 1;
2549
2550 gcc_assert (MEM_P (y));
2551 /* If Y is memory and X is not, Y can't affect X. */
2552 if (!MEM_P (x))
2553 return 1;
2554
2555 xdata = decompose (x);
2556
2557 if (! rtx_equal_p (xdata.base, ydata.base))
2558 {
2559 /* If bases are distinct symbolic constants, there is no overlap. */
2560 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2561 return 1;
2562 /* Constants and stack slots never overlap. */
2563 if (CONSTANT_P (xdata.base)
2564 && (ydata.base == frame_pointer_rtx
2565 || ydata.base == hard_frame_pointer_rtx
2566 || ydata.base == stack_pointer_rtx))
2567 return 1;
2568 if (CONSTANT_P (ydata.base)
2569 && (xdata.base == frame_pointer_rtx
2570 || xdata.base == hard_frame_pointer_rtx
2571 || xdata.base == stack_pointer_rtx))
2572 return 1;
2573 /* If either base is variable, we don't know anything. */
2574 return 0;
2575 }
2576
2577 return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2578}
2579
2580/* Similar, but calls decompose. */
2581
2582int
2583safe_from_earlyclobber (rtx op, rtx clobber)
2584{
2585 struct decomposition early_data;
2586
2587   early_data = decompose (clobber);
2588   return immune_p (op, clobber, early_data);
2589}
2590
2591/* Main entry point of this file: search the body of INSN
2592 for values that need reloading and record them with push_reload.
2593 REPLACE nonzero means record also where the values occur
2594 so that subst_reloads can be used.
2595
2596 IND_LEVELS says how many levels of indirection are supported by this
2597 machine; a value of zero means that a memory reference is not a valid
2598 memory address.
2599
2600 LIVE_KNOWN says we have valid information about which hard
2601 regs are live at each point in the program; this is true when
2602 we are called from global_alloc but false when stupid register
2603 allocation has been done.
2604
2605 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2606 which is nonnegative if the reg has been commandeered for reloading into.
2607 It is copied into STATIC_RELOAD_REG_P and referenced from there
2608 by various subroutines.
2609
2610 Return TRUE if some operands need to be changed, because of swapping
2611 commutative operands, reg_equiv_address substitution, or whatever. */
2612
2613int
2614find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2615 short *reload_reg_p)
2616{
2617 int insn_code_number;
2618 int i, j;
2619 int noperands;
2620 /* These start out as the constraints for the insn
2621 and they are chewed up as we consider alternatives. */
2622 const char *constraints[MAX_RECOG_OPERANDS];
2623 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2624 a register. */
2625 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2626 char pref_or_nothing[MAX_RECOG_OPERANDS];
2627 /* Nonzero for a MEM operand whose entire address needs a reload.
2628 May be -1 to indicate the entire address may or may not need a reload. */
2629 int address_reloaded[MAX_RECOG_OPERANDS];
2630 /* Nonzero for an address operand that needs to be completely reloaded.
2631 May be -1 to indicate the entire operand may or may not need a reload. */
2632 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2633 /* Value of enum reload_type to use for operand. */
2634 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2635 /* Value of enum reload_type to use within address of operand. */
2636 enum reload_type address_type[MAX_RECOG_OPERANDS];
2637 /* Save the usage of each operand. */
2638 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2639 int no_input_reloads = 0, no_output_reloads = 0;
2640 int n_alternatives;
2641 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2642 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2643 char this_alternative_win[MAX_RECOG_OPERANDS];
2644 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2645 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2646 int this_alternative_matches[MAX_RECOG_OPERANDS];
2647 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2648 int this_alternative_number;
2649 int goal_alternative_number = 0;
2650 int operand_reloadnum[MAX_RECOG_OPERANDS];
2651 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2652 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2653 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2654 char goal_alternative_win[MAX_RECOG_OPERANDS];
2655 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2656 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2657 int goal_alternative_swapped;
2658 int best;
2659 int commutative;
2660 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2661 rtx substed_operand[MAX_RECOG_OPERANDS];
2662 rtx body = PATTERN (insn);
2663 rtx set = single_set (insn);
2664 int goal_earlyclobber = 0, this_earlyclobber;
2665 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2666 int retval = 0;
2667
2668 this_insn = insn;
2669 n_reloads = 0;
2670 n_replacements = 0;
2671 n_earlyclobbers = 0;
2672 replace_reloads = replace;
2673 hard_regs_live_known = live_known;
2674 static_reload_reg_p = reload_reg_p;
2675
2676 if (JUMP_P (insn) && INSN_CODE (insn) < 0)
2677 {
2678 extract_insn (insn);
2679 for (i = 0; i < recog_data.n_operands; i++)
2680 if (recog_data.operand_type[i] != OP_IN)
2681 break;
2682 if (i < recog_data.n_operands)
2683 {
2684 error_for_asm (insn,
2685 "the target does not support %<asm goto%> "
2686 "with outputs in %<asm%>");
2687 ira_nullify_asm_goto (insn);
2688 return 0;
2689 }
2690 }
2691
2692 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads. */
2693 if (JUMP_P (insn) || CALL_P (insn))
2694 no_output_reloads = 1;
2695
2696 /* The eliminated forms of any secondary memory locations are per-insn, so
2697 clear them out here. */
2698
2699 if (secondary_memlocs_elim_used)
2700 {
2701       memset (secondary_memlocs_elim, 0,
2702	       sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2703 secondary_memlocs_elim_used = 0;
2704 }
2705
2706 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2707 is cheap to move between them. If it is not, there may not be an insn
2708 to do the copy, so we may need a reload. */
2709 if (GET_CODE (body) == SET
2710 && REG_P (SET_DEST (body))
2711 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2712 && REG_P (SET_SRC (body))
2713 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2714 && register_move_cost (GET_MODE (SET_SRC (body)),
2715 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2716 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2717 return 0;
2718
2719 extract_insn (insn);
2720
2721 noperands = reload_n_operands = recog_data.n_operands;
2722 n_alternatives = recog_data.n_alternatives;
2723
2724 /* Just return "no reloads" if insn has no operands with constraints. */
2725 if (noperands == 0 || n_alternatives == 0)
2726 return 0;
2727
2728 insn_code_number = INSN_CODE (insn);
2729 this_insn_is_asm = insn_code_number < 0;
2730
2731   memcpy (operand_mode, recog_data.operand_mode,
2732	   noperands * sizeof (machine_mode));
2733   memcpy (constraints, recog_data.constraints,
2734	   noperands * sizeof (const char *));
2735
2736 commutative = -1;
2737
2738 /* If we will need to know, later, whether some pair of operands
2739 are the same, we must compare them now and save the result.
2740 Reloading the base and index registers will clobber them
2741 and afterward they will fail to match. */
2742
2743 for (i = 0; i < noperands; i++)
2744 {
2745 const char *p;
2746 int c;
2747 char *end;
2748
2749 substed_operand[i] = recog_data.operand[i];
2750 p = constraints[i];
2751
2752 modified[i] = RELOAD_READ;
2753
2754 /* Scan this operand's constraint to see if it is an output operand,
2755 an in-out operand, is commutative, or should match another. */
2756
2757 while ((c = *p))
2758 {
2759 p += CONSTRAINT_LEN (c, p);
2760 switch (c)
2761 {
2762 case '=':
2763 modified[i] = RELOAD_WRITE;
2764 break;
2765 case '+':
2766 modified[i] = RELOAD_READ_WRITE;
2767 break;
2768 case '%':
2769 {
2770 /* The last operand should not be marked commutative. */
2771 gcc_assert (i != noperands - 1);
2772
2773 /* We currently only support one commutative pair of
2774 operands. Some existing asm code currently uses more
2775 than one pair. Previously, that would usually work,
2776 but sometimes it would crash the compiler. We
2777 continue supporting that case as well as we can by
2778 silently ignoring all but the first pair. In the
2779 future we may handle it correctly. */
2780 if (commutative < 0)
2781 commutative = i;
2782 else
2783 gcc_assert (this_insn_is_asm);
2784 }
2785 break;
2786 /* Use of ISDIGIT is tempting here, but it may get expensive because
2787 of locale support we don't want. */
2788 case '0': case '1': case '2': case '3': case '4':
2789 case '5': case '6': case '7': case '8': case '9':
2790 {
2791	      c = strtoul (p - 1, &end, 10);
2792 p = end;
2793
2794 operands_match[c][i]
2795		= operands_match_p (recog_data.operand[c],
2796				    recog_data.operand[i]);
2797
2798 /* An operand may not match itself. */
2799 gcc_assert (c != i);
2800
2801 /* If C can be commuted with C+1, and C might need to match I,
2802 then C+1 might also need to match I. */
2803 if (commutative >= 0)
2804 {
2805 if (c == commutative || c == commutative + 1)
2806 {
2807 int other = c + (c == commutative ? 1 : -1);
2808 operands_match[other][i]
2809		    = operands_match_p (recog_data.operand[other],
2810					recog_data.operand[i]);
2811 }
2812 if (i == commutative || i == commutative + 1)
2813 {
2814 int other = i + (i == commutative ? 1 : -1);
2815 operands_match[c][other]
2816		    = operands_match_p (recog_data.operand[c],
2817					recog_data.operand[other]);
2818 }
2819 /* Note that C is supposed to be less than I.
2820 No need to consider altering both C and I because in
2821 that case we would alter one into the other. */
2822 }
2823 }
2824 }
2825 }
2826 }
2827
2828 /* Examine each operand that is a memory reference or memory address
2829 and reload parts of the addresses into index registers.
2830 Also here any references to pseudo regs that didn't get hard regs
2831 but are equivalent to constants get replaced in the insn itself
2832 with those constants. Nobody will ever see them again.
2833
2834 Finally, set up the preferred classes of each operand. */
2835
2836 for (i = 0; i < noperands; i++)
2837 {
2838 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2839
2840 address_reloaded[i] = 0;
2841 address_operand_reloaded[i] = 0;
2842 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2843 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2844 : RELOAD_OTHER);
2845 address_type[i]
2846 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2847 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2848 : RELOAD_OTHER);
2849
2850 if (*constraints[i] == 0)
2851 /* Ignore things like match_operator operands. */
2852 ;
2853 else if (insn_extra_address_constraint
2854		 (lookup_constraint (constraints[i])))
2855 {
2856 address_operand_reloaded[i]
2857 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2858 recog_data.operand[i],
2859 recog_data.operand_loc[i],
2860 i, operand_type[i], ind_levels, insn);
2861
2862 /* If we now have a simple operand where we used to have a
2863 PLUS or MULT or ASHIFT, re-recognize and try again. */
2864 if ((OBJECT_P (*recog_data.operand_loc[i])
2865 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2866 && (GET_CODE (recog_data.operand[i]) == MULT
2867 || GET_CODE (recog_data.operand[i]) == ASHIFT
2868 || GET_CODE (recog_data.operand[i]) == PLUS))
2869 {
2870 INSN_CODE (insn) = -1;
2871 retval = find_reloads (insn, replace, ind_levels, live_known,
2872 reload_reg_p);
2873 return retval;
2874 }
2875
2876 recog_data.operand[i] = *recog_data.operand_loc[i];
2877 substed_operand[i] = recog_data.operand[i];
2878
2879 /* Address operands are reloaded in their existing mode,
2880 no matter what is specified in the machine description. */
2881 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2882
2883	  /* If the address is a single CONST_INT, pick the address mode
2884	     instead; otherwise we will later not know in which mode the
2885	     reload should be performed.  */
2886 if (operand_mode[i] == VOIDmode)
2887 operand_mode[i] = Pmode;
2888
2889 }
2890 else if (code == MEM)
2891 {
2892 address_reloaded[i]
2893 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2894 recog_data.operand_loc[i],
2895 XEXP (recog_data.operand[i], 0),
2896 &XEXP (recog_data.operand[i], 0),
2897 i, address_type[i], ind_levels, insn);
2898 recog_data.operand[i] = *recog_data.operand_loc[i];
2899 substed_operand[i] = recog_data.operand[i];
2900 }
2901 else if (code == SUBREG)
2902 {
2903 rtx reg = SUBREG_REG (recog_data.operand[i]);
2904 rtx op
2905 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2906 ind_levels,
2907 set != 0
2908 && &SET_DEST (set) == recog_data.operand_loc[i],
2909 insn,
2910 &address_reloaded[i]);
2911
2912 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2913 that didn't get a hard register, emit a USE with a REG_EQUAL
2914 note in front so that we might inherit a previous, possibly
2915 wider reload. */
2916
2917 if (replace
2918 && MEM_P (op)
2919 && REG_P (reg)
2920 && known_ge (GET_MODE_SIZE (GET_MODE (reg)),
2921 GET_MODE_SIZE (GET_MODE (op)))
2922 && reg_equiv_constant (REGNO (reg)) == 0)
2923 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2924 insn),
2925 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2926
2927 substed_operand[i] = recog_data.operand[i] = op;
2928 }
2929 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2930 /* We can get a PLUS as an "operand" as a result of register
2931 elimination. See eliminate_regs and gen_reload. We handle
2932 a unary operator by reloading the operand. */
2933 substed_operand[i] = recog_data.operand[i]
2934 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2935 ind_levels, 0, insn,
2936 &address_reloaded[i]);
2937 else if (code == REG)
2938 {
2939 /* This is equivalent to calling find_reloads_toplev.
2940 The code is duplicated for speed.
2941 When we find a pseudo always equivalent to a constant,
2942 we replace it by the constant. We must be sure, however,
2943 that we don't try to replace it in the insn in which it
2944 is being set. */
2945 int regno = REGNO (recog_data.operand[i]);
2946 if (reg_equiv_constant (regno) != 0
2947 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2948 {
2949 /* Record the existing mode so that the check if constants are
2950 allowed will work when operand_mode isn't specified. */
2951
2952 if (operand_mode[i] == VOIDmode)
2953 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2954
2955 substed_operand[i] = recog_data.operand[i]
2956 = reg_equiv_constant (regno);
2957 }
2958 if (reg_equiv_memory_loc (regno) != 0
2959 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2960 /* We need not give a valid is_set_dest argument since the case
2961 of a constant equivalence was checked above. */
2962 substed_operand[i] = recog_data.operand[i]
2963 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2964 ind_levels, 0, insn,
2965 &address_reloaded[i]);
2966 }
2967 /* If the operand is still a register (we didn't replace it with an
2968 equivalent), get the preferred class to reload it into. */
2969 code = GET_CODE (recog_data.operand[i]);
2970 preferred_class[i]
2971 = ((code == REG && REGNO (recog_data.operand[i])
2972 >= FIRST_PSEUDO_REGISTER)
2973 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2974 : NO_REGS);
2975 pref_or_nothing[i]
2976 = (code == REG
2977 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2978 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2979 }
2980
2981 /* If this is simply a copy from operand 1 to operand 0, merge the
2982 preferred classes for the operands. */
2983 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2984 && recog_data.operand[1] == SET_SRC (set))
2985 {
2986 preferred_class[0] = preferred_class[1]
2987 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2988 pref_or_nothing[0] |= pref_or_nothing[1];
2989 pref_or_nothing[1] |= pref_or_nothing[0];
2990 }
2991
2992 /* Now see what we need for pseudo-regs that didn't get hard regs
2993 or got the wrong kind of hard reg. For this, we must consider
2994 all the operands together against the register constraints. */
2995
2996 best = MAX_RECOG_OPERANDS * 2 + 600;
2997
2998 goal_alternative_swapped = 0;
2999
3000 /* The constraints are made of several alternatives.
3001 Each operand's constraint looks like foo,bar,... with commas
3002 separating the alternatives. The first alternatives for all
3003 operands go together, the second alternatives go together, etc.
3004
3005 First loop over alternatives. */
3006
3007 alternative_mask enabled = get_enabled_alternatives (insn);
3008 for (this_alternative_number = 0;
3009 this_alternative_number < n_alternatives;
3010 this_alternative_number++)
3011 {
3012 int swapped;
3013
3014 if (!TEST_BIT (enabled, this_alternative_number))
3015 {
3016 int i;
3017
3018 for (i = 0; i < recog_data.n_operands; i++)
3019	    constraints[i] = skip_alternative (constraints[i]);
3020
3021 continue;
3022 }
3023
3024 /* If insn is commutative (it's safe to exchange a certain pair
3025 of operands) then we need to try each alternative twice, the
3026 second time matching those two operands as if we had
3027 exchanged them. To do this, really exchange them in
3028 operands. */
3029 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3030 {
3031 /* Loop over operands for one constraint alternative. */
3032 /* LOSERS counts those that don't fit this alternative
3033 and would require loading. */
3034 int losers = 0;
3035	  /* BAD is set to 1 if some operand can't fit this alternative
3036 even after reloading. */
3037 int bad = 0;
3038 /* REJECT is a count of how undesirable this alternative says it is
3039 if any reloading is required. If the alternative matches exactly
3040 then REJECT is ignored, but otherwise it gets this much
3041 counted against it in addition to the reloading needed. Each
3042 ? counts three times here since we want the disparaging caused by
3043 a bad register class to only count 1/3 as much. */
3044 int reject = 0;
3045
3046 if (swapped)
3047 {
3048 recog_data.operand[commutative] = substed_operand[commutative + 1];
3049 recog_data.operand[commutative + 1] = substed_operand[commutative];
3050 /* Swap the duplicates too. */
3051 for (i = 0; i < recog_data.n_dups; i++)
3052 if (recog_data.dup_num[i] == commutative
3053 || recog_data.dup_num[i] == commutative + 1)
3054 *recog_data.dup_loc[i]
3055 = recog_data.operand[(int) recog_data.dup_num[i]];
3056
3057	      std::swap (preferred_class[commutative],
3058			 preferred_class[commutative + 1]);
3059	      std::swap (pref_or_nothing[commutative],
3060			 pref_or_nothing[commutative + 1]);
3061	      std::swap (address_reloaded[commutative],
3062			 address_reloaded[commutative + 1]);
3063 }
3064
3065 this_earlyclobber = 0;
3066
3067 for (i = 0; i < noperands; i++)
3068 {
3069 const char *p = constraints[i];
3070 char *end;
3071 int len;
3072 int win = 0;
3073 int did_match = 0;
3074 /* 0 => this operand can be reloaded somehow for this alternative. */
3075 int badop = 1;
3076 /* 0 => this operand can be reloaded if the alternative allows regs. */
3077 int winreg = 0;
3078 int c;
3079 int m;
3080 rtx operand = recog_data.operand[i];
3081 int offset = 0;
3082 /* Nonzero means this is a MEM that must be reloaded into a reg
3083 regardless of what the constraint says. */
3084 int force_reload = 0;
3085 int offmemok = 0;
3086 /* Nonzero if a constant forced into memory would be OK for this
3087 operand. */
3088 int constmemok = 0;
3089 int earlyclobber = 0;
3090 enum constraint_num cn;
3091 enum reg_class cl;
3092
3093 /* If the operand is a SUBREG, extract
3094 the REG or MEM (or maybe even a constant) within.
3095 (Constants can occur as a result of reg_equiv_constant.) */
3096
3097 while (GET_CODE (operand) == SUBREG)
3098 {
3099 /* Offset only matters when operand is a REG and
3100 it is a hard reg. This is because it is passed
3101 to reg_fits_class_p if it is a REG and all pseudos
3102 return 0 from that function. */
3103 if (REG_P (SUBREG_REG (operand))
3104 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3105 {
3106 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3107 GET_MODE (SUBREG_REG (operand)),
3108 SUBREG_BYTE (operand),
3109 GET_MODE (operand)) < 0)
3110 force_reload = 1;
3111 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3112 GET_MODE (SUBREG_REG (operand)),
3113 SUBREG_BYTE (operand),
3114 GET_MODE (operand));
3115 }
3116 operand = SUBREG_REG (operand);
3117 /* Force reload if this is a constant or PLUS or if there may
3118 be a problem accessing OPERAND in the outer mode. */
3119 scalar_int_mode inner_mode;
3120 if (CONSTANT_P (operand)
3121 || GET_CODE (operand) == PLUS
3122 /* We must force a reload of paradoxical SUBREGs
3123 of a MEM because the alignment of the inner value
3124 may not be enough to do the outer reference. On
3125 big-endian machines, it may also reference outside
3126 the object.
3127
3128 On machines that extend byte operations and we have a
3129 SUBREG where both the inner and outer modes are no wider
3130 than a word and the inner mode is narrower, is integral,
3131 and gets extended when loaded from memory, combine.cc has
3132 made assumptions about the behavior of the machine in such
3133 register access. If the data is, in fact, in memory we
3134 must always load using the size assumed to be in the
3135 register and let the insn do the different-sized
3136 accesses.
3137
3138 This is doubly true if WORD_REGISTER_OPERATIONS. In
3139 this case eliminate_regs has left non-paradoxical
3140 subregs for push_reload to see. Make sure it does
3141 by forcing the reload.
3142
3143 ??? When is it right at this stage to have a subreg
3144 of a mem that is _not_ to be handled specially? IMO
3145 those should have been reduced to just a mem. */
3146 || ((MEM_P (operand)
3147 || (REG_P (operand)
3148 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3149 && (WORD_REGISTER_OPERATIONS
3150 || (((maybe_lt
3151 (GET_MODE_BITSIZE (GET_MODE (operand)),
3152 BIGGEST_ALIGNMENT))
3153 && (paradoxical_subreg_p
3154 (operand_mode[i], GET_MODE (operand)))))
3155 || BYTES_BIG_ENDIAN
3156 || (known_le (GET_MODE_SIZE (operand_mode[i]),
3157 UNITS_PER_WORD)
3158 && (is_a <scalar_int_mode>
3159 (GET_MODE (operand), &inner_mode))
3160 && (GET_MODE_SIZE (inner_mode)
3161 <= UNITS_PER_WORD)
3162 && paradoxical_subreg_p (operand_mode[i],
3163 inner_mode)
3164 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3165 /* We must force a reload of a SUBREG's inner expression
3166 if it is a pseudo that will become a MEM and the MEM
3167 has a mode-dependent address, as in that case we
3168 obviously cannot change the mode of the MEM to that
3169 of the containing SUBREG as that would change the
3170 interpretation of the address. */
3171 || (REG_P (operand)
3172 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3173 && reg_equiv_mem (REGNO (operand))
3174 && (mode_dependent_address_p
3175 (XEXP (reg_equiv_mem (REGNO (operand)), 0),
3176 (MEM_ADDR_SPACE
3177 (reg_equiv_mem (REGNO (operand)))))))
3178 )
3179 force_reload = 1;
3180 }
3181
3182 this_alternative[i] = NO_REGS;
3183 this_alternative_win[i] = 0;
3184 this_alternative_match_win[i] = 0;
3185 this_alternative_offmemok[i] = 0;
3186 this_alternative_earlyclobber[i] = 0;
3187 this_alternative_matches[i] = -1;
3188
3189 /* An empty constraint or empty alternative
3190 allows anything which matched the pattern. */
3191 if (*p == 0 || *p == ',')
3192 win = 1, badop = 0;
3193
3194 /* Scan this alternative's specs for this operand;
3195 set WIN if the operand fits any letter in this alternative.
3196 Otherwise, clear BADOP if this operand could
3197 fit some letter after reloads,
3198 or set WINREG if this operand could fit after reloads
3199 provided the constraint allows some registers. */
3200
3201 do
3202 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3203 {
3204 case '\0':
3205 len = 0;
3206 break;
3207 case ',':
3208 c = '\0';
3209 break;
3210
3211 case '?':
3212 reject += 6;
3213 break;
3214
3215 case '!':
3216 reject = 600;
3217 break;
3218
3219 case '#':
3220 /* Ignore rest of this alternative as far as
3221 reloading is concerned. */
3222 do
3223 p++;
3224 while (*p && *p != ',');
3225 len = 0;
3226 break;
3227
3228 case '0': case '1': case '2': case '3': case '4':
3229 case '5': case '6': case '7': case '8': case '9':
3230 m = strtoul (p, &end, 10);
3231 p = end;
3232 len = 0;
3233
3234 this_alternative_matches[i] = m;
3235 /* We are supposed to match a previous operand.
3236 If we do, we win if that one did.
3237 If we do not, count both of the operands as losers.
3238 (This is too conservative, since most of the time
3239 only a single reload insn will be needed to make
3240 the two operands win. As a result, this alternative
3241 may be rejected when it is actually desirable.) */
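/* A small worked example of the index mirroring used just below
   (illustration only): with COMMUTATIVE == 1 the commutative pair is
   operands 1 and 2, and 2 * commutative + 1 - K maps 1 to 2 and 2 to 1
   while leaving every other operand number alone, so operands_match is
   always consulted on the side of its diagonal that was filled in.  */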
3242 if ((swapped && (m != commutative || i != commutative + 1))
3243 /* If we are matching as if two operands were swapped,
3244 also pretend that operands_match had been computed
3245 with swapped.
3246 But if I is the second of those and C is the first,
3247 don't exchange them, because operands_match is valid
3248 only on one side of its diagonal. */
3249 ? (operands_match
3250 [(m == commutative || m == commutative + 1)
3251 ? 2 * commutative + 1 - m : m]
3252 [(i == commutative || i == commutative + 1)
3253 ? 2 * commutative + 1 - i : i])
3254 : operands_match[m][i])
3255 {
3256 /* If we are matching a non-offsettable address where an
3257 offsettable address was expected, then we must reject
3258 this combination, because we can't reload it. */
3259 if (this_alternative_offmemok[m]
3260 && MEM_P (recog_data.operand[m])
3261 && this_alternative[m] == NO_REGS
3262 && ! this_alternative_win[m])
3263 bad = 1;
3264
3265 did_match = this_alternative_win[m];
3266 }
3267 else
3268 {
3269 /* Operands don't match. */
3270 rtx value;
3271 int loc1, loc2;
3272 /* Retroactively mark the operand we had to match
3273 as a loser, if it wasn't already. */
3274 if (this_alternative_win[m])
3275 losers++;
3276 this_alternative_win[m] = 0;
3277 if (this_alternative[m] == NO_REGS)
3278 bad = 1;
3279 /* But count the pair only once in the total badness of
3280 this alternative, if the pair can be a dummy reload.
3281 The pointers in operand_loc are not swapped; swap
3282 them by hand if necessary. */
3283 if (swapped && i == commutative)
3284 loc1 = commutative + 1;
3285 else if (swapped && i == commutative + 1)
3286 loc1 = commutative;
3287 else
3288 loc1 = i;
3289 if (swapped && m == commutative)
3290 loc2 = commutative + 1;
3291 else if (swapped && m == commutative + 1)
3292 loc2 = commutative;
3293 else
3294 loc2 = m;
3295 value
3296 = find_dummy_reload (recog_data.operand[i],
3297 recog_data.operand[m],
3298 recog_data.operand_loc[loc1],
3299 recog_data.operand_loc[loc2],
3300 operand_mode[i], operand_mode[m],
3301 this_alternative[m], -1,
3302 this_alternative_earlyclobber[m]);
3303
3304 if (value != 0)
3305 losers--;
3306 }
3307 /* This can be fixed with reloads if the operand
3308 we are supposed to match can be fixed with reloads. */
3309 badop = 0;
3310 this_alternative[i] = this_alternative[m];
3311
3312 /* If we have to reload this operand and some previous
3313 operand also had to match the same thing as this
3314 operand, we don't know how to do that. So reject this
3315 alternative. */
3316 if (! did_match || force_reload)
3317 for (j = 0; j < i; j++)
3318 if (this_alternative_matches[j]
3319 == this_alternative_matches[i])
3320 {
3321 badop = 1;
3322 break;
3323 }
3324 break;
3325
3326 case 'p':
3327 /* All necessary reloads for an address_operand
3328 were handled in find_reloads_address. */
3329 this_alternative[i]
3330 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3331 ADDRESS, SCRATCH, insn);
3332 win = 1;
3333 badop = 0;
3334 break;
3335
3336 case TARGET_MEM_CONSTRAINT:
3337 if (force_reload)
3338 break;
3339 if (MEM_P (operand)
3340 || (REG_P (operand)
3341 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3342 && reg_renumber[REGNO (operand)] < 0))
3343 win = 1;
3344 if (CONST_POOL_OK_P (operand_mode[i], operand))
3345 badop = 0;
3346 constmemok = 1;
3347 break;
3348
3349 case '<':
3350 if (MEM_P (operand)
3351 && ! address_reloaded[i]
3352 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3353 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3354 win = 1;
3355 break;
3356
3357 case '>':
3358 if (MEM_P (operand)
3359 && ! address_reloaded[i]
3360 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3361 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3362 win = 1;
3363 break;
3364
3365 /* Memory operand whose address is not offsettable. */
3366 case 'V':
3367 if (force_reload)
3368 break;
3369 if (MEM_P (operand)
3370 && ! (ind_levels ? offsettable_memref_p (operand)
3371 : offsettable_nonstrict_memref_p (operand))
3372 /* Certain mem addresses will become offsettable
3373 after they themselves are reloaded. This is important;
3374 we don't want our own handling of unoffsettables
3375 to override the handling of reg_equiv_address. */
3376 && !(REG_P (XEXP (operand, 0))
3377 && (ind_levels == 0
3378 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3379 win = 1;
3380 break;
3381
3382 /* Memory operand whose address is offsettable. */
3383 case 'o':
3384 if (force_reload)
3385 break;
3386 if ((MEM_P (operand)
3387 /* If IND_LEVELS, find_reloads_address won't reload a
3388 pseudo that didn't get a hard reg, so we have to
3389 reject that case. */
3390 && ((ind_levels ? offsettable_memref_p (operand)
3391 : offsettable_nonstrict_memref_p (operand))
3392 /* A reloaded address is offsettable because it is now
3393 just a simple register indirect. */
3394 || address_reloaded[i] == 1))
3395 || (REG_P (operand)
3396 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3397 && reg_renumber[REGNO (operand)] < 0
3398 /* If reg_equiv_address is nonzero, we will be
3399 loading it into a register; hence it will be
3400 offsettable, but we cannot say that reg_equiv_mem
3401 is offsettable without checking. */
3402 && ((reg_equiv_mem (REGNO (operand)) != 0
3403 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3404 || (reg_equiv_address (REGNO (operand)) != 0))))
3405 win = 1;
3406 if (CONST_POOL_OK_P (operand_mode[i], operand)
3407 || MEM_P (operand))
3408 badop = 0;
3409 constmemok = 1;
3410 offmemok = 1;
3411 break;
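/* For illustration (what is offsettable is target-dependent): an address
   such as (plus (reg) (const_int 4)) is typically offsettable because a
   further constant displacement can be added to it, whereas an
   autoincrement address such as (post_inc (reg)) is not and would match
   'V' above instead.  */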
3412
3413 case '&':
3414 /* Output operand that is stored before the need for the
3415 input operands (and their index registers) is over. */
3416 earlyclobber = 1, this_earlyclobber = 1;
3417 break;
3418
3419 case 'X':
3420 force_reload = 0;
3421 win = 1;
3422 break;
3423
3424 case 'g':
3425 if (! force_reload
3426 /* A PLUS is never a valid operand, but reload can make
3427 it from a register when eliminating registers. */
3428 && GET_CODE (operand) != PLUS
3429 /* A SCRATCH is not a valid operand. */
3430 && GET_CODE (operand) != SCRATCH
3431 && (! CONSTANT_P (operand)
3432 || ! flag_pic
3433 || LEGITIMATE_PIC_OPERAND_P (operand))
3434 && (GENERAL_REGS == ALL_REGS
3435 || !REG_P (operand)
3436 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3437 && reg_renumber[REGNO (operand)] < 0)))
3438 win = 1;
3439 cl = GENERAL_REGS;
3440 goto reg;
3441
3442 default:
3443 cn = lookup_constraint (p);
3444 switch (get_constraint_type (cn))
3445 {
3446 case CT_REGISTER:
3447 cl = reg_class_for_constraint (cn);
3448 if (cl != NO_REGS)
3449 goto reg;
3450 break;
3451
3452 case CT_CONST_INT:
3453 if (CONST_INT_P (operand)
3454 && (insn_const_int_ok_for_constraint
3455 (INTVAL (operand), cn)))
3456 win = true;
3457 break;
3458
3459 case CT_MEMORY:
3460 case CT_RELAXED_MEMORY:
3461 if (force_reload)
3462 break;
3463 if (constraint_satisfied_p (operand, cn))
3464 win = 1;
3465 /* If the address was already reloaded,
3466 we win as well. */
3467 else if (MEM_P (operand) && address_reloaded[i] == 1)
3468 win = 1;
3469 /* Likewise if the address will be reloaded because
3470 reg_equiv_address is nonzero. For reg_equiv_mem
3471 we have to check. */
3472 else if (REG_P (operand)
3473 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3474 && reg_renumber[REGNO (operand)] < 0
3475 && ((reg_equiv_mem (REGNO (operand)) != 0
3476 && (constraint_satisfied_p
3477 (reg_equiv_mem (REGNO (operand)),
3478 cn)))
3479 || (reg_equiv_address (REGNO (operand))
3480 != 0)))
3481 win = 1;
3482
3483 /* If we didn't already win, we can reload
3484 constants via force_const_mem, and other
3485 MEMs by reloading the address like for 'o'. */
3486 if (CONST_POOL_OK_P (operand_mode[i], operand)
3487 || MEM_P (operand))
3488 badop = 0;
3489 constmemok = 1;
3490 offmemok = 1;
3491 break;
3492
3493 case CT_SPECIAL_MEMORY:
3494 if (force_reload)
3495 break;
3496 if (constraint_satisfied_p (operand, cn))
3497 win = 1;
3498 /* Likewise if the address will be reloaded because
3499 reg_equiv_address is nonzero. For reg_equiv_mem
3500 we have to check. */
3501 else if (REG_P (operand)
3502 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3503 && reg_renumber[REGNO (operand)] < 0
3504 && reg_equiv_mem (REGNO (operand)) != 0
3505 && (constraint_satisfied_p
3506 (reg_equiv_mem (REGNO (operand)), cn)))
3507 win = 1;
3508 break;
3509
3510 case CT_ADDRESS:
3511 if (constraint_satisfied_p (operand, cn))
3512 win = 1;
3513
3514 /* If we didn't already win, we can reload
3515 the address into a base register. */
3516 this_alternative[i]
3517 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3518 ADDRESS, SCRATCH, insn);
3519 badop = 0;
3520 break;
3521
3522 case CT_FIXED_FORM:
3523 if (constraint_satisfied_p (operand, cn))
3524 win = 1;
3525 break;
3526 }
3527 break;
3528
3529 reg:
3530 this_alternative[i]
3531 = reg_class_subunion[this_alternative[i]][cl];
3532 if (GET_MODE (operand) == BLKmode)
3533 break;
3534 winreg = 1;
3535 if (REG_P (operand)
3536 && reg_fits_class_p (operand, this_alternative[i],
3537 offset, GET_MODE (recog_data.operand[i])))
3538 win = 1;
3539 break;
3540 }
3541 while ((p += len), c);
3542
3543 if (swapped == (commutative >= 0 ? 1 : 0))
3544 constraints[i] = p;
3545
3546 /* If this operand could be handled with a reg,
3547 and some reg is allowed, then this operand can be handled. */
3548 if (winreg && this_alternative[i] != NO_REGS
3549 && (win || !class_only_fixed_regs[this_alternative[i]]))
3550 badop = 0;
3551
3552 /* Record which operands fit this alternative. */
3553 this_alternative_earlyclobber[i] = earlyclobber;
3554 if (win && ! force_reload)
3555 this_alternative_win[i] = 1;
3556 else if (did_match && ! force_reload)
3557 this_alternative_match_win[i] = 1;
3558 else
3559 {
3560 int const_to_mem = 0;
3561
3562 this_alternative_offmemok[i] = offmemok;
3563 losers++;
3564 if (badop)
3565 bad = 1;
3566 /* Alternative loses if it has no regs for a reg operand. */
3567 if (REG_P (operand)
3568 && this_alternative[i] == NO_REGS
3569 && this_alternative_matches[i] < 0)
3570 bad = 1;
3571
3572 /* If this is a constant that is reloaded into the desired
3573 class by copying it to memory first, count that as another
3574 reload. This is consistent with other code and is
3575 required to avoid choosing another alternative when
3576 the constant is moved into memory by this function on
3577 an early reload pass. Note that the test here is
3578 precisely the same as in the code below that calls
3579 force_const_mem. */
3580 if (CONST_POOL_OK_P (operand_mode[i], operand)
3581 && ((targetm.preferred_reload_class (operand,
3582 this_alternative[i])
3583 == NO_REGS)
3584 || no_input_reloads))
3585 {
3586 const_to_mem = 1;
3587 if (this_alternative[i] != NO_REGS)
3588 losers++;
3589 }
3590
3591 /* Alternative loses if it requires a type of reload not
3592 permitted for this insn. We can always reload SCRATCH
3593 and objects with a REG_UNUSED note. */
3594 if (GET_CODE (operand) != SCRATCH
3595 && modified[i] != RELOAD_READ && no_output_reloads
3596 && ! find_reg_note (insn, REG_UNUSED, operand))
3597 bad = 1;
3598 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3599 && ! const_to_mem)
3600 bad = 1;
3601
3602 /* If we can't reload this value at all, reject this
3603 alternative. Note that we could also lose due to
3604 LIMIT_RELOAD_CLASS, but we don't check that
3605 here. */
3606
3607 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3608 {
3609 if (targetm.preferred_reload_class (operand,
3610 this_alternative[i])
3611 == NO_REGS)
3612 reject = 600;
3613
3614 if (operand_type[i] == RELOAD_FOR_OUTPUT
3615 && (targetm.preferred_output_reload_class (operand,
3616 this_alternative[i])
3617 == NO_REGS))
3618 reject = 600;
3619 }
3620
3621 /* We prefer to reload pseudos over reloading other things,
3622 since such reloads may be able to be eliminated later.
3623 If we are reloading a SCRATCH, we won't be generating any
3624 insns, just using a register, so it is also preferred.
3625 So bump REJECT in other cases. Don't do this in the
3626 case where we are forcing a constant into memory and
3627 it will then win since we don't want to have a different
3628 alternative match then. */
3629 if (! (REG_P (operand)
3630 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3631 && GET_CODE (operand) != SCRATCH
3632 && ! (const_to_mem && constmemok))
3633 reject += 2;
3634
3635 /* Input reloads can be inherited more often than output
3636 reloads can be removed, so penalize output reloads. */
3637 if (operand_type[i] != RELOAD_FOR_INPUT
3638 && GET_CODE (operand) != SCRATCH)
3639 reject++;
3640 }
3641
3642 /* If this operand is a pseudo register that didn't get
3643 a hard reg and this alternative accepts some
3644 register, see if the class that we want is a subset
3645 of the preferred class for this register. If not,
3646 but it intersects that class, we'd like to use the
3647 intersection, but the best we can do is to use the
3648 preferred class, if it is instead a subset of the
3649 class we want in this alternative. If we can't use
3650 it, show that usage of this alternative should be
3651 discouraged; it will be discouraged more still if the
3652 register is `preferred or nothing'. We do this
3653 because it increases the chance of reusing our spill
3654 register in a later insn and avoiding a pair of
3655 memory stores and loads.
3656
3657 Don't bother with this if this alternative will
3658 accept this operand.
3659
3660 Don't do this for a multiword operand, since it is
3661 only a small win and has the risk of requiring more
3662 spill registers, which could cause a large loss.
3663
3664 Don't do this if the preferred class has only one
3665 register because we might otherwise exhaust the
3666 class. */
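/* A hypothetical example of the narrowing below: if this alternative
   allows GENERAL_REGS but the pseudo's preferred class is a smaller
   class wholly contained in GENERAL_REGS, we reuse that smaller class;
   if the two classes merely intersect we keep GENERAL_REGS and instead
   add 2 to REJECT (4 if the register is "preferred or nothing").  */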
3667
3668 if (! win && ! did_match
3669 && this_alternative[i] != NO_REGS
3670 && known_le (GET_MODE_SIZE (operand_mode[i]), UNITS_PER_WORD)
3671 && reg_class_size [(int) preferred_class[i]] > 0
3672 && ! small_register_class_p (preferred_class[i]))
3673 {
3674 if (! reg_class_subset_p (this_alternative[i],
3675 preferred_class[i]))
3676 {
3677 /* Since we don't have a way of forming a register
3678 class for the intersection, we just do
3679 something special if the preferred class is a
3680 subset of the class we have; that's the most
3681 common case anyway. */
3682 if (reg_class_subset_p (preferred_class[i],
3683 this_alternative[i]))
3684 this_alternative[i] = preferred_class[i];
3685 else
3686 reject += (2 + 2 * pref_or_nothing[i]);
3687 }
3688 }
3689 }
3690
3691 /* Now see if any output operands that are marked "earlyclobber"
3692 in this alternative conflict with any input operands
3693 or any memory addresses. */
3694
3695 for (i = 0; i < noperands; i++)
3696 if (this_alternative_earlyclobber[i]
3697 && (this_alternative_win[i] || this_alternative_match_win[i]))
3698 {
3699 struct decomposition early_data;
3700
3701 early_data = decompose (recog_data.operand[i]);
3702
3703 gcc_assert (modified[i] != RELOAD_READ);
3704
3705 if (this_alternative[i] == NO_REGS)
3706 {
3707 this_alternative_earlyclobber[i] = 0;
3708 gcc_assert (this_insn_is_asm);
3709 error_for_asm (this_insn,
3710 "%<&%> constraint used with no register class");
3711 }
3712
3713 for (j = 0; j < noperands; j++)
3714 /* Is this an input operand or a memory ref? */
3715 if ((MEM_P (recog_data.operand[j])
3716 || modified[j] != RELOAD_WRITE)
3717 && j != i
3718 /* Ignore things like match_operator operands. */
3719 && !recog_data.is_operator[j]
3720 /* Don't count an input operand that is constrained to match
3721 the early clobber operand. */
3722 && ! (this_alternative_matches[j] == i
3723 && rtx_equal_p (recog_data.operand[i],
3724 recog_data.operand[j]))
3725 /* Is it altered by storing the earlyclobber operand? */
3726 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3727 early_data))
3728 {
3729 /* If the output is in a non-empty few-regs class,
3730 it's costly to reload it, so reload the input instead. */
3731 if (small_register_class_p (this_alternative[i])
3732 && (REG_P (recog_data.operand[j])
3733 || GET_CODE (recog_data.operand[j]) == SUBREG))
3734 {
3735 losers++;
3736 this_alternative_win[j] = 0;
3737 this_alternative_match_win[j] = 0;
3738 }
3739 else
3740 break;
3741 }
3742 /* If an earlyclobber operand conflicts with something,
3743 it must be reloaded, so request this and count the cost. */
3744 if (j != noperands)
3745 {
3746 losers++;
3747 this_alternative_win[i] = 0;
3748 this_alternative_match_win[j] = 0;
3749 for (j = 0; j < noperands; j++)
3750 if (this_alternative_matches[j] == i
3751 && this_alternative_match_win[j])
3752 {
3753 this_alternative_win[j] = 0;
3754 this_alternative_match_win[j] = 0;
3755 losers++;
3756 }
3757 }
3758 }
3759
3760 /* If one alternative accepts all the operands, no reload required,
3761 choose that alternative; don't consider the remaining ones. */
3762 if (losers == 0)
3763 {
3764 /* Unswap these so that they are never swapped at `finish'. */
3765 if (swapped)
3766 {
3767 recog_data.operand[commutative] = substed_operand[commutative];
3768 recog_data.operand[commutative + 1]
3769 = substed_operand[commutative + 1];
3770 }
3771 for (i = 0; i < noperands; i++)
3772 {
3773 goal_alternative_win[i] = this_alternative_win[i];
3774 goal_alternative_match_win[i] = this_alternative_match_win[i];
3775 goal_alternative[i] = this_alternative[i];
3776 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3777 goal_alternative_matches[i] = this_alternative_matches[i];
3778 goal_alternative_earlyclobber[i]
3779 = this_alternative_earlyclobber[i];
3780 }
3781 goal_alternative_number = this_alternative_number;
3782 goal_alternative_swapped = swapped;
3783 goal_earlyclobber = this_earlyclobber;
3784 goto finish;
3785 }
3786
3787 /* REJECT, set by the ! and ? constraint characters and when a register
3788 would be reloaded into a non-preferred class, discourages the use of
3789 this alternative for a reload goal. REJECT is incremented by six
3790 for each ? and two for each non-preferred class. */
3791 losers = losers * 6 + reject;
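/* For instance (illustration only): an alternative needing two reloads
   and carrying one '?' scores 2 * 6 + 6 = 18, while one needing two
   reloads but no '?' scores 12 and is therefore preferred.  */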
3792
3793 /* If this alternative can be made to work by reloading,
3794 and it needs less reloading than the others checked so far,
3795 record it as the chosen goal for reloading. */
3796 if (! bad)
3797 {
3798 if (best > losers)
3799 {
3800 for (i = 0; i < noperands; i++)
3801 {
3802 goal_alternative[i] = this_alternative[i];
3803 goal_alternative_win[i] = this_alternative_win[i];
3804 goal_alternative_match_win[i]
3805 = this_alternative_match_win[i];
3806 goal_alternative_offmemok[i]
3807 = this_alternative_offmemok[i];
3808 goal_alternative_matches[i] = this_alternative_matches[i];
3809 goal_alternative_earlyclobber[i]
3810 = this_alternative_earlyclobber[i];
3811 }
3812 goal_alternative_swapped = swapped;
3813 best = losers;
3814 goal_alternative_number = this_alternative_number;
3815 goal_earlyclobber = this_earlyclobber;
3816 }
3817 }
3818
3819 if (swapped)
3820 {
3821 /* If the commutative operands have been swapped, swap
3822 them back in order to check the next alternative. */
3823 recog_data.operand[commutative] = substed_operand[commutative];
3824 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3825 /* Unswap the duplicates too. */
3826 for (i = 0; i < recog_data.n_dups; i++)
3827 if (recog_data.dup_num[i] == commutative
3828 || recog_data.dup_num[i] == commutative + 1)
3829 *recog_data.dup_loc[i]
3830 = recog_data.operand[(int) recog_data.dup_num[i]];
3831
3832 /* Unswap the operand related information as well. */
3833 std::swap (preferred_class[commutative],
3834 preferred_class[commutative + 1]);
3835 std::swap (pref_or_nothing[commutative],
3836 pref_or_nothing[commutative + 1]);
3837 std::swap (address_reloaded[commutative],
3838 address_reloaded[commutative + 1]);
3839 }
3840 }
3841 }
3842
3843 /* The operands don't meet the constraints.
3844 goal_alternative describes the alternative
3845 that we could reach by reloading the fewest operands.
3846 Reload so as to fit it. */
3847
3848 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3849 {
3850 /* No alternative works with reloads?? */
3851 if (insn_code_number >= 0)
3852 fatal_insn ("unable to generate reloads for:", insn);
3853 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3854 /* Avoid further trouble with this insn. */
3855 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3856 n_reloads = 0;
3857 return 0;
3858 }
3859
3860 /* Jump to `finish' from above if all operands are valid already.
3861 In that case, goal_alternative_win is all 1. */
3862 finish:
3863
3864 /* Right now, for any pair of operands I and J that are required to match,
3865 with I < J,
3866 goal_alternative_matches[J] is I.
3867 Set up goal_alternative_matched as the inverse function:
3868 goal_alternative_matched[I] = J. */
3869
3870 for (i = 0; i < noperands; i++)
3871 goal_alternative_matched[i] = -1;
3872
3873 for (i = 0; i < noperands; i++)
3874 if (! goal_alternative_win[i]
3875 && goal_alternative_matches[i] >= 0)
3876 goal_alternative_matched[goal_alternative_matches[i]] = i;
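/* A short example with hypothetical operand numbers: if operand 2 had a
   "0" constraint, i.e. had to match operand 0, then
   goal_alternative_matches[2] == 0, and when operand 2 still needs a
   reload the loop just above records the inverse as
   goal_alternative_matched[0] == 2.  */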
3877
3878 for (i = 0; i < noperands; i++)
3879 goal_alternative_win[i] |= goal_alternative_match_win[i];
3880
3881 /* If the best alternative is with operands 1 and 2 swapped,
3882 consider them swapped before reporting the reloads. Update the
3883 operand numbers of any reloads already pushed. */
3884
3885 if (goal_alternative_swapped)
3886 {
3887 std::swap (substed_operand[commutative],
3888 substed_operand[commutative + 1]);
3889 std::swap (recog_data.operand[commutative],
3890 recog_data.operand[commutative + 1]);
3891 std::swap (*recog_data.operand_loc[commutative],
3892 *recog_data.operand_loc[commutative + 1]);
3893
3894 for (i = 0; i < recog_data.n_dups; i++)
3895 if (recog_data.dup_num[i] == commutative
3896 || recog_data.dup_num[i] == commutative + 1)
3897 *recog_data.dup_loc[i]
3898 = recog_data.operand[(int) recog_data.dup_num[i]];
3899
3900 for (i = 0; i < n_reloads; i++)
3901 {
3902 if (rld[i].opnum == commutative)
3903 rld[i].opnum = commutative + 1;
3904 else if (rld[i].opnum == commutative + 1)
3905 rld[i].opnum = commutative;
3906 }
3907 }
3908
3909 for (i = 0; i < noperands; i++)
3910 {
3911 operand_reloadnum[i] = -1;
3912
3913 /* If this is an earlyclobber operand, we need to widen the scope.
3914 The reload must remain valid from the start of the insn being
3915 reloaded until after the operand is stored into its destination.
3916 We approximate this with RELOAD_OTHER even though we know that we
3917 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3918
3919 One special case that is worth checking is when we have an
3920 output that is earlyclobber but isn't used past the insn (typically
3921 a SCRATCH). In this case, we only need have the reload live
3922 through the insn itself, but not for any of our input or output
3923 reloads.
3924 But we must not accidentally narrow the scope of an existing
3925 RELOAD_OTHER reload - leave these alone.
3926
3927 In any case, anything needed to address this operand can remain
3928 however it was previously categorized. */
3929
3930 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3931 operand_type[i]
3932 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3933 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3934 }
3935
3936 /* Any constants that aren't allowed and can't be reloaded
3937 into registers are here changed into memory references. */
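/* For example (illustration only): a floating-point constant operand
   whose preferred reload class comes back as NO_REGS is rewritten below
   as a constant-pool reference, roughly (mem (symbol_ref ...)) created
   by force_const_mem, and is then processed by find_reloads_toplev like
   any other memory operand.  */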
3938 for (i = 0; i < noperands; i++)
3939 if (! goal_alternative_win[i])
3940 {
3941 rtx op = recog_data.operand[i];
3942 rtx subreg = NULL_RTX;
3943 rtx plus = NULL_RTX;
3944 machine_mode mode = operand_mode[i];
3945
3946 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3947 push_reload so we have to let them pass here. */
3948 if (GET_CODE (op) == SUBREG)
3949 {
3950 subreg = op;
3951 op = SUBREG_REG (op);
3952 mode = GET_MODE (op);
3953 }
3954
3955 if (GET_CODE (op) == PLUS)
3956 {
3957 plus = op;
3958 op = XEXP (op, 1);
3959 }
3960
3961 if (CONST_POOL_OK_P (mode, op)
3962 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3963 == NO_REGS)
3964 || no_input_reloads))
3965 {
3966 int this_address_reloaded;
3967 rtx tem = force_const_mem (mode, op);
3968
3969 /* If we stripped a SUBREG or a PLUS above add it back. */
3970 if (plus != NULL_RTX)
3971 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3972
3973 if (subreg != NULL_RTX)
3974 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3975
3976 this_address_reloaded = 0;
3977 substed_operand[i] = recog_data.operand[i]
3978 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3979 0, insn, &this_address_reloaded);
3980
3981 /* If the alternative accepts constant pool refs directly
3982 there will be no reload needed at all. */
3983 if (plus == NULL_RTX
3984 && subreg == NULL_RTX
3985 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3986 ? substed_operand[i]
3987 : NULL,
3988 recog_data.constraints[i],
3989 goal_alternative_number))
3990 goal_alternative_win[i] = 1;
3991 }
3992 }
3993
3994 /* Record the values of the earlyclobber operands for the caller. */
3995 if (goal_earlyclobber)
3996 for (i = 0; i < noperands; i++)
3997 if (goal_alternative_earlyclobber[i])
3998 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3999
4000 /* Now record reloads for all the operands that need them. */
4001 for (i = 0; i < noperands; i++)
4002 if (! goal_alternative_win[i])
4003 {
4004 /* Operands that match previous ones have already been handled. */
4005 if (goal_alternative_matches[i] >= 0)
4006 ;
4007 /* Handle an operand with a nonoffsettable address
4008 appearing where an offsettable address will do
4009 by reloading the address into a base register.
4010
4011 ??? We can also do this when the operand is a register and
4012 reg_equiv_mem is not offsettable, but this is a bit tricky,
4013 so we don't bother with it. It may not be worth doing. */
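/* For illustration (machine-dependent): if the operand is
   (mem (plus (reg R1) (reg R2))) and the alternative only accepts an
   offsettable memory reference, the branch below reloads the whole
   address into a base register R, turning the operand into
   (mem (reg R)), which is offsettable.  */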
4014 else if (goal_alternative_matched[i] == -1
4015 && goal_alternative_offmemok[i]
4016 && MEM_P (recog_data.operand[i]))
4017 {
4018 /* If the address to be reloaded is a VOIDmode constant,
4019 use the default address mode as mode of the reload register,
4020 as would have been done by find_reloads_address. */
4021 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4022 machine_mode address_mode;
4023
4024 address_mode = get_address_mode (recog_data.operand[i]);
4025 operand_reloadnum[i]
4026 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4027 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4028 base_reg_class (VOIDmode, as, MEM, SCRATCH, insn),
4029 address_mode,
4030 VOIDmode, 0, 0, i, RELOAD_OTHER);
4031 rld[operand_reloadnum[i]].inc
4032 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4033
4034 /* If this operand is an output, we will have made any
4035 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4036 now we are treating part of the operand as an input, so
4037 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
4038
4039 if (modified[i] == RELOAD_WRITE)
4040 {
4041 for (j = 0; j < n_reloads; j++)
4042 {
4043 if (rld[j].opnum == i)
4044 {
4045 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4046 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4047 else if (rld[j].when_needed
4048 == RELOAD_FOR_OUTADDR_ADDRESS)
4049 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4050 }
4051 }
4052 }
4053 }
4054 else if (goal_alternative_matched[i] == -1)
4055 {
4056 operand_reloadnum[i]
4057 = push_reload ((modified[i] != RELOAD_WRITE
4058 ? recog_data.operand[i] : 0),
4059 (modified[i] != RELOAD_READ
4060 ? recog_data.operand[i] : 0),
4061 (modified[i] != RELOAD_WRITE
4062 ? recog_data.operand_loc[i] : 0),
4063 (modified[i] != RELOAD_READ
4064 ? recog_data.operand_loc[i] : 0),
4065 (enum reg_class) goal_alternative[i],
4066 (modified[i] == RELOAD_WRITE
4067 ? VOIDmode : operand_mode[i]),
4068 (modified[i] == RELOAD_READ
4069 ? VOIDmode : operand_mode[i]),
4070 (insn_code_number < 0 ? 0
4071 : insn_data[insn_code_number].operand[i].strict_low),
4072 0, i, operand_type[i]);
4073 }
4074 /* In a matching pair of operands, one must be input only
4075 and the other must be output only.
4076 Pass the input operand as IN and the other as OUT. */
4077 else if (modified[i] == RELOAD_READ
4078 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4079 {
4080 operand_reloadnum[i]
4081 = push_reload (recog_data.operand[i],
4082 recog_data.operand[goal_alternative_matched[i]],
4083 recog_data.operand_loc[i],
4084 recog_data.operand_loc[goal_alternative_matched[i]],
4085 (enum reg_class) goal_alternative[i],
4086 operand_mode[i],
4087 operand_mode[goal_alternative_matched[i]],
4088 0, 0, i, RELOAD_OTHER);
4089 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4090 }
4091 else if (modified[i] == RELOAD_WRITE
4092 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4093 {
4094 operand_reloadnum[goal_alternative_matched[i]]
4095 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4096 recog_data.operand[i],
4097 recog_data.operand_loc[goal_alternative_matched[i]],
4098 recog_data.operand_loc[i],
4099 (enum reg_class) goal_alternative[i],
4100 operand_mode[goal_alternative_matched[i]],
4101 operand_mode[i],
4102 0, 0, i, RELOAD_OTHER);
4103 operand_reloadnum[i] = output_reloadnum;
4104 }
4105 else
4106 {
4107 gcc_assert (insn_code_number < 0);
4108 error_for_asm (insn, "inconsistent operand constraints "
4109 "in an %<asm%>");
4110 /* Avoid further trouble with this insn. */
4111 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4112 n_reloads = 0;
4113 return 0;
4114 }
4115 }
4116 else if (goal_alternative_matched[i] < 0
4117 && goal_alternative_matches[i] < 0
4118 && address_operand_reloaded[i] != 1
4119 && optimize)
4120 {
4121 /* For each non-matching operand that's a MEM or a pseudo-register
4122 that didn't get a hard register, make an optional reload.
4123 This may get done even if the insn needs no reloads otherwise. */
4124
4125 rtx operand = recog_data.operand[i];
4126
4127 while (GET_CODE (operand) == SUBREG)
4128 operand = SUBREG_REG (operand);
4129 if ((MEM_P (operand)
4130 || (REG_P (operand)
4131 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4132 /* If this is only for an output, the optional reload would not
4133 actually cause us to use a register now, just note that
4134 something is stored here. */
4135 && (goal_alternative[i] != NO_REGS
4136 || modified[i] == RELOAD_WRITE)
4137 && ! no_input_reloads
4138 /* An optional output reload might allow to delete INSN later.
4139 We mustn't make in-out reloads on insns that are not permitted
4140 output reloads.
4141 If this is an asm, we can't delete it; we must not even call
4142 push_reload for an optional output reload in this case,
4143 because we can't be sure that the constraint allows a register,
4144 and push_reload verifies the constraints for asms. */
4145 && (modified[i] == RELOAD_READ
4146 || (! no_output_reloads && ! this_insn_is_asm)))
4147 operand_reloadnum[i]
4148 = push_reload ((modified[i] != RELOAD_WRITE
4149 ? recog_data.operand[i] : 0),
4150 (modified[i] != RELOAD_READ
4151 ? recog_data.operand[i] : 0),
4152 (modified[i] != RELOAD_WRITE
4153 ? recog_data.operand_loc[i] : 0),
4154 (modified[i] != RELOAD_READ
4155 ? recog_data.operand_loc[i] : 0),
4156 (enum reg_class) goal_alternative[i],
4157 (modified[i] == RELOAD_WRITE
4158 ? VOIDmode : operand_mode[i]),
4159 (modified[i] == RELOAD_READ
4160 ? VOIDmode : operand_mode[i]),
4161 (insn_code_number < 0 ? 0
4162 : insn_data[insn_code_number].operand[i].strict_low),
4163 1, i, operand_type[i]);
4164 /* If a memory reference remains (either as a MEM or a pseudo that
4165 did not get a hard register), yet we can't make an optional
4166 reload, check if this is actually a pseudo register reference;
4167 we then need to emit a USE and/or a CLOBBER so that reload
4168 inheritance will do the right thing. */
4169 else if (replace
4170 && (MEM_P (operand)
4171 || (REG_P (operand)
4172 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4173 && reg_renumber [REGNO (operand)] < 0)))
4174 {
4175 operand = *recog_data.operand_loc[i];
4176
4177 while (GET_CODE (operand) == SUBREG)
4178 operand = SUBREG_REG (operand);
4179 if (REG_P (operand))
4180 {
4181 if (modified[i] != RELOAD_WRITE)
4182 /* We mark the USE with QImode so that we recognize
4183 it as one that can be safely deleted at the end
4184 of reload. */
4185 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4186 insn), QImode);
4187 if (modified[i] != RELOAD_READ)
4188 emit_insn_after (gen_clobber (operand), insn);
4189 }
4190 }
4191 }
4192 else if (goal_alternative_matches[i] >= 0
4193 && goal_alternative_win[goal_alternative_matches[i]]
4194 && modified[i] == RELOAD_READ
4195 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4196 && ! no_input_reloads && ! no_output_reloads
4197 && optimize)
4198 {
4199 /* Similarly, make an optional reload for a pair of matching
4200 objects that are in MEM or a pseudo that didn't get a hard reg. */
4201
4202 rtx operand = recog_data.operand[i];
4203
4204 while (GET_CODE (operand) == SUBREG)
4205 operand = SUBREG_REG (operand);
4206 if ((MEM_P (operand)
4207 || (REG_P (operand)
4208 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4209 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4210 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4211 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4212 recog_data.operand[i],
4213 recog_data.operand_loc[goal_alternative_matches[i]],
4214 recog_data.operand_loc[i],
4215 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4216 operand_mode[goal_alternative_matches[i]],
4217 operand_mode[i],
4218 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4219 }
4220
4221 /* Perform whatever substitutions on the operands we are supposed
4222 to make due to commutativity or replacement of registers
4223 with equivalent constants or memory slots. */
4224
4225 for (i = 0; i < noperands; i++)
4226 {
4227 /* We only do this on the last pass through reload, because it is
4228 possible for some data (like reg_equiv_address) to be changed during
4229 later passes. Moreover, we lose the opportunity to get a useful
4230 reload_{in,out}_reg when we do these replacements. */
4231
4232 if (replace)
4233 {
4234 rtx substitution = substed_operand[i];
4235
4236 *recog_data.operand_loc[i] = substitution;
4237
4238 /* If we're replacing an operand with a LABEL_REF, we need to
4239 make sure that there's a REG_LABEL_OPERAND note attached to
4240 this instruction. */
4241 if (GET_CODE (substitution) == LABEL_REF
4242 && !find_reg_note (insn, REG_LABEL_OPERAND,
4243 label_ref_label (substitution))
4244 /* For a JUMP_P, if it was a branch target it must have
4245 already been recorded as such. */
4246 && (!JUMP_P (insn)
4247 || !label_is_jump_target_p (label_ref_label (substitution),
4248 insn)))
4249 {
4250 add_reg_note (insn, REG_LABEL_OPERAND,
4251 label_ref_label (substitution));
4252 if (LABEL_P (label_ref_label (substitution)))
4253 ++LABEL_NUSES (label_ref_label (substitution));
4254 }
4255
4256 }
4257 else
4258 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4259 }
4260
4261 /* If this insn pattern contains any MATCH_DUP's, make sure that
4262 they will be substituted if the operands they match are substituted.
4263 Also do now any substitutions we already did on the operands.
4264
4265 Don't do this if we aren't making replacements because we might be
4266 propagating things allocated by frame pointer elimination into places
4267 it doesn't expect. */
4268
4269 if (insn_code_number >= 0 && replace)
4270 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4271 {
4272 int opno = recog_data.dup_num[i];
4273 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4274 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4275 }
4276
4277#if 0
4278 /* This loses because reloading of prior insns can invalidate the equivalence
4279 (or at least find_equiv_reg isn't smart enough to find it any more),
4280 causing this insn to need more reload regs than it needed before.
4281 It may be too late to make the reload regs available.
4282 Now this optimization is done safely in choose_reload_regs. */
4283
4284 /* For each reload of a reg into some other class of reg,
4285 search for an existing equivalent reg (same value now) in the right class.
4286 We can use it as long as we don't need to change its contents. */
4287 for (i = 0; i < n_reloads; i++)
4288 if (rld[i].reg_rtx == 0
4289 && rld[i].in != 0
4290 && REG_P (rld[i].in)
4291 && rld[i].out == 0)
4292 {
4293 rld[i].reg_rtx
4294 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4295 static_reload_reg_p, 0, rld[i].inmode);
4296 /* Prevent generation of insn to load the value
4297 because the one we found already has the value. */
4298 if (rld[i].reg_rtx)
4299 rld[i].in = rld[i].reg_rtx;
4300 }
4301#endif
4302
4303 /* If we detected an error and replaced the asm instruction by a USE, forget about the
4304 reloads. */
4305 if (GET_CODE (PATTERN (insn)) == USE
4306 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4307 n_reloads = 0;
4308
4309 /* Perhaps an output reload can be combined with another
4310 to reduce needs by one. */
4311 if (!goal_earlyclobber)
4312 combine_reloads ();
4313
4314 /* If we have a pair of reloads for parts of an address, they are reloading
4315 the same object, the operands themselves were not reloaded, and they
4316 are for two operands that are supposed to match, merge the reloads and
4317 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4318
4319 for (i = 0; i < n_reloads; i++)
4320 {
4321 int k;
4322
4323 for (j = i + 1; j < n_reloads; j++)
4324 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4325 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4326 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4327 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4328 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4329 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4330 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4331 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4332 && rtx_equal_p (rld[i].in, rld[j].in)
4333 && (operand_reloadnum[rld[i].opnum] < 0
4334 || rld[operand_reloadnum[rld[i].opnum]].optional)
4335 && (operand_reloadnum[rld[j].opnum] < 0
4336 || rld[operand_reloadnum[rld[j].opnum]].optional)
4337 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4338 || (goal_alternative_matches[rld[j].opnum]
4339 == rld[i].opnum)))
4340 {
4341 for (k = 0; k < n_replacements; k++)
4342 if (replacements[k].what == j)
4343 replacements[k].what = i;
4344
4345 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4346 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4347 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4348 else
4349 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4350 rld[j].in = 0;
4351 }
4352 }
4353
4354 /* Scan all the reloads and update their type.
4355 If a reload is for the address of an operand and we didn't reload
4356 that operand, change the type. Similarly, change the operand number
4357 of a reload when two operands match. If a reload is optional, treat it
4358 as though the operand isn't reloaded.
4359
4360 ??? This latter case is somewhat odd because if we do the optional
4361 reload, it means the object is hanging around. Thus we need only
4362 do the address reload if the optional reload was NOT done.
4363
4364 Change secondary reloads to be the address type of their operand, not
4365 the normal type.
4366
4367 If an operand's reload is now RELOAD_OTHER, change any
4368 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4369 RELOAD_FOR_OTHER_ADDRESS. */
4370
4371 for (i = 0; i < n_reloads; i++)
4372 {
4373 if (rld[i].secondary_p
4374 && rld[i].when_needed == operand_type[rld[i].opnum])
4375 rld[i].when_needed = address_type[rld[i].opnum];
4376
4377 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4378 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4379 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4380 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4381 && (operand_reloadnum[rld[i].opnum] < 0
4382 || rld[operand_reloadnum[rld[i].opnum]].optional))
4383 {
4384 /* If we have a secondary reload to go along with this reload,
4385 change its type to RELOAD_FOR_OPADDR_ADDR. */
4386
4387 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4388 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4389 && rld[i].secondary_in_reload != -1)
4390 {
4391 int secondary_in_reload = rld[i].secondary_in_reload;
4392
4393 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4394
4395 /* If there's a tertiary reload we have to change it also. */
4396 if (secondary_in_reload > 0
4397 && rld[secondary_in_reload].secondary_in_reload != -1)
4398 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4399 = RELOAD_FOR_OPADDR_ADDR;
4400 }
4401
4402 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4403 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4404 && rld[i].secondary_out_reload != -1)
4405 {
4406 int secondary_out_reload = rld[i].secondary_out_reload;
4407
4408 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4409
4410 /* If there's a tertiary reload we have to change it also. */
4411 if (secondary_out_reload
4412 && rld[secondary_out_reload].secondary_out_reload != -1)
4413 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4414 = RELOAD_FOR_OPADDR_ADDR;
4415 }
4416
4417 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4418 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4419 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4420 else
4421 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4422 }
4423
4424 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4425 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4426 && operand_reloadnum[rld[i].opnum] >= 0
4427 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4428 == RELOAD_OTHER))
4429 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4430
4431 if (goal_alternative_matches[rld[i].opnum] >= 0)
4432 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4433 }
4434
4435 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4436 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4437 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4438
4439 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4440 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4441 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4442 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4443 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4444 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4445 This is complicated by the fact that a single operand can have more
4446 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4447 choose_reload_regs without affecting code quality, and cases that
4448 actually fail are extremely rare, so it turns out to be better to fix
4449 the problem here by not generating cases that choose_reload_regs will
4450 fail for. */
4451 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4452 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4453 a single operand.
4454 We can reduce the register pressure by exploiting that a
4455 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4456 does not conflict with any of them, if it is only used for the first of
4457 the RELOAD_FOR_X_ADDRESS reloads. */
4458 {
4459 int first_op_addr_num = -2;
4460 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4461 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4462 int need_change = 0;
4463 /* We use first_op_addr_num and the contents of the above arrays
4464 first as flags - -2 means no instance encountered, -1 means exactly
4465 one instance encountered.
4466 If more than one instance has been encountered, we store the reload
4467 number of the first reload of the kind in question; reload numbers
4468 are known to be non-negative. */
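/* A worked trace of that encoding (illustration only): FIRST_OP_ADDR_NUM
   starts at -2; the first RELOAD_FOR_OPERAND_ADDRESS seen bumps it to -1
   and nothing else happens, while a second one makes ++first_op_addr_num
   reach 0, so the reload number is stored and NEED_CHANGE is set.  */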
4469 for (i = 0; i < noperands; i++)
4470 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4471 for (i = n_reloads - 1; i >= 0; i--)
4472 {
4473 switch (rld[i].when_needed)
4474 {
4475 case RELOAD_FOR_OPERAND_ADDRESS:
4476 if (++first_op_addr_num >= 0)
4477 {
4478 first_op_addr_num = i;
4479 need_change = 1;
4480 }
4481 break;
4482 case RELOAD_FOR_INPUT_ADDRESS:
4483 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4484 {
4485 first_inpaddr_num[rld[i].opnum] = i;
4486 need_change = 1;
4487 }
4488 break;
4489 case RELOAD_FOR_OUTPUT_ADDRESS:
4490 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4491 {
4492 first_outpaddr_num[rld[i].opnum] = i;
4493 need_change = 1;
4494 }
4495 break;
4496 default:
4497 break;
4498 }
4499 }
4500
4501 if (need_change)
4502 {
4503 for (i = 0; i < n_reloads; i++)
4504 {
4505 int first_num;
4506 enum reload_type type;
4507
4508 switch (rld[i].when_needed)
4509 {
4510 case RELOAD_FOR_OPADDR_ADDR:
4511 first_num = first_op_addr_num;
4512 type = RELOAD_FOR_OPERAND_ADDRESS;
4513 break;
4514 case RELOAD_FOR_INPADDR_ADDRESS:
4515 first_num = first_inpaddr_num[rld[i].opnum];
4516 type = RELOAD_FOR_INPUT_ADDRESS;
4517 break;
4518 case RELOAD_FOR_OUTADDR_ADDRESS:
4519 first_num = first_outpaddr_num[rld[i].opnum];
4520 type = RELOAD_FOR_OUTPUT_ADDRESS;
4521 break;
4522 default:
4523 continue;
4524 }
4525 if (first_num < 0)
4526 continue;
4527 else if (i > first_num)
4528 rld[i].when_needed = type;
4529 else
4530 {
4531 /* Check if the only TYPE reload that uses reload I is
4532 reload FIRST_NUM. */
4533 for (j = n_reloads - 1; j > first_num; j--)
4534 {
4535 if (rld[j].when_needed == type
4536 && (rld[i].secondary_p
4537 ? rld[j].secondary_in_reload == i
4538 : reg_mentioned_p (rld[i].in, rld[j].in)))
4539 {
4540 rld[i].when_needed = type;
4541 break;
4542 }
4543 }
4544 }
4545 }
4546 }
4547 }
4548
4549 /* See if we have any reloads that are now allowed to be merged
4550 because we've changed when the reload is needed to
4551 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4552 check for the most common cases. */
4553
4554 for (i = 0; i < n_reloads; i++)
4555 if (rld[i].in != 0 && rld[i].out == 0
4556 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4557 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4558 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4559 for (j = 0; j < n_reloads; j++)
4560 if (i != j && rld[j].in != 0 && rld[j].out == 0
4561 && rld[j].when_needed == rld[i].when_needed
4562 && MATCHES (rld[i].in, rld[j].in)
4563 && rld[i].rclass == rld[j].rclass
4564 && !rld[i].nocombine && !rld[j].nocombine
4565 && rld[i].reg_rtx == rld[j].reg_rtx)
4566 {
4567 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4568 transfer_replacements (i, j);
4569 rld[j].in = 0;
4570 }
4571
4572 /* Compute reload_mode and reload_nregs. */
4573 for (i = 0; i < n_reloads; i++)
4574 {
4575 rld[i].mode = rld[i].inmode;
4576 if (rld[i].mode == VOIDmode
4577 || partial_subreg_p (rld[i].mode, rld[i].outmode))
4578 rld[i].mode = rld[i].outmode;
4579
4580 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4581 }
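/* A hypothetical example: for an in-out reload with inmode SImode and
   outmode DImode, partial_subreg_p (SImode, DImode) is true, so
   rld[i].mode becomes the wider DImode and rld[i].nregs becomes the
   number of hard registers a DImode value needs in rld[i].rclass.  */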
4582
4583 /* Special case a simple move with an input reload and a
4584 destination of a hard reg: if the hard reg is ok, use it. */
4585 for (i = 0; i < n_reloads; i++)
4586 if (rld[i].when_needed == RELOAD_FOR_INPUT
4587 && GET_CODE (PATTERN (insn)) == SET
4588 && REG_P (SET_DEST (PATTERN (insn)))
4589 && (SET_SRC (PATTERN (insn)) == rld[i].in
4590 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4591 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4592 {
4593 rtx dest = SET_DEST (PATTERN (insn));
4594 unsigned int regno = REGNO (dest);
4595
4596 if (regno < FIRST_PSEUDO_REGISTER
4597 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4598 && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4599 {
4600 int nr = hard_regno_nregs (regno, rld[i].mode);
4601 int ok = 1, nri;
4602
4603 for (nri = 1; nri < nr; nri ++)
4604 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4605 {
4606 ok = 0;
4607 break;
4608 }
4609
4610 if (ok)
4611 rld[i].reg_rtx = dest;
4612 }
4613 }
4614
4615 return retval;
4616}
4617
4618/* Return true if alternative number ALTNUM in constraint-string
4619 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4620 MEM gives the reference if its address hasn't been fully reloaded,
4621 otherwise it is NULL. */
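/* As a hypothetical example: with CONSTRAINT "r,m" and ALTNUM 1, the
   first loop below skips past "r," and the scan then starts at the
   memory constraint 'm', so the function returns true (provided MEM is
   NULL or satisfies it); with ALTNUM 0 the scan sees only 'r' before
   reaching ',' and returns false.  */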
4622
4623static bool
4624alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4625 const char *constraint, int altnum)
4626{
4627 int c;
4628
4629 /* Skip alternatives before the one requested. */
4630 while (altnum > 0)
4631 {
4632 while (*constraint++ != ',')
4633 ;
4634 altnum--;
4635 }
4636 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4637 If one of them is present, this alternative accepts the result of
4638 passing a constant-pool reference through find_reloads_toplev.
4639
4640 The same is true of extra memory constraints if the address
4641 was reloaded into a register. However, the target may elect
4642 to disallow the original constant address, forcing it to be
4643 reloaded into a register instead. */
4644 for (; (c = *constraint) && c != ',' && c != '#';
4645 constraint += CONSTRAINT_LEN (c, constraint))
4646 {
4647 enum constraint_num cn = lookup_constraint (constraint);
4648 if (insn_extra_memory_constraint (cn)
4649 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4650 return true;
4651 }
4652 return false;
4653}
4654
4655/* Scan X for memory references and scan the addresses for reloading.
4656 Also checks for references to "constant" regs that we want to eliminate
4657 and replaces them with the values they stand for.
4658 We may alter X destructively if it contains a reference to such.
4659 If X is just a constant reg, we return the equivalent value
4660 instead of X.
4661
4662 IND_LEVELS says how many levels of indirect addressing this machine
4663 supports.
4664
4665 OPNUM and TYPE identify the purpose of the reload.
4666
4667 IS_SET_DEST is true if X is the destination of a SET, which is not
4668 appropriate to be replaced by a constant.
4669
4670 INSN, if nonzero, is the insn in which we do the reload. It is used
4671 to determine if we may generate output reloads, and where to put USEs
4672 for pseudos that we have to replace with stack slots.
4673
4674 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4675 result of find_reloads_address. */
4676
4677static rtx
4678find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4679 int ind_levels, int is_set_dest, rtx_insn *insn,
4680 int *address_reloaded)
4681{
4682 RTX_CODE code = GET_CODE (x);
4683
4684 const char *fmt = GET_RTX_FORMAT (code);
4685 int i;
4686 int copied;
4687
4688 if (code == REG)
4689 {
4690 /* This code is duplicated for speed in find_reloads. */
4691 int regno = REGNO (x);
4692 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4693 x = reg_equiv_constant (regno);
4694#if 0
4695 /* This creates (subreg (mem...)) which would cause an unnecessary
4696 reload of the mem. */
4697 else if (reg_equiv_mem (regno) != 0)
4698 x = reg_equiv_mem (regno);
4699#endif
4700 else if (reg_equiv_memory_loc (regno)
4701 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4702 {
4703 rtx mem = make_memloc (x, regno);
4704 if (reg_equiv_address (regno)
4705 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4706 {
4707 /* If this is not a toplevel operand, find_reloads doesn't see
4708 this substitution. We have to emit a USE of the pseudo so
4709 that delete_output_reload can see it. */
4710 if (replace_reloads && recog_data.operand[opnum] != x)
4711 /* We mark the USE with QImode so that we recognize it
4712 as one that can be safely deleted at the end of
4713 reload. */
4714 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4715 QImode);
4716 x = mem;
4717 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4718 opnum, type, ind_levels, insn);
4719 if (!rtx_equal_p (x, mem))
4720 push_reg_equiv_alt_mem (regno, x);
4721 if (address_reloaded)
4722 *address_reloaded = i;
4723 }
4724 }
4725 return x;
4726 }
4727 if (code == MEM)
4728 {
4729 rtx tem = x;
4730
4731 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4732 opnum, type, ind_levels, insn);
4733 if (address_reloaded)
4734 *address_reloaded = i;
4735
4736 return tem;
4737 }
4738
4739 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4740 {
4741 /* Check for SUBREG containing a REG that's equivalent to a
4742 constant. If the constant has a known value, truncate it
4743 right now. Similarly if we are extracting a single-word of a
4744 multi-word constant. If the constant is symbolic, allow it
4745 to be substituted normally. push_reload will strip the
4746 subreg later. The constant must not be VOIDmode, because we
4747 will lose the mode of the register (this should never happen
4748 because one of the cases above should handle it). */
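      /* For instance (purely illustrative), (subreg:SI (reg:DI 123) 0)
         where pseudo 123 is equivalent to a CONST_DOUBLE is simplified
         here to the corresponding single-word constant; if that constant
         is not legitimate, it is forced into the constant pool below and
         the pool reference's address is reloaded instead.  */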
4749
4750 int regno = REGNO (SUBREG_REG (x));
4751 rtx tem;
4752
4753 if (regno >= FIRST_PSEUDO_REGISTER
4754 && reg_renumber[regno] < 0
4755 && reg_equiv_constant (regno) != 0)
4756 {
4757 tem =
4758 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4759 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4760 gcc_assert (tem);
4761 if (CONSTANT_P (tem)
4762 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4763 {
4764 tem = force_const_mem (GET_MODE (x), tem);
4765 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4766 &XEXP (tem, 0), opnum, type,
4767 ind_levels, insn);
4768 if (address_reloaded)
4769 *address_reloaded = i;
4770 }
4771 return tem;
4772 }
4773
4774 /* If the subreg contains a reg that will be converted to a mem,
4775 attempt to convert the whole subreg to a (narrower or wider)
4776 memory reference instead. If this succeeds, we're done --
4777 otherwise fall through to check whether the inner reg still
4778 needs address reloads anyway. */
4779
4780 if (regno >= FIRST_PSEUDO_REGISTER
4781 && reg_equiv_memory_loc (regno) != 0)
4782 {
4783 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4784 insn, address_reloaded);
4785 if (tem)
4786 return tem;
4787 }
4788 }
4789
4790 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4791 {
4792 if (fmt[i] == 'e')
4793 {
4794 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4795 ind_levels, is_set_dest, insn,
4796 address_reloaded);
4797 /* If we have replaced a reg with its equivalent memory loc -
4798 that can still be handled here e.g. if it's in a paradoxical
4799 subreg - we must make the change in a copy, rather than using
4800 a destructive change. This way, find_reloads can still elect
4801 not to do the change. */
4802 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4803 {
4804 x = shallow_copy_rtx (x);
4805 copied = 1;
4806 }
4807 XEXP (x, i) = new_part;
4808 }
4809 }
4810 return x;
4811}
4812
4813/* Return a mem ref for the memory equivalent of reg REGNO.
4814 This mem ref is not shared with anything. */
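/* For example (offsets invented): if reg_equiv_memory_loc for REGNO is
   (mem:SI (plus (reg fp) (const_int -8))) and the frame pointer is being
   eliminated into the stack pointer, the result might be
   (mem:SI (plus (reg sp) (const_int 24))), with the mode then adjusted
   to GET_MODE (AD).  */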
4815
4816static rtx
4817make_memloc (rtx ad, int regno)
4818{
4819 /* We must rerun eliminate_regs, in case the elimination
4820 offsets have changed. */
4821 rtx tem
4822 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4823 0);
4824
4825 /* If TEM might contain a pseudo, we must copy it to avoid
4826 modifying it when we do the substitution for the reload. */
4827 if (rtx_varies_p (tem, 0))
4828 tem = copy_rtx (tem);
4829
4830 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4831 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4832
4833 /* Copy the result if it's still the same as the equivalence, to avoid
4834 modifying it when we do the substitution for the reload. */
4835 if (tem == reg_equiv_memory_loc (regno))
4836 tem = copy_rtx (tem);
4837 return tem;
4838}
4839
4840/* Returns true if AD could be turned into a valid memory reference
4841 to mode MODE in address space AS by reloading the part pointed to
4842 by PART into a register. */
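/* E.g. (illustrative): for AD == (plus (reg 300) (const_int 100000)) and
   PART == &XEXP (AD, 1), a fresh pseudo is substituted for the
   displacement and we ask whether (plus (reg 300) (reg NEW)) would be a
   valid MODE address; *PART is restored before returning.  */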
4843
4844static bool
4845maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4846 addr_space_t as, rtx *part)
4847{
4848 bool retv;
4849 rtx tem = *part;
4850 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4851
4852 *part = reg;
4853 retv = memory_address_addr_space_p (mode, ad, as);
4854 *part = tem;
4855
4856 return retv;
4857}
4858
4859/* Record all reloads needed for handling memory address AD
4860 which appears in *LOC in a memory reference to mode MODE
4861 which itself is found in location *MEMREFLOC.
4862 Note that we take shortcuts assuming that no multi-reg machine mode
4863 occurs as part of an address.
4864
4865 OPNUM and TYPE specify the purpose of this reload.
4866
4867 IND_LEVELS says how many levels of indirect addressing this machine
4868 supports.
4869
4870 INSN, if nonzero, is the insn in which we do the reload. It is used
4871 to determine if we may generate output reloads, and where to put USEs
4872 for pseudos that we have to replace with stack slots.
4873
4874 Value is one if this address is reloaded or replaced as a whole; it is
4875 zero if the top level of this address was not reloaded or replaced, and
4876 it is -1 if it may or may not have been reloaded or replaced.
4877
4878 Note that there is no verification that the address will be valid after
4879 this routine does its work. Instead, we rely on the fact that the address
4880 was valid when reload started. So we need only undo things that reload
4881 could have broken. These are wrong register types, pseudos not allocated
4882 to a hard register, and frame pointer elimination. */
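/* Two illustrative outcomes: if AD is a pseudo whose reg_equiv_constant
   is (symbol_ref "x") and such an address is not directly valid, the
   constant is reloaded into a base register (going through the constant
   pool if necessary) and 1 is returned; if AD is an already-valid
   (plus (reg fp) (const_int 8)), nothing is pushed and 0 is returned.  */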
4883
4884static int
4885find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4886 rtx *loc, int opnum, enum reload_type type,
4887 int ind_levels, rtx_insn *insn)
4888{
4889 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4890 : ADDR_SPACE_GENERIC;
4891 int regno;
4892 int removed_and = 0;
4893 int op_index;
4894 rtx tem;
4895
4896 /* If the address is a register, see if it is a legitimate address and
4897 reload if not. We first handle the cases where we need not reload
4898 or where we must reload in a non-standard way. */
4899
4900 if (REG_P (ad))
4901 {
4902 regno = REGNO (ad);
4903
4904 if (reg_equiv_constant (regno) != 0)
4905 {
4906 find_reloads_address_part (reg_equiv_constant (regno), loc,
4907 base_reg_class (mode, as, MEM,
4908 SCRATCH, insn),
4909 GET_MODE (ad), opnum, type, ind_levels);
4910 return 1;
4911 }
4912
4913 tem = reg_equiv_memory_loc (regno);
4914 if (tem != 0)
4915 {
4916 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4917 {
4918 tem = make_memloc (ad, regno);
4919 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4920 XEXP (tem, 0),
4921 MEM_ADDR_SPACE (tem)))
4922 {
4923 rtx orig = tem;
4924
4925 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4926 &XEXP (tem, 0), opnum,
4927 ADDR_TYPE (type), ind_levels, insn);
4928 if (!rtx_equal_p (tem, orig))
4929 push_reg_equiv_alt_mem (regno, tem);
4930 }
4931 /* We can avoid a reload if the register's equivalent memory
4932 expression is valid as an indirect memory address.
4933 But not all addresses are valid in a mem used as an indirect
4934 address: only reg or reg+constant. */
4935
4936 if (ind_levels > 0
4937 && strict_memory_address_addr_space_p (mode, tem, as)
4938 && (REG_P (XEXP (tem, 0))
4939 || (GET_CODE (XEXP (tem, 0)) == PLUS
4940 && REG_P (XEXP (XEXP (tem, 0), 0))
4941 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4942 {
4943 /* TEM is not the same as what we'll be replacing the
4944 pseudo with after reload, put a USE in front of INSN
4945 in the final reload pass. */
4946 if (replace_reloads
4947 && num_not_at_initial_offset
4948 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4949 {
4950 *loc = tem;
4951 /* We mark the USE with QImode so that we
4952 recognize it as one that can be safely
4953 deleted at the end of reload. */
4954 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4955 insn), QImode);
4956
4957 /* This doesn't really count as replacing the address
4958 as a whole, since it is still a memory access. */
4959 }
4960 return 0;
4961 }
4962 ad = tem;
4963 }
4964 }
4965
4966 /* The only remaining case where we can avoid a reload is if this is a
4967 hard register that is valid as a base register and which is not the
4968 subject of a CLOBBER in this insn. */
4969
4970 else if (regno < FIRST_PSEUDO_REGISTER
4971 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4972 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4973 return 0;
4974
4975 /* If we do not have one of the cases above, we must do the reload. */
4976 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4977 base_reg_class (mode, as, MEM, SCRATCH, insn),
4978 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4979 return 1;
4980 }
4981
4982 if (strict_memory_address_addr_space_p (mode, ad, as))
4983 {
4984 /* The address appears valid, so reloads are not needed.
4985 But the address may contain an eliminable register.
4986 This can happen because a machine with indirect addressing
4987 may consider a pseudo register by itself a valid address even when
4988 it has failed to get a hard reg.
4989 So do a tree-walk to find and eliminate all such regs. */
4990
4991 /* But first quickly dispose of a common case. */
4992 if (GET_CODE (ad) == PLUS
4993 && CONST_INT_P (XEXP (ad, 1))
4994 && REG_P (XEXP (ad, 0))
4995 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4996 return 0;
4997
4998 subst_reg_equivs_changed = 0;
4999 *loc = subst_reg_equivs (ad, insn);
5000
5001 if (! subst_reg_equivs_changed)
5002 return 0;
5003
5004 /* Check result for validity after substitution. */
5005 if (strict_memory_address_addr_space_p (mode, ad, as))
5006 return 0;
5007 }
5008
5009#ifdef LEGITIMIZE_RELOAD_ADDRESS
5010 do
5011 {
5012 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5013 {
5014 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5015 ind_levels, win);
5016 }
5017 break;
5018 win:
5019 *memrefloc = copy_rtx (*memrefloc);
5020 XEXP (*memrefloc, 0) = ad;
5021 move_replacements (&ad, &XEXP (*memrefloc, 0));
5022 return -1;
5023 }
5024 while (0);
5025#endif
5026
5027 /* The address is not valid. We have to figure out why. First see if
5028 we have an outer AND and remove it if so. Then analyze what's inside. */
5029
5030 if (GET_CODE (ad) == AND)
5031 {
5032 removed_and = 1;
5033 loc = &XEXP (ad, 0);
5034 ad = *loc;
5035 }
5036
5037 /* One possibility for why the address is invalid is that it is itself
5038 a MEM. This can happen when the frame pointer is being eliminated, a
5039 pseudo is not allocated to a hard register, and the offset between the
5040 frame and stack pointers is not its initial value. In that case the
5041 pseudo will have been replaced by a MEM referring to the
5042 stack pointer. */
5043 if (MEM_P (ad))
5044 {
5045 /* First ensure that the address in this MEM is valid. Then, unless
5046 indirect addresses are valid, reload the MEM into a register. */
5047 tem = ad;
5048 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5049 opnum, ADDR_TYPE (type),
5050 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5051
5052 /* If tem was changed, then we must create a new memory reference to
5053 hold it and store it back into memrefloc. */
5054 if (tem != ad && memrefloc)
5055 {
5056 *memrefloc = copy_rtx (*memrefloc);
5057 copy_replacements (tem, XEXP (*memrefloc, 0));
5058 loc = &XEXP (*memrefloc, 0);
5059 if (removed_and)
5060 loc = &XEXP (*loc, 0);
5061 }
5062
5063 /* Check similar cases as for indirect addresses as above except
5064 that we can allow pseudos and a MEM since they should have been
5065 taken care of above. */
5066
5067 if (ind_levels == 0
5068 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5069 || MEM_P (XEXP (tem, 0))
5070 || ! (REG_P (XEXP (tem, 0))
5071 || (GET_CODE (XEXP (tem, 0)) == PLUS
5072 && REG_P (XEXP (XEXP (tem, 0), 0))
5073 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5074 {
5075 /* Must use TEM here, not AD, since it is the one that will
5076 have any subexpressions reloaded, if needed. */
5077 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5078 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5079 VOIDmode, 0,
5080 0, opnum, type);
5081 return ! removed_and;
5082 }
5083 else
5084 return 0;
5085 }
5086
5087 /* If we have address of a stack slot but it's not valid because the
5088 displacement is too large, compute the sum in a register.
5089 Handle all base registers here, not just fp/ap/sp, because on some
5090 targets (namely SH) we can also get too large displacements from
5091 big-endian corrections. */
5092 else if (GET_CODE (ad) == PLUS
5093 && REG_P (XEXP (ad, 0))
5094 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5095 && CONST_INT_P (XEXP (ad, 1))
5096 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5097 CONST_INT)
5098 /* Similarly, if we were to reload the base register and the
5099 mem+offset address is still invalid, then we want to reload
5100 the whole address, not just the base register. */
5101 || ! maybe_memory_address_addr_space_p
5102 (mode, ad, as, &(XEXP (ad, 0)))))
5103
5104 {
5105 /* Unshare the MEM rtx so we can safely alter it. */
5106 if (memrefloc)
5107 {
5108 *memrefloc = copy_rtx (*memrefloc);
5109 loc = &XEXP (*memrefloc, 0);
5110 if (removed_and)
5111 loc = &XEXP (*loc, 0);
5112 }
5113
5114 if (double_reg_address_ok[mode]
5115 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5116 PLUS, CONST_INT))
5117 {
5118 /* Unshare the sum as well. */
5119 *loc = ad = copy_rtx (ad);
5120
5121 /* Reload the displacement into an index reg.
5122 We assume the frame pointer or arg pointer is a base reg. */
5123 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5124 index_reg_class (insn), GET_MODE (ad), opnum,
5125 type, ind_levels);
5126 return 0;
5127 }
5128 else
5129 {
5130 /* If the sum of two regs is not necessarily valid,
5131 reload the sum into a base reg.
5132 That will at least work. */
5133 find_reloads_address_part (ad, loc,
5134 base_reg_class (mode, as, MEM,
5135 SCRATCH, insn),
5136 GET_MODE (ad), opnum, type, ind_levels);
5137 }
5138 return ! removed_and;
5139 }
5140
5141 /* If we have an indexed stack slot, there are three possible reasons why
5142 it might be invalid: The index might need to be reloaded, the address
5143 might have been made by frame pointer elimination and hence have a
5144 constant out of range, or both reasons might apply.
5145
5146 We can easily check for an index needing reload, but even if that is the
5147 case, we might also have an invalid constant. To avoid making the
5148 conservative assumption and requiring two reloads, we see if this address
5149 is valid when not interpreted strictly. If it is, the only problem is
5150 that the index needs a reload and find_reloads_address_1 will take care
5151 of it.
5152
5153 Handle all base registers here, not just fp/ap/sp, because on some
5154 targets (namely SPARC) we can also get invalid addresses from preventive
5155 subreg big-endian corrections made by find_reloads_toplev. We
5156 can also get expressions involving LO_SUM (rather than PLUS) from
5157 find_reloads_subreg_address.
5158
5159 If we decide to do something, it must be that `double_reg_address_ok'
5160 is true. We generate a reload of the base register + constant and
5161 rework the sum so that the reload register will be added to the index.
5162 This is safe because we know the address isn't shared.
5163
5164 We check for the base register as both the first and second operand of
5165 the innermost PLUS and/or LO_SUM. */
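  /* As an illustration (registers and offsets invented): for
     AD == (plus (plus (reg fp) (reg 66)) (const_int 400)) with an
     out-of-range displacement, OFFSET_REG becomes
     (plus (reg fp) (const_int 400)); that sum is reloaded into a base
     register and the address then used is (plus (reg reload) (reg 66)).  */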
5166
5167 for (op_index = 0; op_index < 2; ++op_index)
5168 {
5169 rtx operand, addend;
5170 enum rtx_code inner_code;
5171
5172 if (GET_CODE (ad) != PLUS)
5173 continue;
5174
5175 inner_code = GET_CODE (XEXP (ad, 0));
5176 if (!(GET_CODE (ad) == PLUS
5177 && CONST_INT_P (XEXP (ad, 1))
5178 && (inner_code == PLUS || inner_code == LO_SUM)))
5179 continue;
5180
5181 operand = XEXP (XEXP (ad, 0), op_index);
5182 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5183 continue;
5184
5185 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5186
5187 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5188 GET_CODE (addend))
5189 || operand == frame_pointer_rtx
5190 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5191 && operand == hard_frame_pointer_rtx)
5192 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5193 && operand == arg_pointer_rtx)
5194 || operand == stack_pointer_rtx)
5195 && ! maybe_memory_address_addr_space_p
5196 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5197 {
5198 rtx offset_reg;
5199 enum reg_class cls;
5200
5201 offset_reg = plus_constant (GET_MODE (ad), operand,
5202 INTVAL (XEXP (ad, 1)));
5203
5204 /* Form the adjusted address. */
5205 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5206 ad = gen_rtx_PLUS (GET_MODE (ad),
5207 op_index == 0 ? offset_reg : addend,
5208 op_index == 0 ? addend : offset_reg);
5209 else
5210 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5211 op_index == 0 ? offset_reg : addend,
5212 op_index == 0 ? addend : offset_reg);
5213 *loc = ad;
5214
5215 cls = base_reg_class (mode, as, MEM, GET_CODE (addend), insn);
5216 find_reloads_address_part (XEXP (ad, op_index),
5217 &XEXP (ad, op_index), cls,
5218 GET_MODE (ad), opnum, type, ind_levels);
5219 find_reloads_address_1 (mode, as,
5220 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5221 GET_CODE (XEXP (ad, op_index)),
5222 &XEXP (ad, 1 - op_index), opnum,
5223 type, 0, insn);
5224
5225 return 0;
5226 }
5227 }
5228
5229 /* See if address becomes valid when an eliminable register
5230 in a sum is replaced. */
5231
5232 tem = ad;
5233 if (GET_CODE (ad) == PLUS)
5234 tem = subst_indexed_address (ad);
5235 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5236 {
5237 /* Ok, we win that way. Replace any additional eliminable
5238 registers. */
5239
5240 subst_reg_equivs_changed = 0;
5241 tem = subst_reg_equivs (tem, insn);
5242
5243 /* Make sure that didn't make the address invalid again. */
5244
5245 if (! subst_reg_equivs_changed
5246 || strict_memory_address_addr_space_p (mode, tem, as))
5247 {
5248 *loc = tem;
5249 return 0;
5250 }
5251 }
5252
5253 /* If constants aren't valid addresses, reload the constant address
5254 into a register. */
5255 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5256 {
5257 machine_mode address_mode = GET_MODE (ad);
5258 if (address_mode == VOIDmode)
5259 address_mode = targetm.addr_space.address_mode (as);
5260
5261 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5262 Unshare it so we can safely alter it. */
5263 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5264 && CONSTANT_POOL_ADDRESS_P (ad))
5265 {
5266 *memrefloc = copy_rtx (*memrefloc);
5267 loc = &XEXP (*memrefloc, 0);
5268 if (removed_and)
5269 loc = &XEXP (*loc, 0);
5270 }
5271
5272 find_reloads_address_part (ad, loc,
5273 base_reg_class (mode, as, MEM,
5274 SCRATCH, insn),
5275 address_mode, opnum, type, ind_levels);
5276 return ! removed_and;
5277 }
5278
5279 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5280 opnum, type, ind_levels, insn);
5281}
5282
5283/* Find all pseudo regs appearing in AD
5284 that are eliminable in favor of equivalent values
5285 and do not have hard regs; replace them by their equivalents.
5286 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5287 front of it for pseudos that we have to replace with stack slots. */
5288
5289static rtx
5290subst_reg_equivs (rtx ad, rtx_insn *insn)
5291{
5292 RTX_CODE code = GET_CODE (ad);
5293 int i;
5294 const char *fmt;
5295
5296 switch (code)
5297 {
5298 case HIGH:
5299 case CONST:
5300 CASE_CONST_ANY:
5301 case SYMBOL_REF:
5302 case LABEL_REF:
5303 case PC:
5304 return ad;
5305
5306 case REG:
5307 {
5308 int regno = REGNO (ad);
5309
5310 if (reg_equiv_constant (regno) != 0)
5311 {
5312 subst_reg_equivs_changed = 1;
5313 return reg_equiv_constant (regno);
5314 }
5315 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5316 {
5317 rtx mem = make_memloc (ad, regno);
5318 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5319 {
5320 subst_reg_equivs_changed = 1;
5321 /* We mark the USE with QImode so that we recognize it
5322 as one that can be safely deleted at the end of
5323 reload. */
5324 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5325 QImode);
5326 return mem;
5327 }
5328 }
5329 }
5330 return ad;
5331
5332 case PLUS:
5333 /* Quickly dispose of a common case. */
5334 if (XEXP (ad, 0) == frame_pointer_rtx
5335 && CONST_INT_P (XEXP (ad, 1)))
5336 return ad;
5337 break;
5338
5339 default:
5340 break;
5341 }
5342
5343 fmt = GET_RTX_FORMAT (code);
5344 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5345 if (fmt[i] == 'e')
5346 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5347 return ad;
5348}
5349
5350/* Compute the sum of X and Y, making canonicalizations assumed in an
5351 address, namely: sum constant integers, surround the sum of two
5352 constants with a CONST, put the constant as the second operand, and
5353 group the constant on the outermost sum.
5354
5355 This routine assumes both inputs are already in canonical form. */
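/* For example, form_sum (SImode, (plus (reg 65) (const_int 4)),
   (const_int 8)) yields (plus (reg 65) (const_int 12)), while the sum
   of (symbol_ref "x") and (const_int 4) comes back wrapped as
   (const (plus (symbol_ref "x") (const_int 4))).  (Operands are
   illustrative only.)  */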
5356
5357rtx
5358form_sum (machine_mode mode, rtx x, rtx y)
5359{
5360 rtx tem;
5361
5362 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5363 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5364
5365 if (CONST_INT_P (x))
5366 return plus_constant (mode, y, INTVAL (x));
5367 else if (CONST_INT_P (y))
5368 return plus_constant (mode, x, INTVAL (y));
5369 else if (CONSTANT_P (x))
5370 tem = x, x = y, y = tem;
5371
5372 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5373 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5374
5375 /* Note that if the operands of Y are specified in the opposite
5376 order in the recursive calls below, infinite recursion will occur. */
5377 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5378 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5379
5380 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5381 constant will have been placed second. */
5382 if (CONSTANT_P (x) && CONSTANT_P (y))
5383 {
5384 if (GET_CODE (x) == CONST)
5385 x = XEXP (x, 0);
5386 if (GET_CODE (y) == CONST)
5387 y = XEXP (y, 0);
5388
5389 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5390 }
5391
5392 return gen_rtx_PLUS (mode, x, y);
5393}
5394
5395/* If ADDR is a sum containing a pseudo register that should be
5396 replaced with a constant (from reg_equiv_constant),
5397 return the result of doing so, and also apply the associative
5398 law so that the result is more likely to be a valid address.
5399 (But it is not guaranteed to be one.)
5400
5401 Note that at most one register is replaced, even if more are
5402 replaceable. Also, we try to put the result into a canonical form
5403 so it is more likely to be a valid address.
5404
5405 In all other cases, return ADDR. */
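/* Illustrative example: for ADDR == (plus (plus (reg 65) (reg 66))
   (const_int 4)), where pseudo 65 got no hard register and is
   equivalent to (symbol_ref "a"), the result is
   (plus (reg 66) (const (plus (symbol_ref "a") (const_int 4)))).  */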
5406
5407static rtx
5408subst_indexed_address (rtx addr)
5409{
5410 rtx op0 = 0, op1 = 0, op2 = 0;
5411 rtx tem;
5412 int regno;
5413
5414 if (GET_CODE (addr) == PLUS)
5415 {
5416 /* Try to find a register to replace. */
5417 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5418 if (REG_P (op0)
5419 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5420 && reg_renumber[regno] < 0
5421 && reg_equiv_constant (regno) != 0)
5422 op0 = reg_equiv_constant (regno);
5423 else if (REG_P (op1)
5424 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5425 && reg_renumber[regno] < 0
5426 && reg_equiv_constant (regno) != 0)
5427 op1 = reg_equiv_constant (regno);
5428 else if (GET_CODE (op0) == PLUS
5429 && (tem = subst_indexed_address (op0)) != op0)
5430 op0 = tem;
5431 else if (GET_CODE (op1) == PLUS
5432 && (tem = subst_indexed_address (op1)) != op1)
5433 op1 = tem;
5434 else
5435 return addr;
5436
5437 /* Pick out up to three things to add. */
5438 if (GET_CODE (op1) == PLUS)
5439 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5440 else if (GET_CODE (op0) == PLUS)
5441 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5442
5443 /* Compute the sum. */
5444 if (op2 != 0)
5445 op1 = form_sum (GET_MODE (addr), op1, op2);
5446 if (op1 != 0)
5447 op0 = form_sum (GET_MODE (addr), op0, op1);
5448
5449 return op0;
5450 }
5451 return addr;
5452}
5453
5454/* Update the REG_INC notes for an insn. It updates all REG_INC
5455 notes for the instruction which refer to REGNO so that they
5456 refer instead to the reload number.
5457
5458 INSN is the insn for which any REG_INC notes need updating.
5459
5460 REGNO is the register number which has been reloaded.
5461
5462 RELOADNUM is the reload number. */
5463
5464static void
5465update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5466 int reloadnum ATTRIBUTE_UNUSED)
5467{
5468 if (!AUTO_INC_DEC)
5469 return;
5470
5471 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5472 if (REG_NOTE_KIND (link) == REG_INC
5473 && (int) REGNO (XEXP (link, 0)) == regno)
5474 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5475}
5476
5477/* Record the pseudo registers we must reload into hard registers in a
5478 subexpression of a would-be memory address, X referring to a value
5479 in mode MODE. (This function is not called if the address we find
5480 is strictly valid.)
5481
5482 CONTEXT = 1 means we are considering regs as index regs,
5483 = 0 means we are considering them as base regs.
5484 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5485 or an autoinc code.
5486 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5487 is the code of the index part of the address. Otherwise, pass SCRATCH
5488 for this argument.
5489 OPNUM and TYPE specify the purpose of any reloads made.
5490
5491 IND_LEVELS says how many levels of indirect addressing are
5492 supported at this point in the address.
5493
5494 INSN, if nonzero, is the insn in which we do the reload. It is used
5495 to determine if we may generate output reloads.
5496
5497 We return nonzero if X, as a whole, is reloaded or replaced. */
5498
5499/* Note that we take shortcuts assuming that no multi-reg machine mode
5500 occurs as part of an address.
5501 Also, this is not fully machine-customizable; it works for machines
5502 such as VAXen and 68000's and 32000's, but other possible machines
5503 could have addressing modes that this does not handle right.
5504 If you add push_reload calls here, you need to make sure gen_reload
5505 handles those cases gracefully. */
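/* For instance (illustrative), in the address
   (plus (mult (reg 65) (const_int 4)) (reg 66)) pseudo 65 is examined
   with CONTEXT == 1 (as an index register) and pseudo 66 with
   CONTEXT == 0 (as a base register), and each is reloaded into the
   corresponding register class if it did not end up in a suitable
   hard register.  */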
5506
5507static int
5508find_reloads_address_1 (machine_mode mode, addr_space_t as,
5509 rtx x, int context,
5510 enum rtx_code outer_code, enum rtx_code index_code,
5511 rtx *loc, int opnum, enum reload_type type,
5512 int ind_levels, rtx_insn *insn)
5513{
5514#define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5515 ((CONTEXT) == 0 \
5516 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5517 : REGNO_OK_FOR_INDEX_P (REGNO))
5518
5519 enum reg_class context_reg_class;
5520 RTX_CODE code = GET_CODE (x);
5521 bool reloaded_inner_of_autoinc = false;
5522
5523 if (context == 1)
5524 context_reg_class = index_reg_class (insn);
5525 else
5526 context_reg_class = base_reg_class (mode, as, outer_code, index_code,
5527 insn);
5528
5529 switch (code)
5530 {
5531 case PLUS:
5532 {
5533 rtx orig_op0 = XEXP (x, 0);
5534 rtx orig_op1 = XEXP (x, 1);
5535 RTX_CODE code0 = GET_CODE (orig_op0);
5536 RTX_CODE code1 = GET_CODE (orig_op1);
5537 rtx op0 = orig_op0;
5538 rtx op1 = orig_op1;
5539
5540 if (GET_CODE (op0) == SUBREG)
5541 {
5542 op0 = SUBREG_REG (op0);
5543 code0 = GET_CODE (op0);
5544 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5545 op0 = gen_rtx_REG (word_mode,
5546 (REGNO (op0) +
5547 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5548 GET_MODE (SUBREG_REG (orig_op0)),
5549 SUBREG_BYTE (orig_op0),
5550 GET_MODE (orig_op0))));
5551 }
5552
5553 if (GET_CODE (op1) == SUBREG)
5554 {
5555 op1 = SUBREG_REG (op1);
5556 code1 = GET_CODE (op1);
5557 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5558 /* ??? Why is this given op1's mode and above for
5559 ??? op0 SUBREGs we use word_mode? */
5560 op1 = gen_rtx_REG (GET_MODE (op1),
5561 (REGNO (op1) +
5562 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5563 GET_MODE (SUBREG_REG (orig_op1)),
5564 SUBREG_BYTE (orig_op1),
5565 GET_MODE (orig_op1))));
5566 }
5567 /* A PLUS in the index register can be created only as a result of
5568 register rematerialization for an expression like &localvar*4. Reload it.
5569 It may be possible to combine the displacement on the outer level,
5570 but it is probably not worthwhile to do so. */
5571 if (context == 1)
5572 {
5573 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5574 opnum, ADDR_TYPE (type), ind_levels, insn);
5575 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5576 context_reg_class,
5577 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5578 return 1;
5579 }
5580
5581 if (code0 == MULT || code0 == ASHIFT
5582 || code0 == SIGN_EXTEND || code0 == TRUNCATE
5583 || code0 == ZERO_EXTEND || code1 == MEM)
5584 {
5585 find_reloads_address_1 (mode, as, x: orig_op0, context: 1, outer_code: PLUS, index_code: SCRATCH,
5586 loc: &XEXP (x, 0), opnum, type, ind_levels,
5587 insn);
5588 find_reloads_address_1 (mode, as, x: orig_op1, context: 0, outer_code: PLUS, index_code: code0,
5589 loc: &XEXP (x, 1), opnum, type, ind_levels,
5590 insn);
5591 }
5592
5593 else if (code1 == MULT || code1 == ASHIFT
5594 || code1 == SIGN_EXTEND || code1 == TRUNCATE
5595 || code1 == ZERO_EXTEND || code0 == MEM)
5596 {
5597 find_reloads_address_1 (mode, as, x: orig_op0, context: 0, outer_code: PLUS, index_code: code1,
5598 loc: &XEXP (x, 0), opnum, type, ind_levels,
5599 insn);
5600 find_reloads_address_1 (mode, as, x: orig_op1, context: 1, outer_code: PLUS, index_code: SCRATCH,
5601 loc: &XEXP (x, 1), opnum, type, ind_levels,
5602 insn);
5603 }
5604
5605 else if (code0 == CONST_INT || code0 == CONST
5606 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5607 find_reloads_address_1 (mode, as, x: orig_op1, context: 0, outer_code: PLUS, index_code: code0,
5608 loc: &XEXP (x, 1), opnum, type, ind_levels,
5609 insn);
5610
5611 else if (code1 == CONST_INT || code1 == CONST
5612 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5613 find_reloads_address_1 (mode, as, x: orig_op0, context: 0, outer_code: PLUS, index_code: code1,
5614 loc: &XEXP (x, 0), opnum, type, ind_levels,
5615 insn);
5616
5617 else if (code0 == REG && code1 == REG)
5618 {
5619 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5620 && regno_ok_for_base_p (REGNO (op0), mode, as, outer_code: PLUS, index_code: REG))
5621 return 0;
5622 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5623 && regno_ok_for_base_p (REGNO (op1), mode, as, outer_code: PLUS, index_code: REG))
5624 return 0;
5625 else if (regno_ok_for_base_p (REGNO (op0), mode, as, outer_code: PLUS, index_code: REG))
5626 find_reloads_address_1 (mode, as, x: orig_op1, context: 1, outer_code: PLUS, index_code: SCRATCH,
5627 loc: &XEXP (x, 1), opnum, type, ind_levels,
5628 insn);
5629 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5630 find_reloads_address_1 (mode, as, x: orig_op0, context: 0, outer_code: PLUS, index_code: REG,
5631 loc: &XEXP (x, 0), opnum, type, ind_levels,
5632 insn);
5633 else if (regno_ok_for_base_p (REGNO (op1), mode, as, outer_code: PLUS, index_code: REG))
5634 find_reloads_address_1 (mode, as, x: orig_op0, context: 1, outer_code: PLUS, index_code: SCRATCH,
5635 loc: &XEXP (x, 0), opnum, type, ind_levels,
5636 insn);
5637 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5638 find_reloads_address_1 (mode, as, x: orig_op1, context: 0, outer_code: PLUS, index_code: REG,
5639 loc: &XEXP (x, 1), opnum, type, ind_levels,
5640 insn);
5641 else
5642 {
5643 find_reloads_address_1 (mode, as, x: orig_op0, context: 0, outer_code: PLUS, index_code: REG,
5644 loc: &XEXP (x, 0), opnum, type, ind_levels,
5645 insn);
5646 find_reloads_address_1 (mode, as, x: orig_op1, context: 1, outer_code: PLUS, index_code: SCRATCH,
5647 loc: &XEXP (x, 1), opnum, type, ind_levels,
5648 insn);
5649 }
5650 }
5651
5652 else if (code0 == REG)
5653 {
5654 find_reloads_address_1 (mode, as, x: orig_op0, context: 1, outer_code: PLUS, index_code: SCRATCH,
5655 loc: &XEXP (x, 0), opnum, type, ind_levels,
5656 insn);
5657 find_reloads_address_1 (mode, as, x: orig_op1, context: 0, outer_code: PLUS, index_code: REG,
5658 loc: &XEXP (x, 1), opnum, type, ind_levels,
5659 insn);
5660 }
5661
5662 else if (code1 == REG)
5663 {
5664 find_reloads_address_1 (mode, as, x: orig_op1, context: 1, outer_code: PLUS, index_code: SCRATCH,
5665 loc: &XEXP (x, 1), opnum, type, ind_levels,
5666 insn);
5667 find_reloads_address_1 (mode, as, x: orig_op0, context: 0, outer_code: PLUS, index_code: REG,
5668 loc: &XEXP (x, 0), opnum, type, ind_levels,
5669 insn);
5670 }
5671 }
5672
5673 return 0;
5674
5675 case POST_MODIFY:
5676 case PRE_MODIFY:
5677 {
5678 rtx op0 = XEXP (x, 0);
5679 rtx op1 = XEXP (x, 1);
5680 enum rtx_code index_code;
5681 int regno;
5682 int reloadnum;
5683
5684 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5685 return 0;
5686
5687 /* Currently, we only support {PRE,POST}_MODIFY constructs
5688 where a base register is {inc,dec}remented by the contents
5689 of another register or by a constant value. Thus, these
5690 operands must match. */
5691 gcc_assert (op0 == XEXP (op1, 0));
5692
5693 /* Require index register (or constant). Let's just handle the
5694 register case in the meantime... If the target allows
5695 auto-modify by a constant then we could try replacing a pseudo
5696 register with its equivalent constant where applicable.
5697
5698 We also handle the case where the register was eliminated
5699 resulting in a PLUS subexpression.
5700
5701 If we later decide to reload the whole PRE_MODIFY or
5702 POST_MODIFY, inc_for_reload might clobber the reload register
5703 before reading the index. The index register might therefore
5704 need to live longer than a TYPE reload normally would, so be
5705 conservative and class it as RELOAD_OTHER. */
5706 if ((REG_P (XEXP (op1, 1))
5707 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5708 || GET_CODE (XEXP (op1, 1)) == PLUS)
5709 find_reloads_address_1 (mode, as, XEXP (op1, 1), context: 1, outer_code: code, index_code: SCRATCH,
5710 loc: &XEXP (op1, 1), opnum, type: RELOAD_OTHER,
5711 ind_levels, insn);
5712
5713 gcc_assert (REG_P (XEXP (op1, 0)));
5714
5715 regno = REGNO (XEXP (op1, 0));
5716 index_code = GET_CODE (XEXP (op1, 1));
5717
5718 /* A register that is incremented cannot be constant! */
5719 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5720 || reg_equiv_constant (regno) == 0);
5721
5722 /* Handle a register that is equivalent to a memory location
5723 which cannot be addressed directly. */
5724 if (reg_equiv_memory_loc (regno) != 0
5725 && (reg_equiv_address (regno) != 0
5726 || num_not_at_initial_offset))
5727 {
5728 rtx tem = make_memloc (XEXP (x, 0), regno);
5729
5730 if (reg_equiv_address (regno)
5731 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5732 {
5733 rtx orig = tem;
5734
5735 /* First reload the memory location's address.
5736 We can't use ADDR_TYPE (type) here, because we need to
5737 write back the value after reading it, hence we actually
5738 need two registers. */
5739 find_reloads_address (GET_MODE (tem), memrefloc: &tem, XEXP (tem, 0),
5740 loc: &XEXP (tem, 0), opnum,
5741 type: RELOAD_OTHER,
5742 ind_levels, insn);
5743
5744 if (!rtx_equal_p (tem, orig))
5745 push_reg_equiv_alt_mem (regno, mem: tem);
5746
5747 /* Then reload the memory location into a base
5748 register. */
5749 reloadnum = push_reload (in: tem, out: tem, inloc: &XEXP (x, 0),
5750 outloc: &XEXP (op1, 0),
5751 rclass: base_reg_class (mode, as,
5752 outer_code: code, index_code,
5753 insn),
5754 GET_MODE (x), GET_MODE (x), strict_low: 0,
5755 optional: 0, opnum, type: RELOAD_OTHER);
5756
5757 update_auto_inc_notes (insn: this_insn, regno, reloadnum);
5758 return 0;
5759 }
5760 }
5761
5762 if (reg_renumber[regno] >= 0)
5763 regno = reg_renumber[regno];
5764
5765 /* We require a base register here... */
5766 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, outer_code: code, index_code))
5767 {
5768 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5769 inloc: &XEXP (op1, 0), outloc: &XEXP (x, 0),
5770 rclass: base_reg_class (mode, as,
5771 outer_code: code, index_code,
5772 insn),
5773 GET_MODE (x), GET_MODE (x), strict_low: 0, optional: 0,
5774 opnum, type: RELOAD_OTHER);
5775
5776 update_auto_inc_notes (insn: this_insn, regno, reloadnum);
5777 return 0;
5778 }
5779 }
5780 return 0;
5781
5782 case POST_INC:
5783 case POST_DEC:
5784 case PRE_INC:
5785 case PRE_DEC:
5786 if (REG_P (XEXP (x, 0)))
5787 {
5788 int regno = REGNO (XEXP (x, 0));
5789 int value = 0;
5790 rtx x_orig = x;
5791
5792 /* A register that is incremented cannot be constant! */
5793 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5794 || reg_equiv_constant (regno) == 0);
5795
5796 /* Handle a register that is equivalent to a memory location
5797 which cannot be addressed directly. */
5798 if (reg_equiv_memory_loc (regno) != 0
5799 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5800 {
5801 rtx tem = make_memloc (XEXP (x, 0), regno);
5802 if (reg_equiv_address (regno)
5803 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5804 {
5805 rtx orig = tem;
5806
5807 /* First reload the memory location's address.
5808 We can't use ADDR_TYPE (type) here, because we need to
5809 write back the value after reading it, hence we actually
5810 need two registers. */
5811 find_reloads_address (GET_MODE (tem), memrefloc: &tem, XEXP (tem, 0),
5812 loc: &XEXP (tem, 0), opnum, type,
5813 ind_levels, insn);
5814 reloaded_inner_of_autoinc = true;
5815 if (!rtx_equal_p (tem, orig))
5816 push_reg_equiv_alt_mem (regno, mem: tem);
5817 /* Put this inside a new increment-expression. */
5818 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5819 /* Proceed to reload that, as if it contained a register. */
5820 }
5821 }
5822
5823 /* If we have a hard register that is ok in this incdec context,
5824 don't make a reload. If the register isn't suitable for
5825 autoincdec, we can reload it. But if an autoincrement of a
5826 register that we have verified here as suitable is still not
5827 "valid" in the surrounding context, then no autoincrement can
5828 be "valid"; if something generated one anyway, this must be a
5829 special context where one is allowed.
5830 (For example, a "push" instruction.)
5831 We can't improve this address, so leave it alone. */
5832
5833 /* Otherwise, reload the autoincrement into a suitable hard reg
5834 and record how much to increment by. */
5835
5836 if (reg_renumber[regno] >= 0)
5837 regno = reg_renumber[regno];
5838 if (regno >= FIRST_PSEUDO_REGISTER
5839 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5840 index_code))
5841 {
5842 int reloadnum;
5843
5844 /* If we can output the register afterwards, do so; this
5845 saves the extra update.
5846 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5847 CALL_INSN.
5848 But don't do this if we cannot directly address the
5849 memory location, since this will make it harder to
5850 reuse address reloads, and increases register pressure.
5851 Also don't do this if we can probably update x directly. */
5852 rtx equiv = (MEM_P (XEXP (x, 0))
5853 ? XEXP (x, 0)
5854 : reg_equiv_mem (regno));
5855 enum insn_code icode = optab_handler (op: add_optab, GET_MODE (x));
5856 if (insn && NONJUMP_INSN_P (insn)
5857 && (regno < FIRST_PSEUDO_REGISTER
5858 || (equiv
5859 && memory_operand (equiv, GET_MODE (equiv))
5860 && ! (icode != CODE_FOR_nothing
5861 && insn_operand_matches (icode, opno: 0, operand: equiv)
5862 && insn_operand_matches (icode, opno: 1, operand: equiv))))
5863 /* Using RELOAD_OTHER means we emit this and the reload we
5864 made earlier in the wrong order. */
5865 && !reloaded_inner_of_autoinc)
5866 {
5867 /* We use the original pseudo for loc, so that
5868 emit_reload_insns() knows which pseudo this
5869 reload refers to and updates the pseudo rtx, not
5870 its equivalent memory location, as well as the
5871 corresponding entry in reg_last_reload_reg. */
5872 loc = &XEXP (x_orig, 0);
5873 x = XEXP (x, 0);
5874 reloadnum
5875 = push_reload (in: x, out: x, inloc: loc, outloc: loc,
5876 rclass: context_reg_class,
5877 GET_MODE (x), GET_MODE (x), strict_low: 0, optional: 0,
5878 opnum, type: RELOAD_OTHER);
5879 }
5880 else
5881 {
5882 reloadnum
5883 = push_reload (in: x, out: x, inloc: loc, outloc: (rtx*) 0,
5884 rclass: context_reg_class,
5885 GET_MODE (x), GET_MODE (x), strict_low: 0, optional: 0,
5886 opnum, type);
5887 rld[reloadnum].inc
5888 = find_inc_amount (PATTERN (insn: this_insn), XEXP (x_orig, 0));
5889
5890 value = 1;
5891 }
5892
5893 update_auto_inc_notes (insn: this_insn, REGNO (XEXP (x_orig, 0)),
5894 reloadnum);
5895 }
5896 return value;
5897 }
5898 return 0;
5899
5900 case TRUNCATE:
5901 case SIGN_EXTEND:
5902 case ZERO_EXTEND:
5903 /* Look for parts to reload in the inner expression and reload them
5904 too, in addition to this operation. Reloading all inner parts in
5905 addition to this one shouldn't be necessary, but at this point,
5906 we don't know if we can possibly omit any part that *can* be
5907 reloaded. Targets that are better off reloading just either part
5908 (or perhaps even a different part of an outer expression), should
5909 define LEGITIMIZE_RELOAD_ADDRESS. */
5910 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5911 context, outer_code: code, index_code: SCRATCH, loc: &XEXP (x, 0), opnum,
5912 type, ind_levels, insn);
5913 push_reload (in: x, NULL_RTX, inloc: loc, outloc: (rtx*) 0,
5914 rclass: context_reg_class,
5915 GET_MODE (x), VOIDmode, strict_low: 0, optional: 0, opnum, type);
5916 return 1;
5917
5918 case MEM:
5919 /* This is probably the result of a substitution, by eliminate_regs, of
5920 an equivalent address for a pseudo that was not allocated to a hard
5921 register. Verify that the specified address is valid and reload it
5922 into a register.
5923
5924 Since we know we are going to reload this item, don't decrement for
5925 the indirection level.
5926
5927 Note that this is actually conservative: it would be slightly more
5928 efficient to use the value of SPILL_INDIRECT_LEVELS from
5929 reload1.cc here. */
5930
5931 find_reloads_address (GET_MODE (x), memrefloc: loc, XEXP (x, 0), loc: &XEXP (x, 0),
5932 opnum, ADDR_TYPE (type), ind_levels, insn);
5933 push_reload (in: *loc, NULL_RTX, inloc: loc, outloc: (rtx*) 0,
5934 rclass: context_reg_class,
5935 GET_MODE (x), VOIDmode, strict_low: 0, optional: 0, opnum, type);
5936 return 1;
5937
5938 case REG:
5939 {
5940 int regno = REGNO (x);
5941
5942 if (reg_equiv_constant (regno) != 0)
5943 {
5944 find_reloads_address_part (reg_equiv_constant (regno), loc,
5945 context_reg_class,
5946 GET_MODE (x), opnum, type, ind_levels);
5947 return 1;
5948 }
5949
5950#if 0 /* This might screw code in reload1.cc to delete prior output-reload
5951 that feeds this insn. */
5952 if (reg_equiv_mem (regno) != 0)
5953 {
5954 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5955 context_reg_class,
5956 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5957 return 1;
5958 }
5959#endif
5960
5961 if (reg_equiv_memory_loc (regno)
5962 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5963 {
5964 rtx tem = make_memloc (x, regno);
5965 if (reg_equiv_address (regno) != 0
5966 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5967 {
5968 x = tem;
5969 find_reloads_address (GET_MODE (x), memrefloc: &x, XEXP (x, 0),
5970 loc: &XEXP (x, 0), opnum, ADDR_TYPE (type),
5971 ind_levels, insn);
5972 if (!rtx_equal_p (x, tem))
5973 push_reg_equiv_alt_mem (regno, mem: x);
5974 }
5975 }
5976
5977 if (reg_renumber[regno] >= 0)
5978 regno = reg_renumber[regno];
5979
5980 if (regno >= FIRST_PSEUDO_REGISTER
5981 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5982 index_code))
5983 {
5984 push_reload (in: x, NULL_RTX, inloc: loc, outloc: (rtx*) 0,
5985 rclass: context_reg_class,
5986 GET_MODE (x), VOIDmode, strict_low: 0, optional: 0, opnum, type);
5987 return 1;
5988 }
5989
5990 /* If a register appearing in an address is the subject of a CLOBBER
5991 in this insn, reload it into some other register to be safe.
5992 The CLOBBER is supposed to make the register unavailable
5993 from before this insn to after it. */
5994 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5995 {
5996 push_reload (in: x, NULL_RTX, inloc: loc, outloc: (rtx*) 0,
5997 rclass: context_reg_class,
5998 GET_MODE (x), VOIDmode, strict_low: 0, optional: 0, opnum, type);
5999 return 1;
6000 }
6001 }
6002 return 0;
6003
6004 case SUBREG:
6005 if (REG_P (SUBREG_REG (x)))
6006 {
6007 /* If this is a SUBREG of a hard register and the resulting register
6008 is of the wrong class, reload the whole SUBREG. This avoids
6009 needless copies if SUBREG_REG is multi-word. */
6010 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6011 {
6012 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6013
6014 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6015 index_code))
6016 {
6017 push_reload (in: x, NULL_RTX, inloc: loc, outloc: (rtx*) 0,
6018 rclass: context_reg_class,
6019 GET_MODE (x), VOIDmode, strict_low: 0, optional: 0, opnum, type);
6020 return 1;
6021 }
6022 }
6023 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6024 is larger than the class size, then reload the whole SUBREG. */
6025 else
6026 {
6027 enum reg_class rclass = context_reg_class;
6028 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6029 > reg_class_size[(int) rclass])
6030 {
6031 /* If the inner register will be replaced by a memory
6032 reference, we can do this only if we can replace the
6033 whole subreg by a (narrower) memory reference. If
6034 this is not possible, fall through and reload just
6035 the inner register (including address reloads). */
6036 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6037 {
6038 rtx tem = find_reloads_subreg_address (x, opnum,
6039 ADDR_TYPE (type),
6040 ind_levels, insn,
6041 NULL);
6042 if (tem)
6043 {
6044 push_reload (in: tem, NULL_RTX, inloc: loc, outloc: (rtx*) 0, rclass,
6045 GET_MODE (tem), VOIDmode, strict_low: 0, optional: 0,
6046 opnum, type);
6047 return 1;
6048 }
6049 }
6050 else
6051 {
6052 push_reload (in: x, NULL_RTX, inloc: loc, outloc: (rtx*) 0, rclass,
6053 GET_MODE (x), VOIDmode, strict_low: 0, optional: 0, opnum, type);
6054 return 1;
6055 }
6056 }
6057 }
6058 }
6059 break;
6060
6061 default:
6062 break;
6063 }
6064
6065 {
6066 const char *fmt = GET_RTX_FORMAT (code);
6067 int i;
6068
6069 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6070 {
6071 if (fmt[i] == 'e')
6072 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6073 we get here. */
6074 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6075 outer_code: code, index_code: SCRATCH, loc: &XEXP (x, i),
6076 opnum, type, ind_levels, insn);
6077 }
6078 }
6079
6080#undef REG_OK_FOR_CONTEXT
6081 return 0;
6082}
6083
6084/* X, which is found at *LOC, is a part of an address that needs to be
6085 reloaded into a register of class RCLASS. If X is a constant, or if
6086 X is a PLUS that contains a constant, check that the constant is a
6087 legitimate operand and that we are supposed to be able to load
6088 it into the register.
6089
6090 If not, force the constant into memory and reload the MEM instead.
6091
6092 MODE is the mode to use, in case X is an integer constant.
6093
6094 OPNUM and TYPE describe the purpose of any reloads made.
6095
6096 IND_LEVELS says how many levels of indirect addressing this machine
6097 supports. */
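/* For example (illustrative): if X is (symbol_ref "x") and such a
   constant cannot be loaded directly into RCLASS, X is forced into the
   constant pool and the resulting constant-pool MEM is reloaded
   instead, after its own address has been processed by
   find_reloads_address.  */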
6098
6099static void
6100find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6101 machine_mode mode, int opnum,
6102 enum reload_type type, int ind_levels)
6103{
6104 if (CONSTANT_P (x)
6105 && (!targetm.legitimate_constant_p (mode, x)
6106 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6107 {
6108 x = force_const_mem (mode, x);
6109 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6110 opnum, type, ind_levels, 0);
6111 }
6112
6113 else if (GET_CODE (x) == PLUS
6114 && CONSTANT_P (XEXP (x, 1))
6115 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6116 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6117 == NO_REGS))
6118 {
6119 rtx tem;
6120
6121 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6122 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6123 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6124 opnum, type, ind_levels, 0);
6125 }
6126
6127 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6128 mode, VOIDmode, 0, 0, opnum, type);
6129}
6130
6131/* X, a subreg of a pseudo, is a part of an address that needs to be
6132 reloaded, and the pseudo is equivalent to a memory location.
6133
6134 Attempt to replace the whole subreg by a (possibly narrower or wider)
6135 memory reference. If this is possible, return this new memory
6136 reference, and push all required address reloads. Otherwise,
6137 return NULL.
6138
6139 OPNUM and TYPE identify the purpose of the reload.
6140
6141 IND_LEVELS says how many levels of indirect addressing are
6142 supported at this point in the address.
6143
6144 INSN, if nonzero, is the insn in which we do the reload. It is used
6145 to determine where to put USEs for pseudos that we have to replace with
6146 stack slots. */
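/* A small illustration (modes and offsets invented): for
   X == (subreg:SI (reg:DI 123) 4), where pseudo 123 is equivalent to
   (mem:DI (plus (reg fp) (const_int -16))), the replacement is the
   narrower reference (mem:SI (plus (reg fp) (const_int -12))),
   provided the subreg is not paradoxical and the new address is valid
   or can be made valid by the address reloads pushed here.  */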
6147
6148static rtx
6149find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6150 int ind_levels, rtx_insn *insn,
6151 int *address_reloaded)
6152{
6153 machine_mode outer_mode = GET_MODE (x);
6154 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6155 int regno = REGNO (SUBREG_REG (x));
6156 int reloaded = 0;
6157 rtx tem, orig;
6158 poly_int64 offset;
6159
6160 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6161
6162 /* We cannot replace the subreg with a modified memory reference if:
6163
6164 - we have a paradoxical subreg that implicitly acts as a zero or
6165 sign extension operation due to LOAD_EXTEND_OP;
6166
6167 - we have a subreg that is implicitly supposed to act on the full
6168 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6169
6170 - the address of the equivalent memory location is mode-dependent; or
6171
6172 - we have a paradoxical subreg and the resulting memory is not
6173 sufficiently aligned to allow access in the wider mode.
6174
6175 In addition, we choose not to perform the replacement for *any*
6176 paradoxical subreg, even if it were possible in principle. This
6177 is to avoid generating wider memory references than necessary.
6178
6179 This corresponds to how previous versions of reload used to handle
6180 paradoxical subregs where no address reload was required. */
6181
6182 if (paradoxical_subreg_p (x))
6183 return NULL;
6184
6185 if (WORD_REGISTER_OPERATIONS
6186 && partial_subreg_p (outer_mode, inner_mode)
6187 && known_equal_after_align_down (GET_MODE_SIZE (outer_mode) - 1,
6188 GET_MODE_SIZE (inner_mode) - 1,
6189 UNITS_PER_WORD))
6190 return NULL;
6191
6192 /* Since we don't attempt to handle paradoxical subregs, we can just
6193 call into simplify_subreg, which will handle all remaining checks
6194 for us. */
6195 orig = make_memloc (SUBREG_REG (x), regno);
6196 offset = SUBREG_BYTE (x);
6197 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6198 if (!tem || !MEM_P (tem))
6199 return NULL;
6200
6201 /* Now push all required address reloads, if any. */
6202 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6203 XEXP (tem, 0), &XEXP (tem, 0),
6204 opnum, type, ind_levels, insn);
6205 /* ??? Do we need to handle nonzero offsets somehow? */
6206 if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6207 push_reg_equiv_alt_mem (regno, tem);
6208
6209 /* For some processors an address may be valid in the original mode but
6210 not in a smaller mode. For example, ARM accepts a scaled index register
6211 in SImode but not in HImode. Note that this is only a problem if the
6212 address in reg_equiv_mem is already invalid in the new mode; other
6213 cases would be fixed by find_reloads_address as usual.
6214
6215 ??? We attempt to handle such cases here by doing an additional reload
6216 of the full address after the usual processing by find_reloads_address.
6217 Note that this may not work in the general case, but it seems to cover
6218 the cases where this situation currently occurs. A more general fix
6219 might be to reload the *value* instead of the address, but this would
6220 not be expected by the callers of this routine as-is.
6221
6222 If find_reloads_address already completely replaced the address, there
6223 is nothing further to do. */
6224 if (reloaded == 0
6225 && reg_equiv_mem (regno) != 0
6226 && !strict_memory_address_addr_space_p
6227 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6228 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6229 {
6230 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6231 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6232 MEM, SCRATCH, insn),
6233 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6234 reloaded = 1;
6235 }
6236
6237 /* If this is not a toplevel operand, find_reloads doesn't see this
6238 substitution. We have to emit a USE of the pseudo so that
6239 delete_output_reload can see it. */
6240 if (replace_reloads && recog_data.operand[opnum] != x)
6241 /* We mark the USE with QImode so that we recognize it as one that
6242 can be safely deleted at the end of reload. */
6243 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6244 QImode);
6245
6246 if (address_reloaded)
6247 *address_reloaded = reloaded;
6248
6249 return tem;
6250}
6251
6252/* Substitute into the current INSN the registers into which we have reloaded
6253 the things that need reloading. The array `replacements'
6254 contains the locations of all pointers that must be changed
6255 and says what to replace them with. */
6258
6259void
6260subst_reloads (rtx_insn *insn)
6261{
6262 int i;
6263
6264 for (i = 0; i < n_replacements; i++)
6265 {
6266 struct replacement *r = &replacements[i];
6267 rtx reloadreg = rld[r->what].reg_rtx;
6268 if (reloadreg)
6269 {
6270#ifdef DEBUG_RELOAD
6271 /* This checking takes a very long time on some platforms
6272 causing the gcc.c-torture/compile/limits-fnargs.c test
6273 to time out during testing. See PR 31850.
6274
6275 Internal consistency test. Check that we don't modify
6276 anything in the equivalence arrays. Whenever something from
6277 those arrays needs to be reloaded, it must be unshared before
6278 being substituted into; the equivalence must not be modified.
6279 Otherwise, if the equivalence is used after that, it will
6280 have been modified, and the thing substituted (probably a
6281 register) is likely overwritten and not a usable equivalence. */
6282 int check_regno;
6283
6284 for (check_regno = 0; check_regno < max_regno; check_regno++)
6285 {
6286#define CHECK_MODF(ARRAY) \
6287 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6288 || !loc_mentioned_in_p (r->where, \
6289 (*reg_equivs)[check_regno].ARRAY))
6290
6291 CHECK_MODF (constant);
6292 CHECK_MODF (memory_loc);
6293 CHECK_MODF (address);
6294 CHECK_MODF (mem);
6295#undef CHECK_MODF
6296 }
6297#endif /* DEBUG_RELOAD */
6298
6299 /* If we're replacing a LABEL_REF with a register, there must
6300 already be an indication (to e.g. flow) which label this
6301 register refers to. */
6302 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6303 || !JUMP_P (insn)
6304 || find_reg_note (insn,
6305 REG_LABEL_OPERAND,
6306 XEXP (*r->where, 0))
6307 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6308
6309 /* Encapsulate RELOADREG so its machine mode matches what
6310 used to be there. Note that gen_lowpart_common will
6311 do the wrong thing if RELOADREG is multi-word. RELOADREG
6312 will always be a REG here. */
6313 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6314 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6315
6316 *r->where = reloadreg;
6317 }
6318 /* If reload got no reg and isn't optional, something's wrong. */
6319 else
6320 gcc_assert (rld[r->what].optional);
6321 }
6322}
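/* A minimal worked example of the data flow above, with made-up indices:
   suppose reload 2 was assigned hard register 3, i.e. rld[2].reg_rtx is
   (reg:SI 3), and some replacements[i] records

       what = 2, where = &XEXP (PATTERN (insn), 1), mode = SImode.

   The loop then stores (reg:SI 3) through that location, first passing the
   register through reload_adjust_reg_for_mode if its mode differs from the
   recorded mode.  */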
6323
6324/* Make a copy of any replacements being done into X and move those
6325 copies to locations in Y, a copy of X. */
6326
6327void
6328copy_replacements (rtx x, rtx y)
6329{
6330 copy_replacements_1 (&x, &y, n_replacements);
6331}
6332
6333static void
6334copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6335{
6336 int i, j;
6337 rtx x, y;
6338 struct replacement *r;
6339 enum rtx_code code;
6340 const char *fmt;
6341
6342 for (j = 0; j < orig_replacements; j++)
6343 if (replacements[j].where == px)
6344 {
6345 r = &replacements[n_replacements++];
6346 r->where = py;
6347 r->what = replacements[j].what;
6348 r->mode = replacements[j].mode;
6349 }
6350
6351 x = *px;
6352 y = *py;
6353 code = GET_CODE (x);
6354 fmt = GET_RTX_FORMAT (code);
6355
6356 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6357 {
6358 if (fmt[i] == 'e')
6359 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6360 else if (fmt[i] == 'E')
6361 for (j = XVECLEN (x, i); --j >= 0; )
6362 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6363 orig_replacements);
6364 }
6365}
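/* Hypothetical use of copy_replacements: when a caller duplicates an rtx
   that may still have pending replacements, e.g.

       rtx copy = copy_rtx (orig);
       copy_replacements (orig, copy);

   every replacement recorded against a location inside ORIG is duplicated
   for the corresponding location inside COPY, so that subst_reloads later
   updates both copies consistently.  */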
6366
6367/* Change any replacements being done to *X to be done to *Y. */
6368
6369void
6370move_replacements (rtx *x, rtx *y)
6371{
6372 int i;
6373
6374 for (i = 0; i < n_replacements; i++)
6375 if (replacements[i].where == x)
6376 replacements[i].where = y;
6377}
6378
6379/* If LOC was scheduled to be replaced by something, return the replacement.
6380 Otherwise, return *LOC. */
6381
6382rtx
6383find_replacement (rtx *loc)
6384{
6385 struct replacement *r;
6386
6387 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6388 {
6389 rtx reloadreg = rld[r->what].reg_rtx;
6390
6391 if (reloadreg && r->where == loc)
6392 {
6393 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6394 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6395
6396 return reloadreg;
6397 }
6398 else if (reloadreg && GET_CODE (*loc) == SUBREG
6399 && r->where == &SUBREG_REG (*loc))
6400 {
6401 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6402 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6403
6404 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6405 GET_MODE (SUBREG_REG (*loc)),
6406 SUBREG_BYTE (*loc));
6407 }
6408 }
6409
6410 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6411 what's inside and make a new rtl if so. */
6412 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6413 || GET_CODE (*loc) == MULT)
6414 {
6415 rtx x = find_replacement (&XEXP (*loc, 0));
6416 rtx y = find_replacement (&XEXP (*loc, 1));
6417
6418 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6419 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6420 }
6421
6422 return *loc;
6423}
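/* Sketch of a typical (hypothetical) use from code that inspects an operand
   while reloads are still pending:

       rtx addr = find_replacement (&XEXP (mem_op, 0));

   If a reload register was chosen for that address, ADDR is the reload
   register (adjusted to the recorded mode); otherwise ADDR is the original
   address, with PLUS, MINUS or MULT subexpressions rebuilt if any of their
   operands were scheduled for replacement.  MEM_OP here is just a stand-in
   for whatever MEM the caller is looking at.  */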
6424
6425/* Return nonzero if register in range [REGNO, ENDREGNO)
6426 appears either explicitly or implicitly in X
6427 other than being stored into (except for earlyclobber operands).
6428
6429 References contained within the substructure at LOC do not count.
6430 LOC may be zero, meaning don't ignore anything.
6431
6432 This is similar to refers_to_regno_p in rtlanal.cc except that we
6433 look at equivalences for pseudos that didn't get hard registers. */
6434
6435static int
6436refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6437 rtx x, rtx *loc)
6438{
6439 int i;
6440 unsigned int r;
6441 RTX_CODE code;
6442 const char *fmt;
6443
6444 if (x == 0)
6445 return 0;
6446
6447 repeat:
6448 code = GET_CODE (x);
6449
6450 switch (code)
6451 {
6452 case REG:
6453 r = REGNO (x);
6454
6455 /* If this is a pseudo, a hard register must not have been allocated.
6456 X must therefore either be a constant or be in memory. */
6457 if (r >= FIRST_PSEUDO_REGISTER)
6458 {
6459 if (reg_equiv_memory_loc (r))
6460 return refers_to_regno_for_reload_p (regno, endregno,
6461 reg_equiv_memory_loc (r),
6462 (rtx*) 0);
6463
6464 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6465 return 0;
6466 }
6467
6468 return endregno > r && regno < END_REGNO (x);
6469
6470 case SUBREG:
6471 /* If this is a SUBREG of a hard reg, we can see exactly which
6472 registers are being modified. Otherwise, handle normally. */
6473 if (REG_P (SUBREG_REG (x))
6474 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6475 {
6476 unsigned int inner_regno = subreg_regno (x);
6477 unsigned int inner_endregno
6478 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6479 ? subreg_nregs (x) : 1);
6480
6481 return endregno > inner_regno && regno < inner_endregno;
6482 }
6483 break;
6484
6485 case CLOBBER:
6486 case SET:
6487 if (&SET_DEST (x) != loc
6488 /* Note setting a SUBREG counts as referring to the REG it is in for
6489 a pseudo but not for hard registers since we can
6490 treat each word individually. */
6491 && ((GET_CODE (SET_DEST (x)) == SUBREG
6492 && loc != &SUBREG_REG (SET_DEST (x))
6493 && REG_P (SUBREG_REG (SET_DEST (x)))
6494 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6495 && refers_to_regno_for_reload_p (regno, endregno,
6496 SUBREG_REG (SET_DEST (x)),
6497 loc))
6498 /* If the output is an earlyclobber operand, this is
6499 a conflict. */
6500 || ((!REG_P (SET_DEST (x))
6501 || earlyclobber_operand_p (SET_DEST (x)))
6502 && refers_to_regno_for_reload_p (regno, endregno,
6503 SET_DEST (x), loc))))
6504 return 1;
6505
6506 if (code == CLOBBER || loc == &SET_SRC (x))
6507 return 0;
6508 x = SET_SRC (x);
6509 goto repeat;
6510
6511 default:
6512 break;
6513 }
6514
6515 /* X does not match, so try its subexpressions. */
6516
6517 fmt = GET_RTX_FORMAT (code);
6518 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6519 {
6520 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6521 {
6522 if (i == 0)
6523 {
6524 x = XEXP (x, 0);
6525 goto repeat;
6526 }
6527 else
6528 if (refers_to_regno_for_reload_p (regno, endregno,
6529 XEXP (x, i), loc))
6530 return 1;
6531 }
6532 else if (fmt[i] == 'E')
6533 {
6534 int j;
6535 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6536 if (loc != &XVECEXP (x, i, j)
6537 && refers_to_regno_for_reload_p (regno, endregno,
6538 XVECEXP (x, i, j), loc))
6539 return 1;
6540 }
6541 }
6542 return 0;
6543}
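/* Worked example of the pseudo-equivalence handling above, with made-up
   numbers: suppose pseudo 270 did not get a hard register and
   reg_equiv_memory_loc (270) is (mem:SI (plus:SI (reg:SI 6) (const_int -8))).
   Then asking whether hard register 6 is referenced in (reg:SI 270), i.e.

       refers_to_regno_for_reload_p (6, 7, regno_reg_rtx[270], NULL)

   returns nonzero, because the equivalent memory address uses hard register
   6, whereas plain refers_to_regno_p in rtlanal.cc would see only the pseudo
   and return zero.  */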
6544
6545/* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6546 we check if any register number in X conflicts with the relevant register
6547 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6548 contains a MEM (we don't bother checking for memory addresses that can't
6549 conflict because we expect this to be a rare case).
6550
6551 This function is similar to reg_overlap_mentioned_p in rtlanal.cc except
6552 that we look at equivalences for pseudos that didn't get hard registers. */
6553
6554int
6555reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6556{
6557 int regno, endregno;
6558
6559 /* Overly conservative. */
6560 if (GET_CODE (x) == STRICT_LOW_PART
6561 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6562 x = XEXP (x, 0);
6563
6564 /* If either argument is a constant, then modifying X cannot affect IN. */
6565 if (CONSTANT_P (x) || CONSTANT_P (in))
6566 return 0;
6567 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6568 return refers_to_mem_for_reload_p (in);
6569 else if (GET_CODE (x) == SUBREG)
6570 {
6571 regno = REGNO (SUBREG_REG (x));
6572 if (regno < FIRST_PSEUDO_REGISTER)
6573 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6574 GET_MODE (SUBREG_REG (x)),
6575 SUBREG_BYTE (x),
6576 GET_MODE (x));
6577 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6578 ? subreg_nregs (x) : 1);
6579
6580 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6581 }
6582 else if (REG_P (x))
6583 {
6584 regno = REGNO (x);
6585
6586 /* If this is a pseudo, it must not have been assigned a hard register.
6587 Therefore, it must either be in memory or be a constant. */
6588
6589 if (regno >= FIRST_PSEUDO_REGISTER)
6590 {
6591 if (reg_equiv_memory_loc (regno))
6592 return refers_to_mem_for_reload_p (in);
6593 gcc_assert (reg_equiv_constant (regno));
6594 return 0;
6595 }
6596
6597 endregno = END_REGNO (x);
6598
6599 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6600 }
6601 else if (MEM_P (x))
6602 return refers_to_mem_for_reload_p (in);
6603 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC)
6604 return reg_mentioned_p (x, in);
6605 else
6606 {
6607 gcc_assert (GET_CODE (x) == PLUS);
6608
6609 /* We actually want to know if X is mentioned somewhere inside IN.
6610 We must not say that (plus (sp) (const_int 124)) is in
6611 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6612 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6613 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6614 while (MEM_P (in))
6615 in = XEXP (in, 0);
6616 if (REG_P (in))
6617 return 0;
6618 else if (GET_CODE (in) == PLUS)
6619 return (rtx_equal_p (x, in)
6620 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6621 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6622 else
6623 return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6624 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6625 }
6626}
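/* Concrete instance of the stack-pointer caveat documented above: with
   X = (plus (reg sp) (const_int 124)) and IN = (mem:SI (plus (reg sp)
   (const_int 64))), the MEM wrapper of IN is stripped, the two PLUS
   expressions compare unequal, and neither sp nor the constant is considered
   to overlap X, so the function returns 0: the two sp-relative addresses are
   treated as disjoint.  */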
6627
6628/* Return nonzero if anything in X contains a MEM. Look also for pseudo
6629 registers. */
6630
6631static int
6632refers_to_mem_for_reload_p (rtx x)
6633{
6634 const char *fmt;
6635 int i;
6636
6637 if (MEM_P (x))
6638 return 1;
6639
6640 if (REG_P (x))
6641 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6642 && reg_equiv_memory_loc (REGNO (x)));
6643
6644 fmt = GET_RTX_FORMAT (GET_CODE (x));
6645 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6646 if (fmt[i] == 'e'
6647 && (MEM_P (XEXP (x, i))
6648 || refers_to_mem_for_reload_p (XEXP (x, i))))
6649 return 1;
6650
6651 return 0;
6652}
6653
6654/* Check the insns before INSN to see if there is a suitable register
6655 containing the same value as GOAL.
6656 If OTHER is -1, look for a register in class RCLASS.
6657 Otherwise, just see if register number OTHER shares GOAL's value.
6658
6659 Return an rtx for the register found, or zero if none is found.
6660
6661 If RELOAD_REG_P is (short *)1,
6662 we reject any hard reg that appears in reload_reg_rtx
6663 because such a hard reg is also needed coming into this insn.
6664
6665 If RELOAD_REG_P is any other nonzero value,
6666 it is a vector indexed by hard reg number
6667 and we reject any hard reg whose element in the vector is nonnegative
6668 as well as any that appears in reload_reg_rtx.
6669
6670 If GOAL is zero, then GOALREG is a register number; we look
6671 for an equivalent for that register.
6672
6673 MODE is the machine mode of the value we want an equivalence for.
6674 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6675
6676 This function is used by jump.cc as well as in the reload pass.
6677
6678 If GOAL is the sum of the stack pointer and a constant, we treat it
6679 as if it were a constant except that sp is required to be unchanging. */
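/* Illustrative (hypothetical) call, to make the parameter conventions above
   concrete: a caller that wants to reuse a register already holding the
   value of a stack slot MEM, without restricting the choice to a particular
   hard register, might write

       rtx equiv = find_equiv_reg (mem, insn, GENERAL_REGS, -1,
                                   reload_reg_used_here, 0, GET_MODE (mem));

   where RELOAD_REG_USED_HERE stands for whatever reload_reg_p vector the
   caller maintains (or (short *) 1, or a null pointer, as described above).
   A nonzero result is a hard register rtx that can be used instead of
   reloading MEM again.  */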
6680
6681rtx
6682find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6683 short *reload_reg_p, int goalreg, machine_mode mode)
6684{
6685 rtx_insn *p = insn;
6686 rtx goaltry, valtry, value;
6687 rtx_insn *where;
6688 rtx pat;
6689 int regno = -1;
6690 int valueno;
6691 int goal_mem = 0;
6692 int goal_const = 0;
6693 int goal_mem_addr_varies = 0;
6694 int need_stable_sp = 0;
6695 int nregs;
6696 int valuenregs;
6697 int num = 0;
6698
6699 if (goal == 0)
6700 regno = goalreg;
6701 else if (REG_P (goal))
6702 regno = REGNO (goal);
6703 else if (MEM_P (goal))
6704 {
6705 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6706 if (MEM_VOLATILE_P (goal))
6707 return 0;
6708 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6709 return 0;
6710 /* An address with side effects must be reexecuted. */
6711 switch (code)
6712 {
6713 case POST_INC:
6714 case PRE_INC:
6715 case POST_DEC:
6716 case PRE_DEC:
6717 case POST_MODIFY:
6718 case PRE_MODIFY:
6719 return 0;
6720 default:
6721 break;
6722 }
6723 goal_mem = 1;
6724 }
6725 else if (CONSTANT_P (goal))
6726 goal_const = 1;
6727 else if (GET_CODE (goal) == PLUS
6728 && XEXP (goal, 0) == stack_pointer_rtx
6729 && CONSTANT_P (XEXP (goal, 1)))
6730 goal_const = need_stable_sp = 1;
6731 else if (GET_CODE (goal) == PLUS
6732 && XEXP (goal, 0) == frame_pointer_rtx
6733 && CONSTANT_P (XEXP (goal, 1)))
6734 goal_const = 1;
6735 else
6736 return 0;
6737
6738 num = 0;
6739 /* Scan insns back from INSN, looking for one that copies
6740 a value into or out of GOAL.
6741 Stop and give up if we reach a label. */
6742
6743 while (1)
6744 {
6745 p = PREV_INSN (p);
6746 if (p && DEBUG_INSN_P (p))
6747 continue;
6748 num++;
6749 if (p == 0 || LABEL_P (p)
6750 || num > param_max_reload_search_insns)
6751 return 0;
6752
6753 /* Don't reuse register contents from before a setjmp-type
6754 function call; on the second return (from the longjmp) it
6755 might have been clobbered by a later reuse. It doesn't
6756 seem worthwhile to actually go and see if it is actually
6757 reused even if that information would be readily available;
6758 just don't reuse it across the setjmp call. */
6759 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6760 return 0;
6761
6762 if (NONJUMP_INSN_P (p)
6763 /* If we don't want spill regs ... */
6764 && (! (reload_reg_p != 0
6765 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6766 /* ... then ignore insns introduced by reload; they aren't
6767 useful and can cause results in reload_as_needed to be
6768 different from what they were when calculating the need for
6769 spills. If we notice an input-reload insn here, we will
6770 reject it below, but it might hide a usable equivalent.
6771 That makes bad code. It may even fail: perhaps no reg was
6772 spilled for this insn because it was assumed we would find
6773 that equivalent. */
6774 || INSN_UID (p) < reload_first_uid))
6775 {
6776 rtx tem;
6777 pat = single_set (p);
6778
6779 /* First check for something that sets some reg equal to GOAL. */
6780 if (pat != 0
6781 && ((regno >= 0
6782 && true_regnum (SET_SRC (pat)) == regno
6783 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6784 ||
6785 (regno >= 0
6786 && true_regnum (SET_DEST (pat)) == regno
6787 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6788 ||
6789 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6790 /* When looking for stack pointer + const,
6791 make sure we don't use a stack adjust. */
6792 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6793 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6794 || (goal_mem
6795 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6796 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6797 || (goal_mem
6798 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6799 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6800 /* If we are looking for a constant,
6801 and something equivalent to that constant was copied
6802 into a reg, we can use that reg. */
6803 || (goal_const && REG_NOTES (p) != 0
6804 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6805 && ((rtx_equal_p (XEXP (tem, 0), goal)
6806 && (valueno
6807 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6808 || (REG_P (SET_DEST (pat))
6809 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6810 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6811 && CONST_INT_P (goal)
6812 && (goaltry = operand_subword (XEXP (tem, 0), 0,
6813 0, VOIDmode)) != 0
6814 && rtx_equal_p (goal, goaltry)
6815 && (valtry
6816 = operand_subword (SET_DEST (pat), 0, 0,
6817 VOIDmode))
6818 && (valueno = true_regnum (valtry)) >= 0)))
6819 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6820 NULL_RTX))
6821 && REG_P (SET_DEST (pat))
6822 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6823 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6824 && CONST_INT_P (goal)
6825 && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6826 VOIDmode)) != 0
6827 && rtx_equal_p (goal, goaltry)
6828 && (valtry
6829 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6830 && (valueno = true_regnum (valtry)) >= 0)))
6831 {
6832 if (other >= 0)
6833 {
6834 if (valueno != other)
6835 continue;
6836 }
6837 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6838 continue;
6839 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6840 mode, valueno))
6841 continue;
6842 value = valtry;
6843 where = p;
6844 break;
6845 }
6846 }
6847 }
6848
6849 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6850 (or copying VALUE into GOAL, if GOAL is also a register).
6851 Now verify that VALUE is really valid. */
6852
6853 /* VALUENO is the register number of VALUE; a hard register. */
6854
6855 /* Don't try to re-use something that is killed in this insn. We want
6856 to be able to trust REG_UNUSED notes. */
6857 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6858 return 0;
6859
6860 /* If we propose to get the value from the stack pointer or if GOAL is
6861 a MEM based on the stack pointer, we need a stable SP. */
6862 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6863 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6864 goal)))
6865 need_stable_sp = 1;
6866
6867 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6868 if (GET_MODE (value) != mode)
6869 return 0;
6870
6871 /* Reject VALUE if it was loaded from GOAL
6872 and is also a register that appears in the address of GOAL. */
6873
6874 if (goal_mem && value == SET_DEST (single_set (where))
6875 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6876 goal, (rtx*) 0))
6877 return 0;
6878
6879 /* Reject registers that overlap GOAL. */
6880
6881 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6882 nregs = hard_regno_nregs (regno, mode);
6883 else
6884 nregs = 1;
6885 valuenregs = hard_regno_nregs (valueno, mode);
6886
6887 if (!goal_mem && !goal_const
6888 && regno + nregs > valueno && regno < valueno + valuenregs)
6889 return 0;
6890
6891 /* Reject VALUE if it is one of the regs reserved for reloads.
6892 Reload1 knows how to reuse them anyway, and it would get
6893 confused if we allocated one without its knowledge.
6894 (Now that insns introduced by reload are ignored above,
6895 this case shouldn't happen, but I'm not positive.) */
6896
6897 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6898 {
6899 int i;
6900 for (i = 0; i < valuenregs; ++i)
6901 if (reload_reg_p[valueno + i] >= 0)
6902 return 0;
6903 }
6904
6905 /* Reject VALUE if it is a register being used for an input reload
6906 even if it is not one of those reserved. */
6907
6908 if (reload_reg_p != 0)
6909 {
6910 int i;
6911 for (i = 0; i < n_reloads; i++)
6912 if (rld[i].reg_rtx != 0
6913 && rld[i].in
6914 && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6915 && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6916 return 0;
6917 }
6918
6919 if (goal_mem)
6920 /* We must treat frame pointer as varying here,
6921 since it can vary--in a nonlocal goto as generated by expand_goto. */
6922 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6923
6924 /* Now verify that the values of GOAL and VALUE remain unaltered
6925 until INSN is reached. */
6926
6927 p = insn;
6928 while (1)
6929 {
6930 p = PREV_INSN (p);
6931 if (p == where)
6932 return value;
6933
6934 /* Don't trust the conversion past a function call
6935 if either of the two is in a call-clobbered register, or memory. */
6936 if (CALL_P (p))
6937 {
6938 if (goal_mem || need_stable_sp)
6939 return 0;
6940
6941 function_abi callee_abi = insn_callee_abi (p);
6942 if (regno >= 0
6943 && regno < FIRST_PSEUDO_REGISTER
6944 && callee_abi.clobbers_reg_p (mode, regno))
6945 return 0;
6946
6947 if (valueno >= 0
6948 && valueno < FIRST_PSEUDO_REGISTER
6949 && callee_abi.clobbers_reg_p (mode, valueno))
6950 return 0;
6951 }
6952
6953 if (INSN_P (p))
6954 {
6955 pat = PATTERN (p);
6956
6957 /* Watch out for unspec_volatile, and volatile asms. */
6958 if (volatile_insn_p (pat))
6959 return 0;
6960
6961 /* If this insn P stores in either GOAL or VALUE, return 0.
6962 If GOAL is a memory ref and this insn writes memory, return 0.
6963 If GOAL is a memory ref and its address is not constant,
6964 and this insn P changes a register used in GOAL, return 0. */
6965
6966 if (GET_CODE (pat) == COND_EXEC)
6967 pat = COND_EXEC_CODE (pat);
6968 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6969 {
6970 rtx dest = SET_DEST (pat);
6971 while (GET_CODE (dest) == SUBREG
6972 || GET_CODE (dest) == ZERO_EXTRACT
6973 || GET_CODE (dest) == STRICT_LOW_PART)
6974 dest = XEXP (dest, 0);
6975 if (REG_P (dest))
6976 {
6977 int xregno = REGNO (dest);
6978 int end_xregno = END_REGNO (dest);
6979 if (xregno < regno + nregs && end_xregno > regno)
6980 return 0;
6981 if (xregno < valueno + valuenregs
6982 && end_xregno > valueno)
6983 return 0;
6984 if (goal_mem_addr_varies
6985 && reg_overlap_mentioned_for_reload_p (dest, goal))
6986 return 0;
6987 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6988 return 0;
6989 }
6990 else if (goal_mem && MEM_P (dest)
6991 && ! push_operand (dest, GET_MODE (dest)))
6992 return 0;
6993 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6994 && reg_equiv_memory_loc (regno) != 0)
6995 return 0;
6996 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6997 return 0;
6998 }
6999 else if (GET_CODE (pat) == PARALLEL)
7000 {
7001 int i;
7002 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7003 {
7004 rtx v1 = XVECEXP (pat, 0, i);
7005 if (GET_CODE (v1) == COND_EXEC)
7006 v1 = COND_EXEC_CODE (v1);
7007 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7008 {
7009 rtx dest = SET_DEST (v1);
7010 while (GET_CODE (dest) == SUBREG
7011 || GET_CODE (dest) == ZERO_EXTRACT
7012 || GET_CODE (dest) == STRICT_LOW_PART)
7013 dest = XEXP (dest, 0);
7014 if (REG_P (dest))
7015 {
7016 int xregno = REGNO (dest);
7017 int end_xregno = END_REGNO (dest);
7018 if (xregno < regno + nregs
7019 && end_xregno > regno)
7020 return 0;
7021 if (xregno < valueno + valuenregs
7022 && end_xregno > valueno)
7023 return 0;
7024 if (goal_mem_addr_varies
7025 && reg_overlap_mentioned_for_reload_p (dest,
7026 goal))
7027 return 0;
7028 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7029 return 0;
7030 }
7031 else if (goal_mem && MEM_P (dest)
7032 && ! push_operand (dest, GET_MODE (dest)))
7033 return 0;
7034 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7035 && reg_equiv_memory_loc (regno) != 0)
7036 return 0;
7037 else if (need_stable_sp
7038 && push_operand (dest, GET_MODE (dest)))
7039 return 0;
7040 }
7041 }
7042 }
7043
7044 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7045 {
7046 rtx link;
7047
7048 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7049 link = XEXP (link, 1))
7050 {
7051 pat = XEXP (link, 0);
7052 if (GET_CODE (pat) == CLOBBER)
7053 {
7054 rtx dest = SET_DEST (pat);
7055
7056 if (REG_P (dest))
7057 {
7058 int xregno = REGNO (dest);
7059 int end_xregno = END_REGNO (dest);
7060
7061 if (xregno < regno + nregs
7062 && end_xregno > regno)
7063 return 0;
7064 else if (xregno < valueno + valuenregs
7065 && end_xregno > valueno)
7066 return 0;
7067 else if (goal_mem_addr_varies
7068 && reg_overlap_mentioned_for_reload_p (dest,
7069 goal))
7070 return 0;
7071 }
7072
7073 else if (goal_mem && MEM_P (dest)
7074 && ! push_operand (dest, GET_MODE (dest)))
7075 return 0;
7076 else if (need_stable_sp
7077 && push_operand (dest, GET_MODE (dest)))
7078 return 0;
7079 }
7080 }
7081 }
7082
7083#if AUTO_INC_DEC
7084 /* If this insn auto-increments or auto-decrements
7085 either regno or valueno, return 0 now.
7086 If GOAL is a memory ref and its address is not constant,
7087 and this insn P increments a register used in GOAL, return 0. */
7088 {
7089 rtx link;
7090
7091 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7092 if (REG_NOTE_KIND (link) == REG_INC
7093 && REG_P (XEXP (link, 0)))
7094 {
7095 int incno = REGNO (XEXP (link, 0));
7096 if (incno < regno + nregs && incno >= regno)
7097 return 0;
7098 if (incno < valueno + valuenregs && incno >= valueno)
7099 return 0;
7100 if (goal_mem_addr_varies
7101 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7102 goal))
7103 return 0;
7104 }
7105 }
7106#endif
7107 }
7108 }
7109}
7110
7111/* Find a place where INCED appears in an increment or decrement operator
7112 within X, and return the amount INCED is incremented or decremented by.
7113 The value is always positive. */
7114
7115static poly_int64
7116find_inc_amount (rtx x, rtx inced)
7117{
7118 enum rtx_code code = GET_CODE (x);
7119 const char *fmt;
7120 int i;
7121
7122 if (code == MEM)
7123 {
7124 rtx addr = XEXP (x, 0);
7125 if ((GET_CODE (addr) == PRE_DEC
7126 || GET_CODE (addr) == POST_DEC
7127 || GET_CODE (addr) == PRE_INC
7128 || GET_CODE (addr) == POST_INC)
7129 && XEXP (addr, 0) == inced)
7130 return GET_MODE_SIZE (GET_MODE (x));
7131 else if ((GET_CODE (addr) == PRE_MODIFY
7132 || GET_CODE (addr) == POST_MODIFY)
7133 && GET_CODE (XEXP (addr, 1)) == PLUS
7134 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7135 && XEXP (addr, 0) == inced
7136 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7137 {
7138 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7139 return i < 0 ? -i : i;
7140 }
7141 }
7142
7143 fmt = GET_RTX_FORMAT (code);
7144 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7145 {
7146 if (fmt[i] == 'e')
7147 {
7148 poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7149 if (maybe_ne (tem, 0))
7150 return tem;
7151 }
7152 if (fmt[i] == 'E')
7153 {
7154 int j;
7155 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7156 {
7157 poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7158 if (maybe_ne (tem, 0))
7159 return tem;
7160 }
7161 }
7162 }
7163
7164 return 0;
7165}
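/* Examples of the amounts returned (assuming a target where SImode is four
   bytes wide): for

       (mem:SI (post_inc:SI (reg:SI r)))

   the amount is 4, the size of the accessed mode; for

       (mem:SI (pre_modify:SI (reg:SI r)
                              (plus:SI (reg:SI r) (const_int -16))))

   the amount is 16, since the result is always reported as a positive
   value.  */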
7166
7167/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7168 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7169
7170static int
7171reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7172 rtx insn)
7173{
7174 rtx link;
7175
7176 if (!AUTO_INC_DEC)
7177 return 0;
7178
7179 gcc_assert (insn);
7180
7181 if (! INSN_P (insn))
7182 return 0;
7183
7184 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7185 if (REG_NOTE_KIND (link) == REG_INC)
7186 {
7187 unsigned int test = (int) REGNO (XEXP (link, 0));
7188 if (test >= regno && test < endregno)
7189 return 1;
7190 }
7191 return 0;
7192}
7193
7194/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7195 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7196 REG_INC. REGNO must refer to a hard register. */
7197
7198int
7199regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7200 int sets)
7201{
7202 /* regno must be a hard register. */
7203 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7204
7205 unsigned int endregno = end_hard_regno (mode, regno);
7206
7207 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7208 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7209 && REG_P (XEXP (PATTERN (insn), 0)))
7210 {
7211 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7212
7213 return test >= regno && test < endregno;
7214 }
7215
7216 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7217 return 1;
7218
7219 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7220 {
7221 int i = XVECLEN (PATTERN (insn), 0) - 1;
7222
7223 for (; i >= 0; i--)
7224 {
7225 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7226 if ((GET_CODE (elt) == CLOBBER
7227 || (sets == 1 && GET_CODE (elt) == SET))
7228 && REG_P (XEXP (elt, 0)))
7229 {
7230 unsigned int test = REGNO (XEXP (elt, 0));
7231
7232 if (test >= regno && test < endregno)
7233 return 1;
7234 }
7235 if (sets == 2
7236 && reg_inc_found_and_valid_p (regno, endregno, elt))
7237 return 1;
7238 }
7239 }
7240
7241 return 0;
7242}
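/* Example behaviour (assuming SImode occupies a single hard register):
   for an insn whose pattern is

       (parallel [(set (reg:SI 0) (reg:SI 1))
                  (clobber (reg:SI 2))])

   regno_clobbered_p (2, insn, SImode, 0) returns 1 because of the CLOBBER,
   regno_clobbered_p (0, insn, SImode, 0) returns 0, and
   regno_clobbered_p (0, insn, SImode, 1) returns 1 because SETs are then
   considered as well.  */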
7243
7244/* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7245rtx
7246reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7247{
7248 int regno;
7249
7250 if (GET_MODE (reloadreg) == mode)
7251 return reloadreg;
7252
7253 regno = REGNO (reloadreg);
7254
7255 if (REG_WORDS_BIG_ENDIAN)
7256 regno += ((int) REG_NREGS (reloadreg)
7257 - (int) hard_regno_nregs (regno, mode));
7258
7259 return gen_rtx_REG (mode, regno);
7260}
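/* For instance (hypothetical numbers, assuming DImode needs two word-sized
   hard registers): if RELOADREG is (reg:DI 4), occupying hard registers 4
   and 5, then asking for its SImode low part yields (reg:SI 4) on a target
   with little-endian register words, but (reg:SI 5) when
   REG_WORDS_BIG_ENDIAN holds, because the low-order word then lives in the
   higher-numbered register.  */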
7261
7262static const char *const reload_when_needed_name[] =
7263{
7264 "RELOAD_FOR_INPUT",
7265 "RELOAD_FOR_OUTPUT",
7266 "RELOAD_FOR_INSN",
7267 "RELOAD_FOR_INPUT_ADDRESS",
7268 "RELOAD_FOR_INPADDR_ADDRESS",
7269 "RELOAD_FOR_OUTPUT_ADDRESS",
7270 "RELOAD_FOR_OUTADDR_ADDRESS",
7271 "RELOAD_FOR_OPERAND_ADDRESS",
7272 "RELOAD_FOR_OPADDR_ADDR",
7273 "RELOAD_OTHER",
7274 "RELOAD_FOR_OTHER_ADDRESS"
7275};
7276
7277/* These functions are used to print the variables set by 'find_reloads' */
7278
7279DEBUG_FUNCTION void
7280debug_reload_to_stream (FILE *f)
7281{
7282 int r;
7283 const char *prefix;
7284
7285 if (! f)
7286 f = stderr;
7287 for (r = 0; r < n_reloads; r++)
7288 {
7289 fprintf (f, "Reload %d: ", r);
7290
7291 if (rld[r].in != 0)
7292 {
7293 fprintf (f, "reload_in (%s) = ",
7294 GET_MODE_NAME (rld[r].inmode));
7295 print_inline_rtx (f, rld[r].in, 24);
7296 fprintf (f, "\n\t");
7297 }
7298
7299 if (rld[r].out != 0)
7300 {
7301 fprintf (f, "reload_out (%s) = ",
7302 GET_MODE_NAME (rld[r].outmode));
7303 print_inline_rtx (f, rld[r].out, 24);
7304 fprintf (f, "\n\t");
7305 }
7306
7307 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7308
7309 fprintf (f, "%s (opnum = %d)",
7310 reload_when_needed_name[(int) rld[r].when_needed],
7311 rld[r].opnum);
7312
7313 if (rld[r].optional)
7314 fprintf (f, ", optional");
7315
7316 if (rld[r].nongroup)
7317 fprintf (f, ", nongroup");
7318
7319 if (maybe_ne (a: rld[r].inc, b: 0))
7320 {
7321 fprintf (f, ", inc by ");
7322 print_dec (rld[r].inc, f, SIGNED);
7323 }
7324
7325 if (rld[r].nocombine)
7326 fprintf (f, ", can't combine");
7327
7328 if (rld[r].secondary_p)
7329 fprintf (f, ", secondary_reload_p");
7330
7331 if (rld[r].in_reg != 0)
7332 {
7333 fprintf (f, "\n\treload_in_reg: ");
7334 print_inline_rtx (f, rld[r].in_reg, 24);
7335 }
7336
7337 if (rld[r].out_reg != 0)
7338 {
7339 fprintf (f, "\n\treload_out_reg: ");
7340 print_inline_rtx (f, rld[r].out_reg, 24);
7341 }
7342
7343 if (rld[r].reg_rtx != 0)
7344 {
7345 fprintf (f, "\n\treload_reg_rtx: ");
7346 print_inline_rtx (f, rld[r].reg_rtx, 24);
7347 }
7348
7349 prefix = "\n\t";
7350 if (rld[r].secondary_in_reload != -1)
7351 {
7352 fprintf (f, "%ssecondary_in_reload = %d",
7353 prefix, rld[r].secondary_in_reload);
7354 prefix = ", ";
7355 }
7356
7357 if (rld[r].secondary_out_reload != -1)
7358 fprintf (f, "%ssecondary_out_reload = %d\n",
7359 prefix, rld[r].secondary_out_reload);
7360
7361 prefix = "\n\t";
7362 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7363 {
7364 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7365 insn_data[rld[r].secondary_in_icode].name);
7366 prefix = ", ";
7367 }
7368
7369 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7370 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7371 insn_data[rld[r].secondary_out_icode].name);
7372
7373 fprintf (f, "\n");
7374 }
7375}
7376
7377DEBUG_FUNCTION void
7378debug_reload (void)
7379{
7380 debug_reload_to_stream (stderr);
7381}
7382
