1/* Emit RTL for the GCC expander.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20
21/* Middle-to-low level generation of rtx code and insns.
22
23 This file contains support functions for creating rtl expressions
24 and manipulating them in the doubly-linked chain of insns.
25
26 The patterns of the insns are created by machine-dependent
27 routines in insn-emit.cc, which is generated automatically from
28 the machine description. These routines make the individual rtx's
29 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30 which are automatically generated from rtl.def; what is machine
31 dependent is the kind of rtx's they make and what arguments they
32 use. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
37#include "memmodel.h"
38#include "backend.h"
39#include "target.h"
40#include "rtl.h"
41#include "tree.h"
42#include "df.h"
43#include "tm_p.h"
44#include "stringpool.h"
45#include "insn-config.h"
46#include "regs.h"
47#include "emit-rtl.h"
48#include "recog.h"
49#include "diagnostic-core.h"
50#include "alias.h"
51#include "fold-const.h"
52#include "varasm.h"
53#include "cfgrtl.h"
54#include "tree-eh.h"
55#include "explow.h"
56#include "expr.h"
57#include "builtins.h"
58#include "rtl-iter.h"
59#include "stor-layout.h"
60#include "opts.h"
61#include "predict.h"
62#include "rtx-vector-builder.h"
63#include "gimple.h"
64#include "gimple-ssa.h"
65#include "gimplify.h"
66
67struct target_rtl default_target_rtl;
68#if SWITCHABLE_TARGET
69struct target_rtl *this_target_rtl = &default_target_rtl;
70#endif
71
72#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
73
74/* Commonly used modes. */
75
76scalar_int_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
77scalar_int_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
78scalar_int_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
79
80/* Datastructures maintained for currently processed function in RTL form. */
81
82struct rtl_data x_rtl;
83
84/* Indexed by pseudo register number, gives the rtx for that pseudo.
85 Allocated in parallel with regno_pointer_align.
86 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
87 with length attribute nested in top level structures. */
88
89rtx * regno_reg_rtx;
90
91/* This is *not* reset after each function. It gives each CODE_LABEL
92 in the entire compilation a unique label number. */
93
94static GTY(()) int label_num = 1;
95
96/* We record floating-point CONST_DOUBLEs in each floating-point mode for
97 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
98 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
99 is set only for MODE_INT and MODE_VECTOR_INT modes. */
100
101rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
102
103rtx const_true_rtx;
104
105REAL_VALUE_TYPE dconst0;
106REAL_VALUE_TYPE dconst1;
107REAL_VALUE_TYPE dconst2;
108REAL_VALUE_TYPE dconstm0;
109REAL_VALUE_TYPE dconstm1;
110REAL_VALUE_TYPE dconsthalf;
111REAL_VALUE_TYPE dconstinf;
112REAL_VALUE_TYPE dconstninf;
113
114/* Record fixed-point constant 0 and 1. */
115FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
116FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
117
118/* We make one copy of (const_int C) where C is in
119 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
120 to save space during the compilation and simplify comparisons of
121 integers. */
122
123rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
124
125/* Standard pieces of rtx, to be substituted directly into things. */
126rtx pc_rtx;
127rtx ret_rtx;
128rtx simple_return_rtx;
129
130/* Marker used for denoting an INSN, which should never be accessed (i.e.,
131 this pointer should normally never be dereferenced), but is required to be
132 distinct from NULL_RTX. Currently used by peephole2 pass. */
133rtx_insn *invalid_insn_rtx;
134
135/* A hash table storing CONST_INTs whose absolute value is greater
136 than MAX_SAVED_CONST_INT. */
137
138struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
139{
140 typedef HOST_WIDE_INT compare_type;
141
142 static hashval_t hash (rtx i);
143 static bool equal (rtx i, HOST_WIDE_INT h);
144};
145
146static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
147
148struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
149{
150 static hashval_t hash (rtx x);
151 static bool equal (rtx x, rtx y);
152};
153
154static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
155
156struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
157{
158 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
159
160 static hashval_t hash (rtx x);
161 static bool equal (rtx x, const compare_type &y);
162};
163
164static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
165
166/* A hash table storing register attribute structures. */
167struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
168{
169 static hashval_t hash (reg_attrs *x);
170 static bool equal (reg_attrs *a, reg_attrs *b);
171};
172
173static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
174
175/* A hash table storing all CONST_DOUBLEs. */
176struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
177{
178 static hashval_t hash (rtx x);
179 static bool equal (rtx x, rtx y);
180};
181
182static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
183
184/* A hash table storing all CONST_FIXEDs. */
185struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
186{
187 static hashval_t hash (rtx x);
188 static bool equal (rtx x, rtx y);
189};
190
191static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
192
193#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
194#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
195#define first_label_num (crtl->emit.x_first_label_num)
196
197static void set_used_decls (tree);
198static void mark_label_nuses (rtx);
199#if TARGET_SUPPORTS_WIDE_INT
200static rtx lookup_const_wide_int (rtx);
201#endif
202static rtx lookup_const_double (rtx);
203static rtx lookup_const_fixed (rtx);
204static rtx gen_const_vector (machine_mode, int);
205static void copy_rtx_if_shared_1 (rtx *orig);
206
/* Probability of the conditional branch currently processed by try_split.  */
208profile_probability split_branch_probability;
209
/* Returns a hash code for X (which is really a CONST_INT).  */
211
212hashval_t
213const_int_hasher::hash (rtx x)
214{
215 return (hashval_t) INTVAL (x);
216}
217
218/* Returns true if the value represented by X (which is really a
219 CONST_INT) is the same as that given by Y (which is really a
220 HOST_WIDE_INT *). */
221
222bool
223const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
224{
225 return (INTVAL (x) == y);
226}
227
228#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
230
231hashval_t
232const_wide_int_hasher::hash (rtx x)
233{
234 int i;
235 unsigned HOST_WIDE_INT hash = 0;
236 const_rtx xr = x;
237
238 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
239 hash += CONST_WIDE_INT_ELT (xr, i);
240
241 return (hashval_t) hash;
242}
243
244/* Returns true if the value represented by X (which is really a
245 CONST_WIDE_INT) is the same as that given by Y (which is really a
246 CONST_WIDE_INT). */
247
248bool
249const_wide_int_hasher::equal (rtx x, rtx y)
250{
251 int i;
252 const_rtx xr = x;
253 const_rtx yr = y;
254 if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
255 return false;
256
257 for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
258 if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
259 return false;
260
261 return true;
262}
263#endif
264
265/* Returns a hash code for CONST_POLY_INT X. */
266
267hashval_t
268const_poly_int_hasher::hash (rtx x)
269{
270 inchash::hash h;
271 h.add_int (GET_MODE (x));
272 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
273 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
274 return h.end ();
275}
276
277/* Returns true if CONST_POLY_INT X is an rtx representation of Y. */
278
279bool
280const_poly_int_hasher::equal (rtx x, const compare_type &y)
281{
282 if (GET_MODE (x) != y.first)
283 return false;
284 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
285 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
286 return false;
287 return true;
288}
289
290/* Returns a hash code for X (which is really a CONST_DOUBLE). */
291hashval_t
292const_double_hasher::hash (rtx x)
293{
294 const_rtx const value = x;
295 hashval_t h;
296
297 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
298 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
299 else
300 {
301 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
302 /* MODE is used in the comparison, so it should be in the hash. */
303 h ^= GET_MODE (value);
304 }
305 return h;
306}
307
/* Returns true if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
310bool
311const_double_hasher::equal (rtx x, rtx y)
312{
313 const_rtx const a = x, b = y;
314
315 if (GET_MODE (a) != GET_MODE (b))
316 return false;
317 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
318 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
319 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
320 else
321 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
322 CONST_DOUBLE_REAL_VALUE (b));
323}
324
325/* Returns a hash code for X (which is really a CONST_FIXED). */
326
327hashval_t
328const_fixed_hasher::hash (rtx x)
329{
330 const_rtx const value = x;
331 hashval_t h;
332
333 h = fixed_hash (CONST_FIXED_VALUE (value));
334 /* MODE is used in the comparison, so it should be in the hash. */
335 h ^= GET_MODE (value);
336 return h;
337}
338
339/* Returns true if the value represented by X is the same as that
340 represented by Y. */
341
342bool
343const_fixed_hasher::equal (rtx x, rtx y)
344{
345 const_rtx const a = x, b = y;
346
347 if (GET_MODE (a) != GET_MODE (b))
348 return false;
349 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
350}
351
352/* Return true if the given memory attributes are equal. */
353
354bool
355mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
356{
357 if (p == q)
358 return true;
359 if (!p || !q)
360 return false;
361 return (p->alias == q->alias
362 && p->offset_known_p == q->offset_known_p
363 && (!p->offset_known_p || known_eq (p->offset, q->offset))
364 && p->size_known_p == q->size_known_p
365 && (!p->size_known_p || known_eq (p->size, q->size))
366 && p->align == q->align
367 && p->addrspace == q->addrspace
368 && (p->expr == q->expr
369 || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
371}
372
373/* Set MEM's memory attributes so that they are the same as ATTRS. */
374
375static void
376set_mem_attrs (rtx mem, mem_attrs *attrs)
377{
378 /* If everything is the default, we can just clear the attributes. */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
380 {
381 MEM_ATTRS (mem) = 0;
382 return;
383 }
384
385 if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
387 {
388 MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
390 }
391}
392
/* Returns a hash code for X (which is really a reg_attrs *).  */
394
395hashval_t
396reg_attr_hasher::hash (reg_attrs *x)
397{
398 const reg_attrs *const p = x;
399
400 inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
403 return h.end ();
404}
405
406/* Returns true if the value represented by X is the same as that given by
407 Y. */
408
409bool
410reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
411{
412 const reg_attrs *const p = x;
413 const reg_attrs *const q = y;
414
415 return (p->decl == q->decl && known_eq (p->offset, q->offset));
416}
/* Allocate a new reg_attrs structure for DECL and OFFSET, and insert it into
   the hash table if one identical to it is not already in the table.  */
420
421static reg_attrs *
422get_reg_attrs (tree decl, poly_int64 offset)
423{
424 reg_attrs attrs;
425
426 /* If everything is the default, we can just return zero. */
427 if (decl == 0 && known_eq (offset, 0))
428 return 0;
429
430 attrs.decl = decl;
431 attrs.offset = offset;
432
  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
434 if (*slot == 0)
435 {
436 *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
438 }
439
440 return *slot;
441}
442
443
444#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to prevent register equivalences from being seen across this insn.  */
447
448rtx
449gen_blockage (void)
450{
451 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
452 MEM_VOLATILE_P (x) = true;
453 return x;
454}
455#endif
456
457
458/* Set the mode and register number of X to MODE and REGNO. */
459
460void
461set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
462{
463 unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
464 ? hard_regno_nregs (regno, mode)
465 : 1);
466 PUT_MODE_RAW (x, mode);
467 set_regno_raw (x, regno, nregs);
468}
469
470/* Initialize a fresh REG rtx with mode MODE and register REGNO. */
471
472rtx
473init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
474{
475 set_mode_and_regno (x, mode, regno);
476 REG_ATTRS (x) = NULL;
477 ORIGINAL_REGNO (x) = regno;
478 return x;
479}
480
481/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
482 don't attempt to share with the various global pieces of rtl (such as
483 frame_pointer_rtx). */
484
485rtx
486gen_raw_REG (machine_mode mode, unsigned int regno)
487{
488 rtx x = rtx_alloc (REG MEM_STAT_INFO);
489 init_raw_REG (x, mode, regno);
490 return x;
491}
492
493/* There are some RTL codes that require special attention; the generation
494 functions do the raw handling. If you add to this list, modify
495 special_rtx in gengenrtl.cc as well. */
496
497rtx_expr_list *
498gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
499{
500 return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
501 expr_list));
502}
503
504rtx_insn_list *
505gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
506{
507 return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
508 insn_list));
509}
510
511rtx_insn *
512gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
513 basic_block bb, rtx pattern, int location, int code,
514 rtx reg_notes)
515{
516 return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
517 prev_insn, next_insn,
518 bb, pattern, location, code,
519 reg_notes));
520}
521
522rtx
523gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
524{
525 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
526 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
527
528#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
529 if (const_true_rtx && arg == STORE_FLAG_VALUE)
530 return const_true_rtx;
531#endif
532
533 /* Look up the CONST_INT in the hash table. */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
536 if (*slot == 0)
537 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
538
539 return *slot;
540}
541
542rtx
543gen_int_mode (poly_int64 c, machine_mode mode)
544{
545 c = trunc_int_for_mode (c, mode);
546 if (c.is_constant ())
547 return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
550}
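
/* Illustrative usage sketch (an editorial aid, not part of the original GCC
   sources): unlike GEN_INT, gen_int_mode truncates the value to fit MODE,
   so callers that know the target mode should prefer it.  Assuming an
   8-bit QImode:

     rtx a = gen_int_mode (300, QImode);   yields (const_int 44)
     rtx b = GEN_INT (300);                yields (const_int 300), unchecked

   Small results come from the shared const_int_rtx table above; larger ones
   are looked up in const_int_htab.  */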
551
552/* CONST_DOUBLEs might be created from pairs of integers, or from
553 REAL_VALUE_TYPEs. Also, their length is known only at run time,
554 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
555
556/* Determine whether REAL, a CONST_DOUBLE, already exists in the
557 hash table. If so, return its counterpart; otherwise add it
558 to the hash table and return it. */
559static rtx
560lookup_const_double (rtx real)
561{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
563 if (*slot == 0)
564 *slot = real;
565
566 return *slot;
567}
568
569/* Return a CONST_DOUBLE rtx for a floating-point value specified by
570 VALUE in mode MODE. */
571rtx
572const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
573{
574 rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);
576
577 real->u.rv = value;
578
579 return lookup_const_double (real);
580}
581
582/* Determine whether FIXED, a CONST_FIXED, already exists in the
583 hash table. If so, return its counterpart; otherwise add it
584 to the hash table and return it. */
585
586static rtx
587lookup_const_fixed (rtx fixed)
588{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
590 if (*slot == 0)
591 *slot = fixed;
592
593 return *slot;
594}
595
596/* Return a CONST_FIXED rtx for a fixed-point value specified by
597 VALUE in mode MODE. */
598
599rtx
600const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
601{
602 rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);
604
605 fixed->u.fv = value;
606
607 return lookup_const_fixed (fixed);
608}
609
610#if TARGET_SUPPORTS_WIDE_INT == 0
611/* Constructs double_int from rtx CST. */
612
613double_int
614rtx_to_double_int (const_rtx cst)
615{
616 double_int r;
617
618 if (CONST_INT_P (cst))
619 r = double_int::from_shwi (INTVAL (cst));
620 else if (CONST_DOUBLE_AS_INT_P (cst))
621 {
622 r.low = CONST_DOUBLE_LOW (cst);
623 r.high = CONST_DOUBLE_HIGH (cst);
624 }
625 else
626 gcc_unreachable ();
627
628 return r;
629}
630#endif
631
632#if TARGET_SUPPORTS_WIDE_INT
633/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
634 If so, return its counterpart; otherwise add it to the hash table and
635 return it. */
636
637static rtx
638lookup_const_wide_int (rtx wint)
639{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
641 if (*slot == 0)
642 *slot = wint;
643
644 return *slot;
645}
646#endif
647
648/* Return an rtx constant for V, given that the constant has mode MODE.
649 The returned rtx will be a CONST_INT if V fits, otherwise it will be
650 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
651 (if TARGET_SUPPORTS_WIDE_INT). */
652
653static rtx
654immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
655{
656 unsigned int len = v.get_len ();
657 /* Not scalar_int_mode because we also allow pointer bound modes. */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
659
660 /* Allow truncation but not extension since we do not know if the
661 number is signed or unsigned. */
662 gcc_assert (prec <= v.get_precision ());
663
664 if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);
666
667#if TARGET_SUPPORTS_WIDE_INT
668 {
669 unsigned int i;
670 rtx value;
671 unsigned int blocks_needed
672 = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
673
674 if (len > blocks_needed)
675 len = blocks_needed;
676
677 value = const_wide_int_alloc (len);
678
679 /* It is so tempting to just put the mode in here. Must control
680 myself ... */
    PUT_MODE (value, VOIDmode);
682 CWI_PUT_NUM_ELEM (value, len);
683
684 for (i = 0; i < len; i++)
685 CONST_WIDE_INT_ELT (value, i) = v.elt (i);
686
    return lookup_const_wide_int (value);
688 }
689#else
690 return immed_double_const (v.elt (0), v.elt (1), mode);
691#endif
692}
693
694#if TARGET_SUPPORTS_WIDE_INT == 0
695/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
696 of ints: I0 is the low-order word and I1 is the high-order word.
697 For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
698 implied upper bits are copies of the high bit of i1. The value
699 itself is neither signed nor unsigned. Do not use this routine for
700 non-integer modes; convert to REAL_VALUE_TYPE and use
701 const_double_from_real_value. */
702
703rtx
704immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
705{
706 rtx value;
707 unsigned int i;
708
709 /* There are the following cases (note that there are no modes with
710 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
711
712 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
713 gen_int_mode.
714 2) If the value of the integer fits into HOST_WIDE_INT anyway
715 (i.e., i1 consists only from copies of the sign bit, and sign
716 of i0 and i1 are the same), then we return a CONST_INT for i0.
717 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
718 scalar_mode smode;
719 if (is_a <scalar_mode> (mode, &smode)
720 && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
721 return gen_int_mode (i0, mode);
722
723 /* If this integer fits in one word, return a CONST_INT. */
724 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
725 return GEN_INT (i0);
726
727 /* We use VOIDmode for integers. */
728 value = rtx_alloc (CONST_DOUBLE);
729 PUT_MODE (value, VOIDmode);
730
731 CONST_DOUBLE_LOW (value) = i0;
732 CONST_DOUBLE_HIGH (value) = i1;
733
734 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
735 XWINT (value, i) = 0;
736
737 return lookup_const_double (value);
738}
739#endif
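
/* Illustrative usage sketch (an editorial aid, not part of the original GCC
   sources): on a !TARGET_SUPPORTS_WIDE_INT host with a 64-bit HOST_WIDE_INT
   and a target that provides TImode, a constant equal to 2**64 could be
   built as

     rtx two_to_64 = immed_double_const (0, 1, TImode);

   which reaches the CONST_DOUBLE path above because the value fits neither
   a single word nor a sign-extended CONST_INT.  */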
740
741/* Return an rtx representation of C in mode MODE. */
742
743rtx
744immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
745{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
765 rtx x = *slot;
766 if (x)
767 return x;
768
769 /* Create a new rtx. There's a choice to be made here between installing
770 the actual mode of the rtx or leaving it as VOIDmode (for consistency
771 with CONST_INT). In practice the handling of the codes is different
772 enough that we get no benefit from using VOIDmode, and various places
773 assume that VOIDmode implies CONST_INT. Using the real mode seems like
774 the right long-term direction anyway. */
775 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
781 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
782 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
783
784 *slot = x;
785 return x;
786}
787
788rtx
789gen_rtx_REG (machine_mode mode, unsigned int regno)
790{
791 /* In case the MD file explicitly references the frame pointer, have
792 all such references point to the same frame pointer. This is
793 used during frame pointer elimination to distinguish the explicit
794 references to these registers from pseudos that happened to be
795 assigned to them.
796
797 If we have eliminated the frame pointer or arg pointer, we will
798 be using it as a normal register, for example as a spill
799 register. In such cases, we might be accessing it in a mode that
800 is not Pmode and therefore cannot use the pre-allocated rtx.
801
802 Also don't do this when we are making new REGs in reload, since
803 we don't want to get confused with the real pointers. */
804
805 if (mode == Pmode && !reload_in_progress && !lra_in_progress)
806 {
807 if (regno == FRAME_POINTER_REGNUM
808 && (!reload_completed || frame_pointer_needed))
809 return frame_pointer_rtx;
810
811 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
812 && regno == HARD_FRAME_POINTER_REGNUM
813 && (!reload_completed || frame_pointer_needed))
814 return hard_frame_pointer_rtx;
815#if !HARD_FRAME_POINTER_IS_ARG_POINTER
816 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
817 && regno == ARG_POINTER_REGNUM)
818 return arg_pointer_rtx;
819#endif
820#ifdef RETURN_ADDRESS_POINTER_REGNUM
821 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
822 return return_address_pointer_rtx;
823#endif
824 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
825 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
826 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
827 return pic_offset_table_rtx;
828 if (regno == STACK_POINTER_REGNUM)
829 return stack_pointer_rtx;
830 }
831
832#if 0
833 /* If the per-function register table has been set up, try to re-use
834 an existing entry in that table to avoid useless generation of RTL.
835
836 This code is disabled for now until we can fix the various backends
837 which depend on having non-shared hard registers in some cases. Long
838 term we want to re-enable this code as it can significantly cut down
839 on the amount of useless RTL that gets generated.
840
841 We'll also need to fix some code that runs after reload that wants to
842 set ORIGINAL_REGNO. */
843
844 if (cfun
845 && cfun->emit
846 && regno_reg_rtx
847 && regno < FIRST_PSEUDO_REGISTER
848 && reg_raw_mode[regno] == mode)
849 return regno_reg_rtx[regno];
850#endif
851
852 return gen_raw_REG (mode, regno);
853}
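
/* Illustrative usage sketch (an editorial aid, not part of the original GCC
   sources): outside of reload/LRA, asking for the stack pointer in Pmode
   returns the shared rtx rather than a fresh object, so pointer equality
   holds:

     rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     gcc_checking_assert (sp == stack_pointer_rtx);

   Requests in a non-Pmode mode, or made while reload/LRA is in progress,
   fall through to gen_raw_REG and produce a new REG.  */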
854
855rtx
856gen_rtx_MEM (machine_mode mode, rtx addr)
857{
858 rtx rt = gen_rtx_raw_MEM (mode, addr);
859
860 /* This field is not cleared by the mere allocation of the rtx, so
861 we clear it here. */
862 MEM_ATTRS (rt) = 0;
863
864 return rt;
865}
866
867/* Generate a memory referring to non-trapping constant memory. */
868
869rtx
870gen_const_mem (machine_mode mode, rtx addr)
871{
872 rtx mem = gen_rtx_MEM (mode, addr);
873 MEM_READONLY_P (mem) = 1;
874 MEM_NOTRAP_P (mem) = 1;
875 return mem;
876}
877
878/* Generate a MEM referring to fixed portions of the frame, e.g., register
879 save areas. */
880
881rtx
882gen_frame_mem (machine_mode mode, rtx addr)
883{
884 rtx mem = gen_rtx_MEM (mode, addr);
885 MEM_NOTRAP_P (mem) = 1;
886 set_mem_alias_set (mem, get_frame_alias_set ());
887 return mem;
888}
889
890/* Generate a MEM referring to a temporary use of the stack, not part
891 of the fixed stack frame. For example, something which is pushed
892 by a target splitter. */
893rtx
894gen_tmp_stack_mem (machine_mode mode, rtx addr)
895{
896 rtx mem = gen_rtx_MEM (mode, addr);
897 MEM_NOTRAP_P (mem) = 1;
898 if (!cfun->calls_alloca)
899 set_mem_alias_set (mem, get_frame_alias_set ());
900 return mem;
901}
902
903/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
904 this construct would be valid, and false otherwise. */
905
906bool
907validate_subreg (machine_mode omode, machine_mode imode,
908 const_rtx reg, poly_uint64 offset)
909{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);
912
913 /* The sizes must be ordered, so that we know whether the subreg
914 is partial, paradoxical or complete. */
  if (!ordered_p (isize, osize))
916 return false;
917
918 /* All subregs must be aligned. */
  if (!multiple_p (offset, osize))
920 return false;
921
922 /* The subreg offset cannot be outside the inner object. */
923 if (maybe_ge (offset, isize))
924 return false;
925
926 poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
927
928 /* ??? This should not be here. Temporarily continue to allow word_mode
929 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
930 Generally, backends are doing something sketchy but it'll take time to
931 fix them all. */
932 if (omode == word_mode)
933 ;
934 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
935 is the culprit here, and not the backends. */
936 else if (known_ge (osize, regsize) && known_ge (isize, osize))
937 ;
938 /* Allow component subregs of complex and vector. Though given the below
939 extraction rules, it's not always clear what that means. */
940 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
941 && GET_MODE_INNER (imode) == omode)
942 ;
943 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
944 i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0). This
945 surely isn't the cleanest way to represent this. It's questionable
946 if this ought to be represented at all -- why can't this all be hidden
947 in post-reload splitters that make arbitrarily mode changes to the
948 registers themselves. */
949 else if (VECTOR_MODE_P (omode)
950 && GET_MODE_UNIT_SIZE (omode) == GET_MODE_UNIT_SIZE (imode))
951 ;
952 /* Subregs involving floating point modes are not allowed to
953 change size unless it's an insert into a complex mode.
954 Therefore (subreg:DI (reg:DF) 0) and (subreg:CS (reg:SF) 0) are fine, but
955 (subreg:SI (reg:DF) 0) isn't. */
956 else if ((FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
957 && !COMPLEX_MODE_P (omode))
958 {
      if (! (known_eq (isize, osize)
	     /* LRA can use a subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than that
		of the integer mode.  LRA also uses subregs when a register
		must be used in different modes within one insn.  */
	     || lra_in_progress))
967 return false;
968 }
969
970 /* Paradoxical subregs must have offset zero. */
971 if (maybe_gt (osize, isize))
972 return known_eq (offset, 0U);
973
974 /* This is a normal subreg. Verify that the offset is representable. */
975
976 /* For hard registers, we already have most of these rules collected in
977 subreg_offset_representable_p. */
978 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
979 {
980 unsigned int regno = REGNO (reg);
981
982 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
983 && GET_MODE_INNER (imode) == omode)
984 ;
985 else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
986 return false;
987
988 return subreg_offset_representable_p (regno, imode, offset, omode);
989 }
990 /* Do not allow SUBREG with stricter alignment than the inner MEM. */
991 else if (reg && MEM_P (reg) && STRICT_ALIGNMENT
992 && MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (omode))
993 return false;
994
995 /* The outer size must be ordered wrt the register size, otherwise
996 we wouldn't know at compile time how many registers the outer
997 mode occupies. */
  if (!ordered_p (osize, regsize))
999 return false;
1000
1001 /* For pseudo registers, we want most of the same checks. Namely:
1002
1003 Assume that the pseudo register will be allocated to hard registers
1004 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE,
1005 the remainder must correspond to the lowpart of the containing hard
1006 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
1007 otherwise it is at the lowest offset.
1008
1009 Given that we've already checked the mode and offset alignment,
1010 we only have to check subblock subregs here. */
  if (maybe_lt (osize, regsize)
1012 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
1013 {
1014 /* It is invalid for the target to pick a register size for a mode
1015 that isn't ordered wrt to the size of that mode. */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
1023 return false;
1024 }
1025 return true;
1026}
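
/* Illustrative sketch (an editorial aid, not part of the original GCC
   sources): on a target where SImode is 4 bytes and DImode is 8 bytes,
   the checks above accept (subreg:SI (reg:DI) 0) and (subreg:SI (reg:DI) 4),
   since those offsets are multiples of the outer size and lie within the
   inner object, but reject (subreg:SI (reg:DI) 2) at the multiple_p test
   and reject any nonzero offset on a paradoxical subreg.  */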
1027
1028rtx
1029gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1030{
1031 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1032 return gen_rtx_raw_SUBREG (mode, reg, offset);
1033}
1034
1035/* Generate a SUBREG representing the least-significant part of REG if MODE
1036 is smaller than mode of REG, otherwise paradoxical SUBREG. */
1037
1038rtx
1039gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1040{
1041 machine_mode inmode;
1042
1043 inmode = GET_MODE (reg);
1044 if (inmode == VOIDmode)
1045 inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
1048}
1049
1050rtx
1051gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1052 enum var_init_status status)
1053{
1054 rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1055 PAT_VAR_LOCATION_STATUS (x) = status;
1056 return x;
1057}
1058
1059
/* Create an rtvec and store within it the RTXen passed in the arguments.  */
1061
1062rtvec
1063gen_rtvec (int n, ...)
1064{
1065 int i;
1066 rtvec rt_val;
1067 va_list p;
1068
1069 va_start (p, n);
1070
1071 /* Don't allocate an empty rtvec... */
1072 if (n == 0)
1073 {
1074 va_end (p);
1075 return NULL_RTVEC;
1076 }
1077
1078 rt_val = rtvec_alloc (n);
1079
1080 for (i = 0; i < n; i++)
1081 rt_val->elem[i] = va_arg (p, rtx);
1082
1083 va_end (p);
1084 return rt_val;
1085}
1086
1087rtvec
1088gen_rtvec_v (int n, rtx *argp)
1089{
1090 int i;
1091 rtvec rt_val;
1092
1093 /* Don't allocate an empty rtvec... */
1094 if (n == 0)
1095 return NULL_RTVEC;
1096
1097 rt_val = rtvec_alloc (n);
1098
1099 for (i = 0; i < n; i++)
1100 rt_val->elem[i] = *argp++;
1101
1102 return rt_val;
1103}
1104
1105rtvec
1106gen_rtvec_v (int n, rtx_insn **argp)
1107{
1108 int i;
1109 rtvec rt_val;
1110
1111 /* Don't allocate an empty rtvec... */
1112 if (n == 0)
1113 return NULL_RTVEC;
1114
1115 rt_val = rtvec_alloc (n);
1116
1117 for (i = 0; i < n; i++)
1118 rt_val->elem[i] = *argp++;
1119
1120 return rt_val;
1121}
1122
1123
1124/* Return the number of bytes between the start of an OUTER_MODE
1125 in-memory value and the start of an INNER_MODE in-memory value,
1126 given that the former is a lowpart of the latter. It may be a
1127 paradoxical lowpart, in which case the offset will be negative
1128 on big-endian targets. */
1129
1130poly_int64
1131byte_lowpart_offset (machine_mode outer_mode,
1132 machine_mode inner_mode)
1133{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
1138}
1139
1140/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1141 from address X. For paradoxical big-endian subregs this is a
1142 negative value, otherwise it's the same as OFFSET. */
1143
1144poly_int64
1145subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1146 poly_uint64 offset)
1147{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (known_eq (offset, 0U));
      return -subreg_lowpart_offset (inner_mode, outer_mode);
1152 }
1153 return offset;
1154}
1155
1156/* As above, but return the offset that existing subreg X would have
1157 if SUBREG_REG (X) were stored in memory. The only significant thing
1158 about the current SUBREG_REG is its mode. */
1159
1160poly_int64
1161subreg_memory_offset (const_rtx x)
1162{
1163 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1164 SUBREG_BYTE (x));
1165}
1166
1167/* Generate a REG rtx for a new pseudo register of mode MODE.
1168 This pseudo is assigned the next sequential register number. */
1169
1170rtx
1171gen_reg_rtx (machine_mode mode)
1172{
1173 rtx val;
1174 unsigned int align = GET_MODE_ALIGNMENT (mode);
1175
1176 gcc_assert (can_create_pseudo_p ());
1177
1178 /* If a virtual register with bigger mode alignment is generated,
1179 increase stack alignment estimation because it might be spilled
1180 to stack later. */
1181 if (SUPPORTS_STACK_ALIGNMENT
1182 && crtl->stack_alignment_estimated < align
1183 && !crtl->stack_realign_processed)
1184 {
1185 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1186 if (crtl->stack_alignment_estimated < min_align)
1187 crtl->stack_alignment_estimated = min_align;
1188 }
1189
1190 if (generating_concat_p
1191 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1192 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1193 {
1194 /* For complex modes, don't make a single pseudo.
1195 Instead, make a CONCAT of two pseudos.
1196 This allows noncontiguous allocation of the real and imaginary parts,
1197 which makes much better code. Besides, allocating DCmode
1198 pseudos overstrains reload on some machines like the 386. */
1199 rtx realpart, imagpart;
1200 machine_mode partmode = GET_MODE_INNER (mode);
1201
      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
1204 return gen_rtx_CONCAT (mode, realpart, imagpart);
1205 }
1206
1207 /* Do not call gen_reg_rtx with uninitialized crtl. */
1208 gcc_assert (crtl->emit.regno_pointer_align_length);
1209
1210 crtl->emit.ensure_regno_capacity ();
1211 gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1212
1213 val = gen_raw_REG (mode, reg_rtx_no);
1214 regno_reg_rtx[reg_rtx_no++] = val;
1215 return val;
1216}
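
/* Illustrative usage sketch (an editorial aid, not part of the original GCC
   sources): expanders typically grab scratch pseudos like so:

     rtx tmp = gen_reg_rtx (SImode);    a fresh (reg:SI N)
     rtx c   = gen_reg_rtx (DCmode);    (concat:DC (reg:DF ...) (reg:DF ...))

   where the complex-mode request is split into a CONCAT of two pseudos as
   described above, provided generating_concat_p is set and DCmode exists on
   the target.  */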
1217
/* Make sure m_regno_pointer_align and regno_reg_rtx are large
1219 enough to have elements in the range 0 <= idx <= reg_rtx_no. */
1220
1221void
1222emit_status::ensure_regno_capacity ()
1223{
1224 int old_size = regno_pointer_align_length;
1225
1226 if (reg_rtx_no < old_size)
1227 return;
1228
1229 int new_size = old_size * 2;
1230 while (reg_rtx_no >= new_size)
1231 new_size *= 2;
1232
1233 char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
1235 regno_pointer_align = (unsigned char *) tmp;
1236
1237 rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1239 regno_reg_rtx = new1;
1240
1241 crtl->emit.regno_pointer_align_length = new_size;
1242}
1243
1244/* Return TRUE if REG is a PARM_DECL, FALSE otherwise. */
1245
1246bool
1247reg_is_parm_p (rtx reg)
1248{
1249 tree decl;
1250
1251 gcc_assert (REG_P (reg));
1252 decl = REG_EXPR (reg);
1253 return (decl && TREE_CODE (decl) == PARM_DECL);
1254}
1255
1256/* Update NEW with the same attributes as REG, but with OFFSET added
1257 to the REG_OFFSET. */
1258
1259static void
1260update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1261{
1262 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1263 REG_OFFSET (reg) + offset);
1264}
1265
1266/* Generate a register with same attributes as REG, but with OFFSET
1267 added to the REG_OFFSET. */
1268
1269rtx
1270gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1271 poly_int64 offset)
1272{
1273 /* Use gen_raw_REG rather than gen_rtx_REG, because otherwise we'd
1274 overwrite REG_ATTRS (and in the callers often ORIGINAL_REGNO too)
1275 of the shared REG rtxes like stack_pointer_rtx etc. This should
1276 happen only for SUBREGs from DEBUG_INSNs, RA should ensure
1277 multi-word registers don't overlap the special registers like
1278 stack pointer. */
1279 rtx new_rtx = gen_raw_REG (mode, regno);
1280
1281 update_reg_offset (new_rtx, reg, offset);
1282 return new_rtx;
1283}
1284
1285/* Generate a new pseudo-register with the same attributes as REG, but
1286 with OFFSET added to the REG_OFFSET. */
1287
1288rtx
1289gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1290{
1291 rtx new_rtx = gen_reg_rtx (mode);
1292
1293 update_reg_offset (new_rtx, reg, offset);
1294 return new_rtx;
1295}
1296
1297/* Adjust REG in-place so that it has mode MODE. It is assumed that the
1298 new register is a (possibly paradoxical) lowpart of the old one. */
1299
1300void
1301adjust_reg_mode (rtx reg, machine_mode mode)
1302{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
1305}
1306
1307/* Copy REG's attributes from X, if X has any attributes. If REG and X
1308 have different modes, REG is a (possibly paradoxical) lowpart of X. */
1309
1310void
1311set_reg_attrs_from_value (rtx reg, rtx x)
1312{
1313 poly_int64 offset;
1314 bool can_be_reg_pointer = true;
1315
1316 /* Don't call mark_reg_pointer for incompatible pointer sign
1317 extension. */
1318 while (GET_CODE (x) == SIGN_EXTEND
1319 || GET_CODE (x) == ZERO_EXTEND
1320 || GET_CODE (x) == TRUNCATE
1321 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1322 {
1323#if defined(POINTERS_EXTEND_UNSIGNED)
1324 if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1325 || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1326 || (paradoxical_subreg_p (x)
1327 && ! (SUBREG_PROMOTED_VAR_P (x)
1328 && SUBREG_CHECK_PROMOTED_SIGN (x,
1329 POINTERS_EXTEND_UNSIGNED))))
1330 && !targetm.have_ptr_extend ())
1331 can_be_reg_pointer = false;
1332#endif
1333 x = XEXP (x, 0);
1334 }
1335
1336 /* Hard registers can be reused for multiple purposes within the same
1337 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1338 on them is wrong. */
1339 if (HARD_REGISTER_P (reg))
1340 return;
1341
1342 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1343 if (MEM_P (x))
1344 {
1345 if (MEM_OFFSET_KNOWN_P (x))
1346 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1347 MEM_OFFSET (x) + offset);
1348 if (can_be_reg_pointer && MEM_POINTER (x))
1349 mark_reg_pointer (reg, 0);
1350 }
1351 else if (REG_P (x))
1352 {
1353 if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
1355 if (can_be_reg_pointer && REG_POINTER (x))
1356 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1357 }
1358}
1359
1360/* Generate a REG rtx for a new pseudo register, copying the mode
1361 and attributes from X. */
1362
1363rtx
1364gen_reg_rtx_and_attrs (rtx x)
1365{
1366 rtx reg = gen_reg_rtx (GET_MODE (x));
1367 set_reg_attrs_from_value (reg, x);
1368 return reg;
1369}
1370
1371/* Set the register attributes for registers contained in PARM_RTX.
1372 Use needed values from memory attributes of MEM. */
1373
1374void
1375set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1376{
1377 if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
1379 else if (GET_CODE (parm_rtx) == PARALLEL)
1380 {
1381 /* Check for a NULL entry in the first slot, used to indicate that the
1382 parameter goes both on the stack and in registers. */
1383 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1384 for (; i < XVECLEN (parm_rtx, 0); i++)
1385 {
1386 rtx x = XVECEXP (parm_rtx, 0, i);
1387 if (REG_P (XEXP (x, 0)))
1388 REG_ATTRS (XEXP (x, 0))
1389 = get_reg_attrs (MEM_EXPR (mem),
1390 INTVAL (XEXP (x, 1)));
1391 }
1392 }
1393}
1394
1395/* Set the REG_ATTRS for registers in value X, given that X represents
1396 decl T. */
1397
1398void
1399set_reg_attrs_for_decl_rtl (tree t, rtx x)
1400{
1401 if (!t)
1402 return;
1403 tree tdecl = t;
1404 if (GET_CODE (x) == SUBREG)
1405 {
1406 gcc_assert (subreg_lowpart_p (x));
1407 x = SUBREG_REG (x);
1408 }
1409 if (REG_P (x))
1410 REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
1415 if (GET_CODE (x) == CONCAT)
1416 {
1417 if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1419 if (REG_P (XEXP (x, 1)))
1420 REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1422 }
1423 if (GET_CODE (x) == PARALLEL)
1424 {
1425 int i, start;
1426
1427 /* Check for a NULL entry, used to indicate that the parameter goes
1428 both on the stack and in registers. */
1429 if (XEXP (XVECEXP (x, 0, 0), 0))
1430 start = 0;
1431 else
1432 start = 1;
1433
1434 for (i = start; i < XVECLEN (x, 0); i++)
1435 {
1436 rtx y = XVECEXP (x, 0, i);
1437 if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1439 }
1440 }
1441}
1442
1443/* Assign the RTX X to declaration T. */
1444
1445void
1446set_decl_rtl (tree t, rtx x)
1447{
1448 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1449 if (x)
1450 set_reg_attrs_for_decl_rtl (t, x);
1451}
1452
1453/* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1454 if the ABI requires the parameter to be passed by reference. */
1455
1456void
1457set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1458{
1459 DECL_INCOMING_RTL (t) = x;
1460 if (x && !by_reference_p)
1461 set_reg_attrs_for_decl_rtl (t, x);
1462}
1463
1464/* Identify REG (which may be a CONCAT) as a user register. */
1465
1466void
1467mark_user_reg (rtx reg)
1468{
1469 if (GET_CODE (reg) == CONCAT)
1470 {
1471 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1472 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1473 }
1474 else
1475 {
1476 gcc_assert (REG_P (reg));
1477 REG_USERVAR_P (reg) = 1;
1478 }
1479}
1480
1481/* Identify REG as a probable pointer register and show its alignment
1482 as ALIGN, if nonzero. */
1483
1484void
1485mark_reg_pointer (rtx reg, int align)
1486{
1487 if (! REG_POINTER (reg))
1488 {
1489 REG_POINTER (reg) = 1;
1490
1491 if (align)
1492 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1493 }
1494 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
1496 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1497}
1498
1499/* Return 1 plus largest pseudo reg number used in the current function. */
1500
1501int
1502max_reg_num (void)
1503{
1504 return reg_rtx_no;
1505}
1506
1507/* Return 1 + the largest label number used so far in the current function. */
1508
1509int
1510max_label_num (void)
1511{
1512 return label_num;
1513}
1514
1515/* Return first label number used in this function (if any were used). */
1516
1517int
1518get_first_label_num (void)
1519{
1520 return first_label_num;
1521}
1522
1523/* If the rtx for label was created during the expansion of a nested
1524 function, then first_label_num won't include this label number.
1525 Fix this now so that array indices work later. */
1526
1527void
1528maybe_set_first_label_num (rtx_code_label *x)
1529{
1530 if (CODE_LABEL_NUMBER (x) < first_label_num)
1531 first_label_num = CODE_LABEL_NUMBER (x);
1532}
1533
1534/* For use by the RTL function loader, when mingling with normal
1535 functions.
1536 Ensure that label_num is greater than the label num of X, to avoid
1537 duplicate labels in the generated assembler. */
1538
1539void
1540maybe_set_max_label_num (rtx_code_label *x)
1541{
1542 if (CODE_LABEL_NUMBER (x) >= label_num)
1543 label_num = CODE_LABEL_NUMBER (x) + 1;
1544}
1545
1546
1547/* Return a value representing some low-order bits of X, where the number
1548 of low-order bits is given by MODE. Note that no conversion is done
1549 between floating-point and fixed-point values, rather, the bit
1550 representation is returned.
1551
1552 This function handles the cases in common between gen_lowpart, below,
1553 and two variants in cse.cc and combine.cc. These are the cases that can
1554 be safely handled at all points in the compilation.
1555
1556 If this is not a case we can handle, return 0. */
1557
1558rtx
1559gen_lowpart_common (machine_mode mode, rtx x)
1560{
1561 poly_uint64 msize = GET_MODE_SIZE (mode);
1562 machine_mode innermode;
1563
1564 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1565 so we have to make one up. Yuk. */
1566 innermode = GET_MODE (x);
1567 if (CONST_INT_P (x)
1568 && known_le (msize * BITS_PER_UNIT,
1569 (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1573
1574 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1575
1576 if (innermode == mode)
1577 return x;
1578
1579 /* The size of the outer and inner modes must be ordered. */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
1582 return 0;
1583
1584 if (SCALAR_FLOAT_MODE_P (mode))
1585 {
1586 /* Don't allow paradoxical FLOAT_MODE subregs. */
1587 if (maybe_gt (msize, xsize))
1588 return 0;
1589 }
1590 else
1591 {
1592 /* MODE must occupy no more of the underlying registers than X. */
1593 poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1594 unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1597 || mregs > xregs)
1598 return 0;
1599 }
1600
1601 scalar_int_mode int_mode, int_innermode, from_mode;
1602 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1606 {
1607 /* If we are getting the low-order part of something that has been
1608 sign- or zero-extended, we can either just use the object being
1609 extended or make a narrower extension. If we want an even smaller
1610 piece than the size of the object being extended, call ourselves
1611 recursively.
1612
1613 This case is used mostly by combine and cse. */
1614
1615 if (from_mode == int_mode)
1616 return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1620 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1621 }
1622 else if (GET_CODE (x) == SUBREG || REG_P (x)
1623 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1624 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1625 || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);
1627
1628 /* Otherwise, we can't do this. */
1629 return 0;
1630}
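
/* Illustrative sketch (an editorial aid, not part of the original GCC
   sources): given x = (sign_extend:DI (reg:SI r)), gen_lowpart_common
   (SImode, x) returns (reg:SI r) via the extension case above, whereas
   for a plain (reg:DI r) it defers to lowpart_subreg.  */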
1631
1632rtx
1633gen_highpart (machine_mode mode, rtx x)
1634{
1635 poly_uint64 msize = GET_MODE_SIZE (mode);
1636 rtx result;
1637
1638 /* This case loses if X is a subreg. To catch bugs early,
1639 complain if an invalid MODE is used even in other cases. */
1640 gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1641 || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1642
1643 /* gen_lowpart_common handles a lot of special cases due to needing to handle
1644 paradoxical subregs; it only calls simplify_gen_subreg when certain that
1645 it will produce something meaningful. The only case we need to handle
1646 specially here is MEM. */
1647 if (MEM_P (x))
1648 {
      poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
1650 return adjust_address (x, mode, offset);
1651 }
1652
  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
1655 /* Since we handle MEM directly above, we should never get a MEM back
1656 from simplify_gen_subreg. */
1657 gcc_assert (result && !MEM_P (result));
1658
1659 return result;
1660}
1661
/* Like gen_highpart, but accept the mode of the EXP operand in case EXP can
   be a VOIDmode constant.  */
1664rtx
1665gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1666{
1667 if (GET_MODE (exp) != VOIDmode)
1668 {
1669 gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
1671 }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
1674}
1675
1676/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1677 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1678
1679poly_uint64
1680subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1681{
1682 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1683 if (maybe_gt (outer_bytes, inner_bytes))
1684 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */
1685 return 0;
1686
1687 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1688 return inner_bytes - outer_bytes;
1689 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1690 return 0;
1691 else
1692 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1693}
1694
1695/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1696 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */
1697
1698poly_uint64
1699subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1700{
1701 gcc_assert (known_ge (inner_bytes, outer_bytes));
1702
1703 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1704 return 0;
1705 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1706 return inner_bytes - outer_bytes;
1707 else
1708 return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1709 (inner_bytes - outer_bytes)
1710 * BITS_PER_UNIT);
1711}
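
/* Worked example (an editorial aid, not part of the original GCC sources):
   for a 4-byte outer mode inside an 8-byte inner mode,
   subreg_size_lowpart_offset (4, 8) is 0 on a little-endian target and 4
   when both BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN are set, while
   subreg_size_highpart_offset (4, 8) is the mirror image: 4 on
   little-endian and 0 on big-endian.  */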
1712
1713/* Return true iff X, assumed to be a SUBREG,
1714 refers to the least significant part of its containing reg.
1715 If X is not a SUBREG, always return true (it is its own low part!). */
1716
1717bool
1718subreg_lowpart_p (const_rtx x)
1719{
1720 if (GET_CODE (x) != SUBREG)
1721 return true;
1722 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1723 return false;
1724
1725 return known_eq (subreg_lowpart_offset (GET_MODE (x),
1726 GET_MODE (SUBREG_REG (x))),
1727 SUBREG_BYTE (x));
1728}
1729
1730/* Return subword OFFSET of operand OP.
1731 The word number, OFFSET, is interpreted as the word number starting
1732 at the low-order address. OFFSET 0 is the low-order word if not
1733 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1734
1735 If we cannot extract the required word, we return zero. Otherwise,
1736 an rtx corresponding to the requested word will be returned.
1737
1738 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1739 reload has completed, a valid address will always be returned. After
1740 reload, if a valid address cannot be returned, we return zero.
1741
1742 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1743 it is the responsibility of the caller.
1744
1745 MODE is the mode of OP in case it is a CONST_INT.
1746
1747 ??? This is still rather broken for some cases. The problem for the
1748 moment is that all callers of this thing provide no 'goal mode' to
1749 tell us to work with. This exists because all callers were written
1750 in a word based SUBREG world.
1751 Now use of this function can be deprecated by simplify_subreg in most
1752 cases.
1753 */
1754
1755rtx
1756operand_subword (rtx op, poly_uint64 offset, int validate_address,
1757 machine_mode mode)
1758{
1759 if (mode == VOIDmode)
1760 mode = GET_MODE (op);
1761
1762 gcc_assert (mode != VOIDmode);
1763
1764 /* If OP is narrower than a word, fail. */
1765 if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1767 return 0;
1768
1769 /* If we want a word outside OP, return zero. */
1770 if (mode != BLKmode
1771 && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1772 return const0_rtx;
1773
1774 /* Form a new MEM at the requested address. */
1775 if (MEM_P (op))
1776 {
1777 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1778
1779 if (! validate_address)
1780 return new_rtx;
1781
1782 else if (reload_completed)
1783 {
1784 if (! strict_memory_address_addr_space_p (word_mode,
1785 XEXP (new_rtx, 0),
1786 MEM_ADDR_SPACE (op)))
1787 return 0;
1788 }
1789 else
1790 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1791 }
1792
1793 /* Rest can be handled by simplify_subreg. */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1795}
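
/* A usage sketch (hedged, not exercised here): with 4-byte words,

     rtx word1 = operand_subword (op, 1, 1, DImode);

   asks for the second word of a DImode operand.  For a MEM this forms
   a word_mode reference 4 bytes past the original address via
   adjust_address_nv and then validates it; REGs, constants and the
   like are handled by the simplify_gen_subreg call above.  */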
1796
1797/* Similar to `operand_subword', but never return 0. If we can't
1798 extract the required subword, put OP into a register and try again.
1799 The second attempt must succeed. We always validate the address in
1800 this case.
1801
1802 MODE is the mode of OP, in case it is CONST_INT. */
1803
1804rtx
1805operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1806{
1807 rtx result = operand_subword (op, offset, 1, mode);
1808
1809 if (result)
1810 return result;
1811
1812 if (mode != BLKmode && mode != VOIDmode)
1813 {
1814 /* If this is a register which cannot be accessed by words, copy it
1815 to a pseudo register. */
1816 if (REG_P (op))
1817 op = copy_to_reg (op);
1818 else
1819 op = force_reg (mode, op);
1820 }
1821
1822 result = operand_subword (op, offset, 1, mode);
1823 gcc_assert (result);
1824
1825 return result;
1826}
1827
1828mem_attrs::mem_attrs ()
1829 : expr (NULL_TREE),
1830 offset (0),
1831 size (0),
1832 alias (0),
1833 align (0),
1834 addrspace (ADDR_SPACE_GENERIC),
1835 offset_known_p (false),
1836 size_known_p (false)
1837{}
1838
1839/* Return true if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
1840 equal, and false otherwise. */
1841
1842bool
1843mem_expr_equal_p (const_tree expr1, const_tree expr2)
1844{
1845 if (expr1 == expr2)
1846 return true;
1847
1848 if (! expr1 || ! expr2)
1849 return false;
1850
1851 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1852 return false;
1853
1854 return operand_equal_p (expr1, expr2, 0);
1855}
1856
1857/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1858 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1859 -1 if not known. */
1860
1861int
1862get_mem_align_offset (rtx mem, unsigned int align)
1863{
1864 tree expr;
1865 poly_uint64 offset;
1866
1867 /* This function can't use
1868 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1869 || (MAX (MEM_ALIGN (mem),
1870 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1871 < align))
1872 return -1;
1873 else
1874 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1875 for two reasons:
1876 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1877 for <variable>. get_inner_reference doesn't handle it and
1878 even if it did, the alignment in that case needs to be determined
1879 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1880 - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
1881 isn't sufficiently aligned, the object it is in might be. */
1882 gcc_assert (MEM_P (mem));
1883 expr = MEM_EXPR (mem);
1884 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1885 return -1;
1886
1887 offset = MEM_OFFSET (mem);
1888 if (DECL_P (expr))
1889 {
1890 if (DECL_ALIGN (expr) < align)
1891 return -1;
1892 }
1893 else if (INDIRECT_REF_P (expr))
1894 {
1895 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1896 return -1;
1897 }
1898 else if (TREE_CODE (expr) == COMPONENT_REF)
1899 {
1900 while (1)
1901 {
1902 tree inner = TREE_OPERAND (expr, 0);
1903 tree field = TREE_OPERAND (expr, 1);
1904 tree byte_offset = component_ref_field_offset (expr);
1905 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1906
1907 poly_uint64 suboffset;
1908 if (!byte_offset
1909 || !poly_int_tree_p (byte_offset, &suboffset)
1910 || !tree_fits_uhwi_p (bit_offset))
1911 return -1;
1912
1913 offset += suboffset;
1914 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1915
1916 if (inner == NULL_TREE)
1917 {
1918 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1919 < (unsigned int) align)
1920 return -1;
1921 break;
1922 }
1923 else if (DECL_P (inner))
1924 {
1925 if (DECL_ALIGN (inner) < align)
1926 return -1;
1927 break;
1928 }
1929 else if (TREE_CODE (inner) != COMPONENT_REF)
1930 return -1;
1931 expr = inner;
1932 }
1933 }
1934 else
1935 return -1;
1936
1937 HOST_WIDE_INT misalign;
1938 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1939 return -1;
1940 return misalign;
1941}
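
/* Worked example (illustrative): for a MEM whose MEM_EXPR is a decl
   with DECL_ALIGN of 64 bits and whose known MEM_OFFSET is 6,
   get_mem_align_offset (mem, 32) returns 2, i.e. the address minus two
   bytes is 32-bit aligned; if the offset is unknown, or the expression
   is not one of the handled forms, the result is -1.  */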
1942
1943/* Given REF (a MEM) and T, either the type of X or the expression
1944 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1945 if we are making a new object of this type. BITPOS is nonzero if
1946 there is an offset outstanding on T that will be applied later. */
1947
1948void
1949set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1950 poly_int64 bitpos)
1951{
1952 poly_int64 apply_bitpos = 0;
1953 tree type;
1954 class mem_attrs attrs, *defattrs, *refattrs;
1955 addr_space_t as;
1956
1957 /* It can happen that type_for_mode was given a mode for which there
1958 is no language-level type, in which case it returns NULL and we can
1959 see that here. */
1960 if (t == NULL_TREE)
1961 return;
1962
1963 type = TYPE_P (t) ? t : TREE_TYPE (t);
1964 if (type == error_mark_node)
1965 return;
1966
1967 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1968 wrong answer, as it assumes that DECL_RTL already has the right alias
1969 info. Callers should not set DECL_RTL until after the call to
1970 set_mem_attributes. */
1971 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1972
1973 /* Get the alias set from the expression or type (perhaps using a
1974 front-end routine) and use it. */
1975 attrs.alias = get_alias_set (t);
1976
1977 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1978 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1979
1980 /* Default values from pre-existing memory attributes if present. */
1981 refattrs = MEM_ATTRS (ref);
1982 if (refattrs)
1983 {
1984 /* ??? Can this ever happen? Calling this routine on a MEM that
1985 already carries memory attributes should probably be invalid. */
1986 attrs.expr = refattrs->expr;
1987 attrs.offset_known_p = refattrs->offset_known_p;
1988 attrs.offset = refattrs->offset;
1989 attrs.size_known_p = refattrs->size_known_p;
1990 attrs.size = refattrs->size;
1991 attrs.align = refattrs->align;
1992 }
1993
1994 /* Otherwise, default values from the mode of the MEM reference. */
1995 else
1996 {
1997 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1998 gcc_assert (!defattrs->expr);
1999 gcc_assert (!defattrs->offset_known_p);
2000
2001 /* Respect mode size. */
2002 attrs.size_known_p = defattrs->size_known_p;
2003 attrs.size = defattrs->size;
2004 /* ??? Is this really necessary? We probably should always get
2005 the size from the type below. */
2006
2007 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
2008 if T is an object, always compute the object alignment below. */
2009 if (TYPE_P (t))
2010 attrs.align = defattrs->align;
2011 else
2012 attrs.align = BITS_PER_UNIT;
2013 /* ??? If T is a type, respecting mode alignment may *also* be wrong
2014 e.g. if the type carries an alignment attribute. Should we be
2015 able to simply always use TYPE_ALIGN? */
2016 }
2017
2018 /* We can set the alignment from the type if we are making an object or if
2019 this is an INDIRECT_REF. */
2020 if (objectp || TREE_CODE (t) == INDIRECT_REF)
2021 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
2022
2023 /* If the size is known, we can set that. */
2024 tree new_size = TYPE_SIZE_UNIT (type);
2025
2026 /* The address-space is that of the type. */
2027 as = TYPE_ADDR_SPACE (type);
2028
2029 /* If T is not a type, we may be able to deduce some more information about
2030 the expression. */
2031 if (! TYPE_P (t))
2032 {
2033 tree base;
2034
2035 if (TREE_THIS_VOLATILE (t))
2036 MEM_VOLATILE_P (ref) = 1;
2037
2038 /* Now remove any conversions: they don't change what the underlying
2039 object is. Likewise for SAVE_EXPR. */
2040 while (CONVERT_EXPR_P (t)
2041 || TREE_CODE (t) == VIEW_CONVERT_EXPR
2042 || TREE_CODE (t) == SAVE_EXPR)
2043 t = TREE_OPERAND (t, 0);
2044
2045 /* Note whether this expression can trap. */
2046 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2047
2048 base = get_base_address (t);
2049 if (base)
2050 {
2051 if (DECL_P (base)
2052 && TREE_READONLY (base)
2053 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2054 && !TREE_THIS_VOLATILE (base))
2055 MEM_READONLY_P (ref) = 1;
2056
2057 /* Mark static const strings readonly as well. */
2058 if (TREE_CODE (base) == STRING_CST
2059 && TREE_READONLY (base)
2060 && TREE_STATIC (base))
2061 MEM_READONLY_P (ref) = 1;
2062
2063 /* Address-space information is on the base object. */
2064 if (TREE_CODE (base) == MEM_REF
2065 || TREE_CODE (base) == TARGET_MEM_REF)
2066 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2067 0))));
2068 else
2069 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2070 }
2071
2072 /* If this expression uses its parent's alias set, mark it such
2073 that we won't change it. */
2074 if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2075 MEM_KEEP_ALIAS_SET_P (ref) = 1;
2076
2077 /* If this is a decl, set the attributes of the MEM from it. */
2078 if (DECL_P (t))
2079 {
2080 attrs.expr = t;
2081 attrs.offset_known_p = true;
2082 attrs.offset = 0;
2083 apply_bitpos = bitpos;
2084 new_size = DECL_SIZE_UNIT (t);
2085 }
2086
2087 /* ??? If we end up with a constant or a descriptor do not
2088 record a MEM_EXPR. */
2089 else if (CONSTANT_CLASS_P (t)
2090 || TREE_CODE (t) == CONSTRUCTOR)
2091 ;
2092
2093 /* If this is a field reference, record it. */
2094 else if (TREE_CODE (t) == COMPONENT_REF)
2095 {
2096 attrs.expr = t;
2097 attrs.offset_known_p = true;
2098 attrs.offset = 0;
2099 apply_bitpos = bitpos;
2100 if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2101 new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2102 }
2103
2104 /* Else record it. */
2105 else
2106 {
2107 gcc_assert (handled_component_p (t)
2108 || TREE_CODE (t) == MEM_REF
2109 || TREE_CODE (t) == TARGET_MEM_REF);
2110 attrs.expr = t;
2111 attrs.offset_known_p = true;
2112 attrs.offset = 0;
2113 apply_bitpos = bitpos;
2114 }
2115
2116 /* If this is a reference based on a partitioned decl replace the
2117 base with a MEM_REF of the pointer representative we created
2118 during stack slot partitioning. */
2119 if (attrs.expr
2120 && VAR_P (base)
2121 && ! is_global_var (base)
2122 && cfun->gimple_df->decls_to_pointers != NULL)
2123 {
2124 tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2125 if (namep)
2126 {
2127 attrs.expr = unshare_expr (attrs.expr);
2128 tree *orig_base = &attrs.expr;
2129 while (handled_component_p (*orig_base))
2130 orig_base = &TREE_OPERAND (*orig_base, 0);
2131 tree aptrt = reference_alias_ptr_type (*orig_base);
2132 *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2133 build_int_cst (aptrt, 0));
2134 }
2135 }
2136
2137 /* Compute the alignment. */
2138 unsigned int obj_align;
2139 unsigned HOST_WIDE_INT obj_bitpos;
2140 get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2141 unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2142 if (diff_align != 0)
2143 obj_align = MIN (obj_align, diff_align);
2144 attrs.align = MAX (attrs.align, obj_align);
2145 }
2146
2147 poly_uint64 const_size;
2148 if (poly_int_tree_p (new_size, &const_size))
2149 {
2150 attrs.size_known_p = true;
2151 attrs.size = const_size;
2152 }
2153
2154 /* If we modified OFFSET based on T, then subtract the outstanding
2155 bit position offset. Similarly, increase the size of the accessed
2156 object to contain the negative offset. */
2157 if (maybe_ne (apply_bitpos, 0))
2158 {
2159 gcc_assert (attrs.offset_known_p);
2160 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2161 attrs.offset -= bytepos;
2162 if (attrs.size_known_p)
2163 attrs.size += bytepos;
2164 }
2165
2166 /* Now set the attributes we computed above. */
2167 attrs.addrspace = as;
2168 set_mem_attrs (ref, &attrs);
2169}
2170
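/* Set the memory attributes of REF from T, which is either a type or
   an expression; this is the common entry point and simply forwards to
   set_mem_attributes_minus_bitpos with no outstanding bit position.
   OBJECTP is nonzero if we are making a new object of this type.  */
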
2171void
2172set_mem_attributes (rtx ref, tree t, int objectp)
2173{
2174 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2175}
2176
2177/* Set the alias set of MEM to SET. */
2178
2179void
2180set_mem_alias_set (rtx mem, alias_set_type set)
2181{
2182 /* If the new and old alias sets don't conflict, something is wrong. */
2183 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2184 mem_attrs attrs (*get_mem_attrs (mem));
2185 attrs.alias = set;
2186 set_mem_attrs (mem, &attrs);
2187}
2188
2189/* Set the address space of MEM to ADDRSPACE (target-defined). */
2190
2191void
2192set_mem_addr_space (rtx mem, addr_space_t addrspace)
2193{
2194 mem_attrs attrs (*get_mem_attrs (mem));
2195 attrs.addrspace = addrspace;
2196 set_mem_attrs (mem, &attrs);
2197}
2198
2199/* Set the alignment of MEM to ALIGN bits. */
2200
2201void
2202set_mem_align (rtx mem, unsigned int align)
2203{
2204 mem_attrs attrs (*get_mem_attrs (mem));
2205 attrs.align = align;
2206 set_mem_attrs (mem, &attrs);
2207}
2208
2209/* Set the expr for MEM to EXPR. */
2210
2211void
2212set_mem_expr (rtx mem, tree expr)
2213{
2214 mem_attrs attrs (*get_mem_attrs (mem));
2215 attrs.expr = expr;
2216 set_mem_attrs (mem, &attrs);
2217}
2218
2219/* Set the offset of MEM to OFFSET. */
2220
2221void
2222set_mem_offset (rtx mem, poly_int64 offset)
2223{
2224 mem_attrs attrs (*get_mem_attrs (mem));
2225 attrs.offset_known_p = true;
2226 attrs.offset = offset;
2227 set_mem_attrs (mem, &attrs);
2228}
2229
2230/* Clear the offset of MEM. */
2231
2232void
2233clear_mem_offset (rtx mem)
2234{
2235 mem_attrs attrs (*get_mem_attrs (mem));
2236 attrs.offset_known_p = false;
2237 set_mem_attrs (mem, &attrs);
2238}
2239
2240/* Set the size of MEM to SIZE. */
2241
2242void
2243set_mem_size (rtx mem, poly_int64 size)
2244{
2245 mem_attrs attrs (*get_mem_attrs (mem));
2246 attrs.size_known_p = true;
2247 attrs.size = size;
2248 set_mem_attrs (mem, &attrs);
2249}
2250
2251/* Clear the size of MEM. */
2252
2253void
2254clear_mem_size (rtx mem)
2255{
2256 mem_attrs attrs (*get_mem_attrs (mem));
2257 attrs.size_known_p = false;
2258 set_mem_attrs (mem, &attrs);
2259}
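
/* A typical combination of the accessors above (a hedged sketch only;
   real callers live elsewhere, e.g. in expr.cc): after carving a piece
   out of a larger object one might write

     set_mem_expr (piece, NULL_TREE);       -- no longer a known object
     set_mem_alias_set (piece, 0);          -- may alias anything
     set_mem_size (piece, GET_MODE_SIZE (GET_MODE (piece)));

   Each call copies the existing mem_attrs, changes one field and
   re-shares the result through set_mem_attrs.  */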
2260
2261/* Return a memory reference like MEMREF, but with its mode changed to MODE
2262 and its address changed to ADDR. (VOIDmode means don't change the mode.
2263 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2264 returned memory location is required to be valid. INPLACE is true if any
2265 changes can be made directly to MEMREF or false if MEMREF must be treated
2266 as immutable.
2267
2268 The memory attributes are not changed. */
2269
2270static rtx
2271change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2272 bool inplace)
2273{
2274 addr_space_t as;
2275 rtx new_rtx;
2276
2277 gcc_assert (MEM_P (memref));
2278 as = MEM_ADDR_SPACE (memref);
2279 if (mode == VOIDmode)
2280 mode = GET_MODE (memref);
2281 if (addr == 0)
2282 addr = XEXP (memref, 0);
2283 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2284 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2285 return memref;
2286
2287 /* Don't validate the address for LRA. LRA can make the address
2288 valid by itself in the most efficient way. */
2289 if (validate && !lra_in_progress)
2290 {
2291 if (reload_in_progress || reload_completed)
2292 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2293 else
2294 addr = memory_address_addr_space (mode, addr, as);
2295 }
2296
2297 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2298 return memref;
2299
2300 if (inplace)
2301 {
2302 XEXP (memref, 0) = addr;
2303 return memref;
2304 }
2305
2306 new_rtx = gen_rtx_MEM (mode, addr);
2307 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2308 return new_rtx;
2309}
2310
2311/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2312 way we are changing MEMREF, so we only preserve the alias set. */
2313
2314rtx
2315change_address (rtx memref, machine_mode mode, rtx addr)
2316{
2317 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2318 machine_mode mmode = GET_MODE (new_rtx);
2319 class mem_attrs *defattrs;
2320
2321 mem_attrs attrs (*get_mem_attrs (memref));
2322 defattrs = mode_mem_attrs[(int) mmode];
2323 attrs.expr = NULL_TREE;
2324 attrs.offset_known_p = false;
2325 attrs.size_known_p = defattrs->size_known_p;
2326 attrs.size = defattrs->size;
2327 attrs.align = defattrs->align;
2328
2329 /* If there are no changes, just return the original memory reference. */
2330 if (new_rtx == memref)
2331 {
2332 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2333 return new_rtx;
2334
2335 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2336 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2337 }
2338
2339 set_mem_attrs (new_rtx, &attrs);
2340 return new_rtx;
2341}
2342
2343/* Return a memory reference like MEMREF, but with its mode changed
2344 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2345 nonzero, the memory address is forced to be valid.
2346 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2347 and the caller is responsible for adjusting MEMREF base register.
2348 If ADJUST_OBJECT is zero, the underlying object associated with the
2349 memory reference is left unchanged and the caller is responsible for
2350 dealing with it. Otherwise, if the new memory reference is outside
2351 the underlying object, even partially, then the object is dropped.
2352 SIZE, if nonzero, is the size of an access in cases where MODE
2353 has no inherent size. */
2354
2355rtx
2356adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2357 int validate, int adjust_address, int adjust_object,
2358 poly_int64 size)
2359{
2360 rtx addr = XEXP (memref, 0);
2361 rtx new_rtx;
2362 scalar_int_mode address_mode;
2363 class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2364 unsigned HOST_WIDE_INT max_align;
2365#ifdef POINTERS_EXTEND_UNSIGNED
2366 scalar_int_mode pointer_mode
2367 = targetm.addr_space.pointer_mode (attrs.addrspace);
2368#endif
2369
2370 /* VOIDmode means no mode change for change_address_1. */
2371 if (mode == VOIDmode)
2372 mode = GET_MODE (memref);
2373
2374 /* Take the size of non-BLKmode accesses from the mode. */
2375 defattrs = mode_mem_attrs[(int) mode];
2376 if (defattrs->size_known_p)
2377 size = defattrs->size;
2378
2379 /* If there are no changes, just return the original memory reference. */
2380 if (mode == GET_MODE (memref)
2381 && known_eq (offset, 0)
2382 && (known_eq (size, 0)
2383 || (attrs.size_known_p && known_eq (attrs.size, size)))
2384 && (!validate || memory_address_addr_space_p (mode, addr,
2385 attrs.addrspace)))
2386 return memref;
2387
2388 /* ??? Prefer to create garbage instead of creating shared rtl.
2389 This may happen even if offset is nonzero -- consider
2390 (plus (plus reg reg) const_int) -- so do this always. */
2391 addr = copy_rtx (addr);
2392
2393 /* Convert a possibly large offset to a signed value within the
2394 range of the target address space. */
2395 address_mode = get_address_mode (memref);
2396 offset = trunc_int_for_mode (offset, address_mode);
2397
2398 if (adjust_address)
2399 {
2400 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2401 object, we can merge it into the LO_SUM. */
2402 if (GET_MODE (memref) != BLKmode
2403 && GET_CODE (addr) == LO_SUM
2404 && known_in_range_p (offset,
2405 0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2406 / BITS_PER_UNIT)))
2407 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2408 plus_constant (address_mode,
2409 XEXP (addr, 1), offset));
2410#ifdef POINTERS_EXTEND_UNSIGNED
2411 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2412 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2413 the fact that pointers are not allowed to overflow. */
2414 else if (POINTERS_EXTEND_UNSIGNED > 0
2415 && GET_CODE (addr) == ZERO_EXTEND
2416 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2417 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2418 addr = gen_rtx_ZERO_EXTEND (address_mode,
2419 plus_constant (pointer_mode,
2420 XEXP (addr, 0), offset));
2421#endif
2422 else
2423 addr = plus_constant (address_mode, addr, offset);
2424 }
2425
2426 new_rtx = change_address_1 (memref, mode, addr, validate, false);
2427
2428 /* If the address is a REG, change_address_1 rightfully returns memref,
2429 but this would destroy memref's MEM_ATTRS. */
2430 if (new_rtx == memref && maybe_ne (offset, 0))
2431 new_rtx = copy_rtx (new_rtx);
2432
2433 /* Conservatively drop the object if we don't know where we start from. */
2434 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2435 {
2436 attrs.expr = NULL_TREE;
2437 attrs.alias = 0;
2438 }
2439
2440 /* Compute the new values of the memory attributes due to this adjustment.
2441 We add the offsets and update the alignment. */
2442 if (attrs.offset_known_p)
2443 {
2444 attrs.offset += offset;
2445
2446 /* Drop the object if the new left end is not within its bounds. */
2447 if (adjust_object && maybe_lt (attrs.offset, 0))
2448 {
2449 attrs.expr = NULL_TREE;
2450 attrs.alias = 0;
2451 }
2452 }
2453
2454 /* Compute the new alignment by taking the MIN of the alignment and the
2455 lowest-order set bit in OFFSET, but don't change the alignment if
2456 OFFSET is zero. */
2457 if (maybe_ne (offset, 0))
2458 {
2459 max_align = known_alignment (offset) * BITS_PER_UNIT;
2460 attrs.align = MIN (attrs.align, max_align);
2461 }
2462
2463 if (maybe_ne (size, 0))
2464 {
2465 /* Drop the object if the new right end is not within its bounds. */
2466 if (adjust_object && maybe_gt (offset + size, attrs.size))
2467 {
2468 attrs.expr = NULL_TREE;
2469 attrs.alias = 0;
2470 }
2471 attrs.size_known_p = true;
2472 attrs.size = size;
2473 }
2474 else if (attrs.size_known_p)
2475 {
2476 gcc_assert (!adjust_object);
2477 attrs.size -= offset;
2478 /* ??? The store_by_pieces machinery generates negative sizes,
2479 so don't assert for that here. */
2480 }
2481
2482 set_mem_attrs (new_rtx, &attrs);
2483
2484 return new_rtx;
2485}
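
/* For instance (a sketch; the adjust_address family of macros in
   emit-rtl.h wraps this function): adjust_address_nv (mem, QImode, 3)
   yields a QImode reference to byte 3 of MEM without validating the
   new address, much as operand_subword uses it above, while the
   validating variants pass VALIDATE as 1.  */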
2486
2487/* Return a memory reference like MEMREF, but with its mode changed
2488 to MODE and its address changed to ADDR, which is assumed to be
2489 MEMREF offset by OFFSET bytes. If VALIDATE is
2490 nonzero, the memory address is forced to be valid. */
2491
2492rtx
2493adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2494 poly_int64 offset, int validate)
2495{
2496 memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2497 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2498}
2499
2500/* Return a memory reference like MEMREF, but whose address is changed by
2501 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2502 known to be in OFFSET (possibly 1). */
2503
2504rtx
2505offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2506{
2507 rtx new_rtx, addr = XEXP (memref, 0);
2508 machine_mode address_mode;
2509 class mem_attrs *defattrs;
2510
2511 mem_attrs attrs (*get_mem_attrs (memref));
2512 address_mode = get_address_mode (memref);
2513 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2514
2515 /* At this point we don't know _why_ the address is invalid. It
2516 could have secondary memory references, multiplies or anything.
2517
2518 However, if we did go and rearrange things, we can wind up not
2519 being able to recognize the magic around pic_offset_table_rtx.
2520 This stuff is fragile, and is yet another example of why it is
2521 bad to expose PIC machinery too early. */
2522 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2523 attrs.addrspace)
2524 && GET_CODE (addr) == PLUS
2525 && XEXP (addr, 0) == pic_offset_table_rtx)
2526 {
2527 addr = force_reg (GET_MODE (addr), addr);
2528 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2529 }
2530
2531 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2532 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2533
2534 /* If there are no changes, just return the original memory reference. */
2535 if (new_rtx == memref)
2536 return new_rtx;
2537
2538 /* Update the alignment to reflect the offset. Reset the offset, which
2539 we don't know. */
2540 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2541 attrs.offset_known_p = false;
2542 attrs.size_known_p = defattrs->size_known_p;
2543 attrs.size = defattrs->size;
2544 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2545 set_mem_attrs (new_rtx, &attrs);
2546 return new_rtx;
2547}
2548
2549/* Return a memory reference like MEMREF, but with its address changed to
2550 ADDR. The caller is asserting that the actual piece of memory pointed
2551 to is the same, just the form of the address is being changed, such as
2552 by putting something into a register. INPLACE is true if any changes
2553 can be made directly to MEMREF or false if MEMREF must be treated as
2554 immutable. */
2555
2556rtx
2557replace_equiv_address (rtx memref, rtx addr, bool inplace)
2558{
2559 /* change_address_1 copies the memory attribute structure without change
2560 and that's exactly what we want here. */
2561 update_temp_slot_address (XEXP (memref, 0), addr);
2562 return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2563}
2564
2565/* Likewise, but the reference is not required to be valid. */
2566
2567rtx
2568replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2569{
2570 return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2571}
2572
2573/* Return a memory reference like MEMREF, but with its mode widened to
2574 MODE and offset by OFFSET. This would be used by targets that e.g.
2575 cannot issue QImode memory operations and have to use SImode memory
2576 operations plus masking logic. */
2577
2578rtx
2579widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2580{
2581 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2582 poly_uint64 size = GET_MODE_SIZE (mode);
2583
2584 /* If there are no changes, just return the original memory reference. */
2585 if (new_rtx == memref)
2586 return new_rtx;
2587
2588 mem_attrs attrs (*get_mem_attrs (new_rtx));
2589
2590 /* If we don't know what offset we were at within the expression, then
2591 we can't know if we've overstepped the bounds. */
2592 if (! attrs.offset_known_p)
2593 attrs.expr = NULL_TREE;
2594
2595 while (attrs.expr)
2596 {
2597 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2598 {
2599 tree field = TREE_OPERAND (attrs.expr, 1);
2600 tree offset = component_ref_field_offset (attrs.expr);
2601
2602 if (! DECL_SIZE_UNIT (field))
2603 {
2604 attrs.expr = NULL_TREE;
2605 break;
2606 }
2607
2608 /* Is the field at least as large as the access? If so, ok,
2609 otherwise strip back to the containing structure. */
2610 if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2611 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2612 && known_ge (attrs.offset, 0))
2613 break;
2614
2615 poly_uint64 suboffset;
2616 if (!poly_int_tree_p (offset, &suboffset))
2617 {
2618 attrs.expr = NULL_TREE;
2619 break;
2620 }
2621
2622 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2623 attrs.offset += suboffset;
2624 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2625 / BITS_PER_UNIT);
2626 }
2627 /* Similarly for the decl. */
2628 else if (DECL_P (attrs.expr)
2629 && DECL_SIZE_UNIT (attrs.expr)
2630 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2631 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2632 size)
2633 && known_ge (attrs.offset, 0))
2634 break;
2635 else
2636 {
2637 /* The widened memory access overflows the expression, which means
2638 that it could alias another expression. Zap it. */
2639 attrs.expr = NULL_TREE;
2640 break;
2641 }
2642 }
2643
2644 if (! attrs.expr)
2645 attrs.offset_known_p = false;
2646
2647 /* The widened memory may alias other stuff, so zap the alias set. */
2648 /* ??? Maybe use get_alias_set on any remaining expression. */
2649 attrs.alias = 0;
2650 attrs.size_known_p = true;
2651 attrs.size = size;
2652 set_mem_attrs (new_rtx, &attrs);
2653 return new_rtx;
2654}
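
/* Example (a sketch): a target that can only do SImode loads could
   rewrite a QImode load as

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   and mask out the byte it wants afterwards.  The code above records
   the widened 4-byte size, always zaps the alias set, and drops
   MEM_EXPR whenever the wider access could step outside the original
   object.  */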
2655
2656/* A fake decl that is used as the MEM_EXPR of spill slots. */
2657static GTY(()) tree spill_slot_decl;
2658
2659tree
2660get_spill_slot_decl (bool force_build_p)
2661{
2662 tree d = spill_slot_decl;
2663 rtx rd;
2664
2665 if (d || !force_build_p)
2666 return d;
2667
2668 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2669 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2670 DECL_ARTIFICIAL (d) = 1;
2671 DECL_IGNORED_P (d) = 1;
2672 TREE_USED (d) = 1;
2673 spill_slot_decl = d;
2674
2675 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2676 MEM_NOTRAP_P (rd) = 1;
2677 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2678 attrs.alias = new_alias_set ();
2679 attrs.expr = d;
2680 set_mem_attrs (rd, &attrs);
2681 SET_DECL_RTL (d, rd);
2682
2683 return d;
2684}
2685
2686/* Given MEM, a result from assign_stack_local, fill in the memory
2687 attributes as appropriate for a register allocator spill slot.
2688 These slots are not aliasable by other memory. We arrange for
2689 them all to use a single MEM_EXPR, so that the aliasing code can
2690 work properly in the case of shared spill slots. */
2691
2692void
2693set_mem_attrs_for_spill (rtx mem)
2694{
2695 rtx addr;
2696
2697 mem_attrs attrs (*get_mem_attrs (mem));
2698 attrs.expr = get_spill_slot_decl (true);
2699 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2700 attrs.addrspace = ADDR_SPACE_GENERIC;
2701
2702 /* We expect the incoming memory to be of the form:
2703 (mem:MODE (plus (reg sfp) (const_int offset)))
2704 with perhaps the plus missing for offset = 0. */
2705 addr = XEXP (mem, 0);
2706 attrs.offset_known_p = true;
2707 strip_offset (addr, &attrs.offset);
2708
2709 set_mem_attrs (mem, &attrs);
2710 MEM_NOTRAP_P (mem) = 1;
2711}
2712
2713/* Return a newly created CODE_LABEL rtx with a unique label number. */
2714
2715rtx_code_label *
2716gen_label_rtx (void)
2717{
2718 return as_a <rtx_code_label *> (
2719 gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2720 NULL, label_num++, NULL));
2721}
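
/* Typical usage (illustrative only):

     rtx_code_label *over = gen_label_rtx ();
     ... emit a conditional branch to OVER ...
     emit_label (over);

   The CODE_LABEL created here only becomes part of the insn chain once
   it is passed to one of the emit_label routines.  */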
2722
2723/* For procedure integration. */
2724
2725/* Install new pointers to the first and last insns in the chain.
2726 Also, set cur_insn_uid to one higher than the last in use.
2727 Used for an inline-procedure after copying the insn chain. */
2728
2729void
2730set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2731{
2732 rtx_insn *insn;
2733
2734 set_first_insn (first);
2735 set_last_insn (last);
2736 cur_insn_uid = 0;
2737
2738 if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
2739 {
2740 int debug_count = 0;
2741
2742 cur_insn_uid = param_min_nondebug_insn_uid - 1;
2743 cur_debug_insn_uid = 0;
2744
2745 for (insn = first; insn; insn = NEXT_INSN (insn))
2746 if (INSN_UID (insn) < param_min_nondebug_insn_uid)
2747 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2748 else
2749 {
2750 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2751 if (DEBUG_INSN_P (insn))
2752 debug_count++;
2753 }
2754
2755 if (debug_count)
2756 cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
2757 else
2758 cur_debug_insn_uid++;
2759 }
2760 else
2761 for (insn = first; insn; insn = NEXT_INSN (insn))
2762 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2763
2764 cur_insn_uid++;
2765}
2766
2767/* Go through all the RTL insn bodies and copy any invalid shared
2768 structure. This routine should only be called once. */
2769
2770static void
2771unshare_all_rtl_1 (rtx_insn *insn)
2772{
2773 /* Unshare just about everything else. */
2774 unshare_all_rtl_in_chain (insn);
2775
2776 /* Make sure the addresses of stack slots found outside the insn chain
2777 (such as in the DECL_RTL of a variable) are not shared
2778 with the insn chain.
2779
2780 This special care is necessary when the stack slot MEM does not
2781 actually appear in the insn chain. If it does appear, its address
2782 is unshared from all else at that point. */
2783 unsigned int i;
2784 rtx temp;
2785 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2786 (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2787}
2788
2789/* Go through all the RTL insn bodies and copy any invalid shared
2790 structure, again. This is a fairly expensive thing to do so it
2791 should be done sparingly. */
2792
2793void
2794unshare_all_rtl_again (rtx_insn *insn)
2795{
2796 rtx_insn *p;
2797 tree decl;
2798
2799 for (p = insn; p; p = NEXT_INSN (p))
2800 if (INSN_P (p))
2801 {
2802 reset_used_flags (PATTERN (p));
2803 reset_used_flags (REG_NOTES (p));
2804 if (CALL_P (p))
2805 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2806 }
2807
2808 /* Make sure that virtual stack slots are not shared. */
2809 set_used_decls (DECL_INITIAL (cfun->decl));
2810
2811 /* Make sure that virtual parameters are not shared. */
2812 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2813 set_used_flags (DECL_RTL (decl));
2814
2815 rtx temp;
2816 unsigned int i;
2817 FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2818 reset_used_flags (temp);
2819
2820 unshare_all_rtl_1 (insn);
2821}
2822
2823void
2824unshare_all_rtl (void)
2825{
2826 unshare_all_rtl_1 (get_insns ());
2827
2828 for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2829 {
2830 if (DECL_RTL_SET_P (decl))
2831 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2832 DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2833 }
2834}
2835
2836
2837/* Check that ORIG is not marked when it should not be, and mark ORIG
2838 as in use. Recursively does the same for subexpressions. */
2839
2840static void
2841verify_rtx_sharing (rtx orig, rtx insn)
2842{
2843 rtx x = orig;
2844 int i;
2845 enum rtx_code code;
2846 const char *format_ptr;
2847
2848 if (x == 0)
2849 return;
2850
2851 code = GET_CODE (x);
2852
2853 /* These types may be freely shared. */
2854
2855 switch (code)
2856 {
2857 case REG:
2858 case DEBUG_EXPR:
2859 case VALUE:
2860 CASE_CONST_ANY:
2861 case SYMBOL_REF:
2862 case LABEL_REF:
2863 case CODE_LABEL:
2864 case PC:
2865 case RETURN:
2866 case SIMPLE_RETURN:
2867 case SCRATCH:
2868 /* SCRATCHes must be shared because they represent distinct values. */
2869 return;
2870 case CLOBBER:
2871 /* Share clobbers of hard registers, but do not share pseudo reg
2872 clobbers or clobbers of hard registers that originated as pseudos.
2873 This is needed to allow safe register renaming. */
2874 if (REG_P (XEXP (x, 0))
2875 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2876 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2877 return;
2878 break;
2879
2880 case CONST:
2881 if (shared_const_p (orig))
2882 return;
2883 break;
2884
2885 case MEM:
2886 /* A MEM is allowed to be shared if its address is constant. */
2887 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2888 || reload_completed || reload_in_progress)
2889 return;
2890
2891 break;
2892
2893 default:
2894 break;
2895 }
2896
2897 /* This rtx may not be shared. If it has already been seen,
2898 report the invalid sharing. */
2899 if (flag_checking && RTX_FLAG (x, used))
2900 {
2901 error ("invalid rtl sharing found in the insn");
2902 debug_rtx (insn);
2903 error ("shared rtx");
2904 debug_rtx (x);
2905 internal_error ("internal consistency failure");
2906 }
2907 gcc_assert (!RTX_FLAG (x, used));
2908
2909 RTX_FLAG (x, used) = 1;
2910
2911 /* Now scan the subexpressions recursively. */
2912
2913 format_ptr = GET_RTX_FORMAT (code);
2914
2915 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2916 {
2917 switch (*format_ptr++)
2918 {
2919 case 'e':
2920 verify_rtx_sharing (XEXP (x, i), insn);
2921 break;
2922
2923 case 'E':
2924 if (XVEC (x, i) != NULL)
2925 {
2926 int j;
2927 int len = XVECLEN (x, i);
2928
2929 for (j = 0; j < len; j++)
2930 {
2931 /* We allow sharing of ASM_OPERANDS inside a single
2932 instruction. */
2933 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2934 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2935 == ASM_OPERANDS))
2936 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2937 else
2938 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2939 }
2940 }
2941 break;
2942 }
2943 }
2944}
2945
2946/* Reset used-flags for INSN. */
2947
2948static void
2949reset_insn_used_flags (rtx insn)
2950{
2951 gcc_assert (INSN_P (insn));
2952 reset_used_flags (PATTERN (insn));
2953 reset_used_flags (REG_NOTES (insn));
2954 if (CALL_P (insn))
2955 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2956}
2957
2958/* Go through all the RTL insn bodies and clear all the USED bits. */
2959
2960static void
2961reset_all_used_flags (void)
2962{
2963 rtx_insn *p;
2964
2965 for (p = get_insns (); p; p = NEXT_INSN (p))
2966 if (INSN_P (p))
2967 {
2968 rtx pat = PATTERN (p);
2969 if (GET_CODE (pat) != SEQUENCE)
2970 reset_insn_used_flags (p);
2971 else
2972 {
2973 gcc_assert (REG_NOTES (p) == NULL);
2974 for (int i = 0; i < XVECLEN (pat, 0); i++)
2975 {
2976 rtx insn = XVECEXP (pat, 0, i);
2977 if (INSN_P (insn))
2978 reset_insn_used_flags (insn);
2979 }
2980 }
2981 }
2982}
2983
2984/* Verify sharing in INSN. */
2985
2986static void
2987verify_insn_sharing (rtx insn)
2988{
2989 gcc_assert (INSN_P (insn));
2990 verify_rtx_sharing (PATTERN (insn), insn);
2991 verify_rtx_sharing (REG_NOTES (insn), insn);
2992 if (CALL_P (insn))
2993 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2994}
2995
2996/* Go through all the RTL insn bodies and check that there is no unexpected
2997 sharing in between the subexpressions. */
2998
2999DEBUG_FUNCTION void
3000verify_rtl_sharing (void)
3001{
3002 rtx_insn *p;
3003
3004 timevar_push (TV_VERIFY_RTL_SHARING);
3005
3006 reset_all_used_flags ();
3007
3008 for (p = get_insns (); p; p = NEXT_INSN (p))
3009 if (INSN_P (p))
3010 {
3011 rtx pat = PATTERN (p);
3012 if (GET_CODE (pat) != SEQUENCE)
3013 verify_insn_sharing (p);
3014 else
3015 for (int i = 0; i < XVECLEN (pat, 0); i++)
3016 {
3017 rtx insn = XVECEXP (pat, 0, i);
3018 if (INSN_P (insn))
3019 verify_insn_sharing (insn);
3020 }
3021 }
3022
3023 reset_all_used_flags ();
3024
3025 timevar_pop (TV_VERIFY_RTL_SHARING);
3026}
3027
3028/* Go through all the RTL insn bodies and copy any invalid shared structure.
3029 Assumes the mark bits are cleared at entry. */
3030
3031void
3032unshare_all_rtl_in_chain (rtx_insn *insn)
3033{
3034 for (; insn; insn = NEXT_INSN (insn))
3035 if (INSN_P (insn))
3036 {
3037 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3038 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3039 if (CALL_P (insn))
3040 CALL_INSN_FUNCTION_USAGE (insn)
3041 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3042 }
3043}
3044
3045/* Go through all virtual stack slots of a function and mark them as
3046 shared. We never replace the DECL_RTLs themselves with a copy,
3047 but expressions mentioned in a DECL_RTL cannot be shared with
3048 expressions in the instruction stream.
3049
3050 Note that reload may convert pseudo registers into memories in-place.
3051 Pseudo registers are always shared, but MEMs never are. Thus if we
3052 reset the used flags on MEMs in the instruction stream, we must set
3053 them again on MEMs that appear in DECL_RTLs. */
3054
3055static void
3056set_used_decls (tree blk)
3057{
3058 tree t;
3059
3060 /* Mark decls. */
3061 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3062 if (DECL_RTL_SET_P (t))
3063 set_used_flags (DECL_RTL (t));
3064
3065 /* Now process sub-blocks. */
3066 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3067 set_used_decls (t);
3068}
3069
3070/* Mark ORIG as in use, and return a copy of it if it was already in use.
3071 Recursively does the same for subexpressions. Uses
3072 copy_rtx_if_shared_1 to reduce stack space. */
3073
3074rtx
3075copy_rtx_if_shared (rtx orig)
3076{
3077 copy_rtx_if_shared_1 (&orig);
3078 return orig;
3079}
3080
3081/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3082 use. Recursively does the same for subexpressions. */
3083
3084static void
3085copy_rtx_if_shared_1 (rtx *orig1)
3086{
3087 rtx x;
3088 int i;
3089 enum rtx_code code;
3090 rtx *last_ptr;
3091 const char *format_ptr;
3092 int copied = 0;
3093 int length;
3094
3095 /* Repeat is used to turn tail-recursion into iteration. */
3096repeat:
3097 x = *orig1;
3098
3099 if (x == 0)
3100 return;
3101
3102 code = GET_CODE (x);
3103
3104 /* These types may be freely shared. */
3105
3106 switch (code)
3107 {
3108 case REG:
3109 case DEBUG_EXPR:
3110 case VALUE:
3111 CASE_CONST_ANY:
3112 case SYMBOL_REF:
3113 case LABEL_REF:
3114 case CODE_LABEL:
3115 case PC:
3116 case RETURN:
3117 case SIMPLE_RETURN:
3118 case SCRATCH:
3119 /* SCRATCHes must be shared because they represent distinct values. */
3120 return;
3121 case CLOBBER:
3122 /* Share clobbers of hard registers, but do not share pseudo reg
3123 clobbers or clobbers of hard registers that originated as pseudos.
3124 This is needed to allow safe register renaming. */
3125 if (REG_P (XEXP (x, 0))
3126 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3127 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3128 return;
3129 break;
3130
3131 case CONST:
3132 if (shared_const_p (x))
3133 return;
3134 break;
3135
3136 case DEBUG_INSN:
3137 case INSN:
3138 case JUMP_INSN:
3139 case CALL_INSN:
3140 case NOTE:
3141 case BARRIER:
3142 /* The chain of insns is not being copied. */
3143 return;
3144
3145 default:
3146 break;
3147 }
3148
3149 /* This rtx may not be shared. If it has already been seen,
3150 replace it with a copy of itself. */
3151
3152 if (RTX_FLAG (x, used))
3153 {
3154 x = shallow_copy_rtx (x);
3155 copied = 1;
3156 }
3157 RTX_FLAG (x, used) = 1;
3158
3159 /* Now scan the subexpressions recursively.
3160 We can store any replaced subexpressions directly into X
3161 since we know X is not shared! Any vectors in X
3162 must be copied if X was copied. */
3163
3164 format_ptr = GET_RTX_FORMAT (code);
3165 length = GET_RTX_LENGTH (code);
3166 last_ptr = NULL;
3167
3168 for (i = 0; i < length; i++)
3169 {
3170 switch (*format_ptr++)
3171 {
3172 case 'e':
3173 if (last_ptr)
3174 copy_rtx_if_shared_1 (last_ptr);
3175 last_ptr = &XEXP (x, i);
3176 break;
3177
3178 case 'E':
3179 if (XVEC (x, i) != NULL)
3180 {
3181 int j;
3182 int len = XVECLEN (x, i);
3183
3184 /* Copy the vector iff we copied the rtx and the length
3185 is nonzero. */
3186 if (copied && len > 0)
3187 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3188
3189 /* Call recursively on all inside the vector. */
3190 for (j = 0; j < len; j++)
3191 {
3192 if (last_ptr)
3193 copy_rtx_if_shared_1 (last_ptr);
3194 last_ptr = &XVECEXP (x, i, j);
3195 }
3196 }
3197 break;
3198 }
3199 }
3200 *orig1 = x;
3201 if (last_ptr)
3202 {
3203 orig1 = last_ptr;
3204 goto repeat;
3205 }
3206}
3207
3208/* Set the USED bit in X and its non-shareable subparts to FLAG. */
3209
3210static void
3211mark_used_flags (rtx x, int flag)
3212{
3213 int i, j;
3214 enum rtx_code code;
3215 const char *format_ptr;
3216 int length;
3217
3218 /* Repeat is used to turn tail-recursion into iteration. */
3219repeat:
3220 if (x == 0)
3221 return;
3222
3223 code = GET_CODE (x);
3224
3225 /* These types may be freely shared so we needn't do any resetting
3226 for them. */
3227
3228 switch (code)
3229 {
3230 case REG:
3231 case DEBUG_EXPR:
3232 case VALUE:
3233 CASE_CONST_ANY:
3234 case SYMBOL_REF:
3235 case CODE_LABEL:
3236 case PC:
3237 case RETURN:
3238 case SIMPLE_RETURN:
3239 return;
3240
3241 case DEBUG_INSN:
3242 case INSN:
3243 case JUMP_INSN:
3244 case CALL_INSN:
3245 case NOTE:
3246 case LABEL_REF:
3247 case BARRIER:
3248 /* The chain of insns is not being copied. */
3249 return;
3250
3251 default:
3252 break;
3253 }
3254
3255 RTX_FLAG (x, used) = flag;
3256
3257 format_ptr = GET_RTX_FORMAT (code);
3258 length = GET_RTX_LENGTH (code);
3259
3260 for (i = 0; i < length; i++)
3261 {
3262 switch (*format_ptr++)
3263 {
3264 case 'e':
3265 if (i == length-1)
3266 {
3267 x = XEXP (x, i);
3268 goto repeat;
3269 }
3270 mark_used_flags (XEXP (x, i), flag);
3271 break;
3272
3273 case 'E':
3274 for (j = 0; j < XVECLEN (x, i); j++)
3275 mark_used_flags (XVECEXP (x, i, j), flag);
3276 break;
3277 }
3278 }
3279}
3280
3281/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3282 to look for shared sub-parts. */
3283
3284void
3285reset_used_flags (rtx x)
3286{
3287 mark_used_flags (x, 0);
3288}
3289
3290/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3291 to look for shared sub-parts. */
3292
3293void
3294set_used_flags (rtx x)
3295{
3296 mark_used_flags (x, 1);
3297}
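
/* The usual unsharing protocol, as followed by unshare_all_rtl_again
   above (shown only as a sketch): clear the used bits first, then walk
   the structure copying whatever is seen a second time:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   set_used_flags is applied beforehand to things like DECL_RTLs so
   that any appearance of them inside the insn chain is replaced by a
   copy rather than left shared.  */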
3298
3299/* Copy X if necessary so that it won't be altered by changes in OTHER.
3300 Return X or the rtx for the pseudo reg the value of X was copied into.
3301 OTHER must be valid as a SET_DEST. */
3302
3303rtx
3304make_safe_from (rtx x, rtx other)
3305{
3306 while (1)
3307 switch (GET_CODE (other))
3308 {
3309 case SUBREG:
3310 other = SUBREG_REG (other);
3311 break;
3312 case STRICT_LOW_PART:
3313 case SIGN_EXTEND:
3314 case ZERO_EXTEND:
3315 other = XEXP (other, 0);
3316 break;
3317 default:
3318 goto done;
3319 }
3320 done:
3321 if ((MEM_P (other)
3322 && ! CONSTANT_P (x)
3323 && !REG_P (x)
3324 && GET_CODE (x) != SUBREG)
3325 || (REG_P (other)
3326 && (REGNO (other) < FIRST_PSEUDO_REGISTER
3327 || reg_mentioned_p (other, x))))
3328 {
3329 rtx temp = gen_reg_rtx (GET_MODE (x));
3330 emit_move_insn (temp, x);
3331 return temp;
3332 }
3333 return x;
3334}
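
/* For example (a sketch): when expanding A = A op B, where storing into
   A first could clobber B, a caller can write

     b = make_safe_from (b, a_rtx);

   so that B is copied into a fresh pseudo whenever A_RTX is a MEM and B
   is anything more complex than a register or constant, or A_RTX is a
   hard register or a register mentioned in B.  */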
3335
3336/* Emission of insns (adding them to the doubly-linked list). */
3337
3338/* Return the last insn emitted, even if it is in a sequence now pushed. */
3339
3340rtx_insn *
3341get_last_insn_anywhere (void)
3342{
3343 struct sequence_stack *seq;
3344 for (seq = get_current_sequence (); seq; seq = seq->next)
3345 if (seq->last != 0)
3346 return seq->last;
3347 return 0;
3348}
3349
3350/* Return the first nonnote insn emitted in current sequence or current
3351 function. This routine looks inside SEQUENCEs. */
3352
3353rtx_insn *
3354get_first_nonnote_insn (void)
3355{
3356 rtx_insn *insn = get_insns ();
3357
3358 if (insn)
3359 {
3360 if (NOTE_P (insn))
3361 for (insn = next_insn (insn);
3362 insn && NOTE_P (insn);
3363 insn = next_insn (insn))
3364 continue;
3365 else
3366 {
3367 if (NONJUMP_INSN_P (insn)
3368 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3369 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3370 }
3371 }
3372
3373 return insn;
3374}
3375
3376/* Return the last nonnote insn emitted in current sequence or current
3377 function. This routine looks inside SEQUENCEs. */
3378
3379rtx_insn *
3380get_last_nonnote_insn (void)
3381{
3382 rtx_insn *insn = get_last_insn ();
3383
3384 if (insn)
3385 {
3386 if (NOTE_P (insn))
3387 for (insn = previous_insn (insn);
3388 insn && NOTE_P (insn);
3389 insn = previous_insn (insn))
3390 continue;
3391 else
3392 {
3393 if (NONJUMP_INSN_P (insn))
3394 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3395 insn = seq->insn (seq->len () - 1);
3396 }
3397 }
3398
3399 return insn;
3400}
3401
3402/* Return the number of actual (non-debug) insns emitted in this
3403 function. */
3404
3405int
3406get_max_insn_count (void)
3407{
3408 int n = cur_insn_uid;
3409
3410 /* The table size must be stable across -g, to avoid codegen
3411 differences due to debug insns, and not be affected by
3412 -fmin-insn-uid, to avoid excessive table size and to simplify
3413 debugging of -fcompare-debug failures. */
3414 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
3415 n -= cur_debug_insn_uid;
3416 else
3417 n -= param_min_nondebug_insn_uid;
3418
3419 return n;
3420}
3421
3422
3423/* Return the next insn. If it is a SEQUENCE, return the first insn
3424 of the sequence. */
3425
3426rtx_insn *
3427next_insn (rtx_insn *insn)
3428{
3429 if (insn)
3430 {
3431 insn = NEXT_INSN (insn);
3432 if (insn && NONJUMP_INSN_P (insn)
3433 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3434 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3435 }
3436
3437 return insn;
3438}
3439
3440/* Return the previous insn. If it is a SEQUENCE, return the last insn
3441 of the sequence. */
3442
3443rtx_insn *
3444previous_insn (rtx_insn *insn)
3445{
3446 if (insn)
3447 {
3448 insn = PREV_INSN (insn);
3449 if (insn && NONJUMP_INSN_P (insn))
3450 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3451 insn = seq->insn (seq->len () - 1);
3452 }
3453
3454 return insn;
3455}
3456
3457/* Return the next insn after INSN that is not a NOTE. This routine does not
3458 look inside SEQUENCEs. */
3459
3460rtx_insn *
3461next_nonnote_insn (rtx_insn *insn)
3462{
3463 while (insn)
3464 {
3465 insn = NEXT_INSN (insn);
3466 if (insn == 0 || !NOTE_P (insn))
3467 break;
3468 }
3469
3470 return insn;
3471}
3472
3473/* Return the next insn after INSN that is not a DEBUG_INSN. This
3474 routine does not look inside SEQUENCEs. */
3475
3476rtx_insn *
3477next_nondebug_insn (rtx_insn *insn)
3478{
3479 while (insn)
3480 {
3481 insn = NEXT_INSN (insn);
3482 if (insn == 0 || !DEBUG_INSN_P (insn))
3483 break;
3484 }
3485
3486 return insn;
3487}
3488
3489/* Return the previous insn before INSN that is not a NOTE. This routine does
3490 not look inside SEQUENCEs. */
3491
3492rtx_insn *
3493prev_nonnote_insn (rtx_insn *insn)
3494{
3495 while (insn)
3496 {
3497 insn = PREV_INSN (insn);
3498 if (insn == 0 || !NOTE_P (insn))
3499 break;
3500 }
3501
3502 return insn;
3503}
3504
3505/* Return the previous insn before INSN that is not a DEBUG_INSN.
3506 This routine does not look inside SEQUENCEs. */
3507
3508rtx_insn *
3509prev_nondebug_insn (rtx_insn *insn)
3510{
3511 while (insn)
3512 {
3513 insn = PREV_INSN (insn);
3514 if (insn == 0 || !DEBUG_INSN_P (insn))
3515 break;
3516 }
3517
3518 return insn;
3519}
3520
3521/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3522 This routine does not look inside SEQUENCEs. */
3523
3524rtx_insn *
3525next_nonnote_nondebug_insn (rtx_insn *insn)
3526{
3527 while (insn)
3528 {
3529 insn = NEXT_INSN (insn);
3530 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3531 break;
3532 }
3533
3534 return insn;
3535}
3536
3537/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3538 but stop the search before we enter another basic block. This
3539 routine does not look inside SEQUENCEs. */
3540
3541rtx_insn *
3542next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3543{
3544 while (insn)
3545 {
3546 insn = NEXT_INSN (insn);
3547 if (insn == 0)
3548 break;
3549 if (DEBUG_INSN_P (insn))
3550 continue;
3551 if (!NOTE_P (insn))
3552 break;
3553 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3554 return NULL;
3555 }
3556
3557 return insn;
3558}
3559
3560/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3561 This routine does not look inside SEQUENCEs. */
3562
3563rtx_insn *
3564prev_nonnote_nondebug_insn (rtx_insn *insn)
3565{
3566 while (insn)
3567 {
3568 insn = PREV_INSN (insn);
3569 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3570 break;
3571 }
3572
3573 return insn;
3574}
3575
3576/* Return the previous insn before INSN that is not a NOTE nor
3577 DEBUG_INSN, but stop the search before we enter another basic
3578 block. This routine does not look inside SEQUENCEs. */
3579
3580rtx_insn *
3581prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3582{
3583 while (insn)
3584 {
3585 insn = PREV_INSN (insn);
3586 if (insn == 0)
3587 break;
3588 if (DEBUG_INSN_P (insn))
3589 continue;
3590 if (!NOTE_P (insn))
3591 break;
3592 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3593 return NULL;
3594 }
3595
3596 return insn;
3597}
3598
3599/* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3600 or 0, if there is none. This routine does not look inside
3601 SEQUENCEs. */
3602
3603rtx_insn *
3604next_real_insn (rtx_insn *insn)
3605{
3606 while (insn)
3607 {
3608 insn = NEXT_INSN (insn);
3609 if (insn == 0 || INSN_P (insn))
3610 break;
3611 }
3612
3613 return insn;
3614}
3615
3616/* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3617 or 0, if there is none. This routine does not look inside
3618 SEQUENCEs. */
3619
3620rtx_insn *
3621prev_real_insn (rtx_insn *insn)
3622{
3623 while (insn)
3624 {
3625 insn = PREV_INSN (insn);
3626 if (insn == 0 || INSN_P (insn))
3627 break;
3628 }
3629
3630 return insn;
3631}
3632
3633/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3634 or 0, if there is none. This routine does not look inside
3635 SEQUENCEs. */
3636
3637rtx_insn *
3638next_real_nondebug_insn (rtx uncast_insn)
3639{
3640 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3641
3642 while (insn)
3643 {
3644 insn = NEXT_INSN (insn);
3645 if (insn == 0 || NONDEBUG_INSN_P (insn))
3646 break;
3647 }
3648
3649 return insn;
3650}
3651
3652/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3653 or 0, if there is none. This routine does not look inside
3654 SEQUENCEs. */
3655
3656rtx_insn *
3657prev_real_nondebug_insn (rtx_insn *insn)
3658{
3659 while (insn)
3660 {
3661 insn = PREV_INSN (insn);
3662 if (insn == 0 || NONDEBUG_INSN_P (insn))
3663 break;
3664 }
3665
3666 return insn;
3667}
3668
3669/* Return the last CALL_INSN in the current list, or 0 if there is none.
3670 This routine does not look inside SEQUENCEs. */
3671
3672rtx_call_insn *
3673last_call_insn (void)
3674{
3675 rtx_insn *insn;
3676
3677 for (insn = get_last_insn ();
3678 insn && !CALL_P (insn);
3679 insn = PREV_INSN (insn))
3680 ;
3681
3682 return safe_as_a <rtx_call_insn *> (insn);
3683}
3684
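/* Return true if INSN is considered active, i.e. it is a real
   instruction rather than a note or barrier; after reload, bare USE
   and CLOBBER patterns no longer count.  */
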
3685bool
3686active_insn_p (const rtx_insn *insn)
3687{
3688 return (CALL_P (insn) || JUMP_P (insn)
3689 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3690 || (NONJUMP_INSN_P (insn)
3691 && (! reload_completed
3692 || (GET_CODE (PATTERN (insn)) != USE
3693 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3694}
3695
3696/* Find the next insn after INSN that really does something. This routine
3697 does not look inside SEQUENCEs. After reload this also skips over
3698 standalone USE and CLOBBER insns. */
3699
3700rtx_insn *
3701next_active_insn (rtx_insn *insn)
3702{
3703 while (insn)
3704 {
3705 insn = NEXT_INSN (insn);
3706 if (insn == 0 || active_insn_p (insn))
3707 break;
3708 }
3709
3710 return insn;
3711}
3712
3713/* Find the last insn before INSN that really does something. This routine
3714 does not look inside SEQUENCEs. After reload this also skips over
3715 standalone USE and CLOBBER insns. */
3716
3717rtx_insn *
3718prev_active_insn (rtx_insn *insn)
3719{
3720 while (insn)
3721 {
3722 insn = PREV_INSN (insn);
3723 if (insn == 0 || active_insn_p (insn))
3724 break;
3725 }
3726
3727 return insn;
3728}
3729
3730/* Return true if X contains an RTX_AUTOINC class rtx whose operand is REG. */
3731
3732static int
3733find_auto_inc (const_rtx x, const_rtx reg)
3734{
3735 subrtx_iterator::array_type array;
3736 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3737 {
3738 const_rtx x = *iter;
3739 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3740 && rtx_equal_p (reg, XEXP (x, 0)))
3741 return true;
3742 }
3743 return false;
3744}
3745
3746/* Increment the label uses for all labels present in rtx. */
3747
3748static void
3749mark_label_nuses (rtx x)
3750{
3751 enum rtx_code code;
3752 int i, j;
3753 const char *fmt;
3754
3755 code = GET_CODE (x);
3756 if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3757 LABEL_NUSES (label_ref_label (x))++;
3758
3759 fmt = GET_RTX_FORMAT (code);
3760 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3761 {
3762 if (fmt[i] == 'e')
3763 mark_label_nuses (XEXP (x, i));
3764 else if (fmt[i] == 'E')
3765 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3766 mark_label_nuses (XVECEXP (x, i, j));
3767 }
3768}
3769
3770
3771/* Try splitting insns that can be split for better scheduling.
3772 PAT is the pattern which might split.
3773 TRIAL is the insn providing PAT.
3774 LAST is nonzero if we should return the last insn of the sequence produced.
3775
3776 If this routine succeeds in splitting, it returns the first or last
3777 replacement insn depending on the value of LAST. Otherwise, it
3778 returns TRIAL. If the insn to be returned can be split, it will be. */
3779
3780rtx_insn *
3781try_split (rtx pat, rtx_insn *trial, int last)
3782{
3783 rtx_insn *before, *after;
3784 rtx note;
3785 rtx_insn *seq, *tem;
3786 profile_probability probability;
3787 rtx_insn *insn_last, *insn;
3788 int njumps = 0;
3789 rtx_insn *call_insn = NULL;
3790
3791 if (any_condjump_p (trial)
3792 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3793 split_branch_probability
3794 = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3795 else
3796 split_branch_probability = profile_probability::uninitialized ();
3797
3798 probability = split_branch_probability;
3799
3800 seq = split_insns (pat, trial);
3801
3802 split_branch_probability = profile_probability::uninitialized ();
3803
3804 if (!seq)
3805 return trial;
3806
3807 int split_insn_count = 0;
3808 /* Avoid infinite loop if any insn of the result matches
3809 the original pattern. */
3810 insn_last = seq;
3811 while (1)
3812 {
3813 if (INSN_P (insn_last)
3814 && rtx_equal_p (PATTERN (insn: insn_last), pat))
3815 return trial;
3816 split_insn_count++;
3817 if (!NEXT_INSN (insn: insn_last))
3818 break;
3819 insn_last = NEXT_INSN (insn: insn_last);
3820 }
3821
3822 /* We're not good at redistributing frame information if
3823 the split occurs before reload or if it results in more
3824 than one insn. */
3825 if (RTX_FRAME_RELATED_P (trial))
3826 {
3827 if (!reload_completed || split_insn_count != 1)
3828 return trial;
3829
3830 rtx_insn *new_insn = seq;
3831 rtx_insn *old_insn = trial;
3832 copy_frame_info_to_split_insn (old_insn, new_insn);
3833 }
3834
3835 /* We will be adding the new sequence to the function. The splitters
3836 may have introduced invalid RTL sharing, so unshare the sequence now. */
3837 unshare_all_rtl_in_chain (insn: seq);
3838
3839 /* Mark labels and copy flags. */
3840 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3841 {
3842 if (JUMP_P (insn))
3843 {
3844 if (JUMP_P (trial))
3845 CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3846 mark_jump_label (PATTERN (insn), insn, 0);
3847 njumps++;
3848 if (probability.initialized_p ()
3849 && any_condjump_p (insn)
3850 && !find_reg_note (insn, REG_BR_PROB, 0))
3851 {
3852 /* We can preserve the REG_BR_PROB notes only if exactly
3853 one jump is created, otherwise the machine description
3854 is responsible for this step, using the
3855 split_branch_probability variable.  */
3856 gcc_assert (njumps == 1);
3857 add_reg_br_prob_note (insn, probability);
3858 }
3859 }
3860 }
3861
3862 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3863 in SEQ and copy any additional information across. */
3864 if (CALL_P (trial))
3865 {
3866 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3867 if (CALL_P (insn))
3868 {
3869 gcc_assert (call_insn == NULL_RTX);
3870 call_insn = insn;
3871
3872 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3873 target may have explicitly specified. */
3874 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3875 while (*p)
3876 p = &XEXP (*p, 1);
3877 *p = CALL_INSN_FUNCTION_USAGE (trial);
3878
3879 /* If the old call was a sibling call, the new one must
3880 be too. */
3881 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3882 }
3883 }
3884
3885 /* Copy notes, particularly those related to the CFG. */
3886 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3887 {
3888 switch (REG_NOTE_KIND (note))
3889 {
3890 case REG_EH_REGION:
3891 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3892 break;
3893
3894 case REG_NORETURN:
3895 case REG_SETJMP:
3896 case REG_TM:
3897 case REG_CALL_NOCF_CHECK:
3898 case REG_CALL_ARG_LOCATION:
3899 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3900 {
3901 if (CALL_P (insn))
3902 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3903 }
3904 break;
3905
3906 case REG_NON_LOCAL_GOTO:
3907 case REG_LABEL_TARGET:
3908 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3909 {
3910 if (JUMP_P (insn))
3911 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3912 }
3913 break;
3914
3915 case REG_INC:
3916 if (!AUTO_INC_DEC)
3917 break;
3918
3919 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3920 {
3921 rtx reg = XEXP (note, 0);
3922 if (!FIND_REG_INC_NOTE (insn, reg)
3923 && find_auto_inc (x: PATTERN (insn), reg))
3924 add_reg_note (insn, REG_INC, reg);
3925 }
3926 break;
3927
3928 case REG_ARGS_SIZE:
3929 fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3930 break;
3931
3932 case REG_CALL_DECL:
3933 case REG_UNTYPED_CALL:
3934 gcc_assert (call_insn != NULL_RTX);
3935 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3936 break;
3937
3938 default:
3939 break;
3940 }
3941 }
3942
3943 /* If there are LABELs inside the split insns, increment their
3944 use counts so we don't delete them.  */
3945 if (INSN_P (trial))
3946 {
3947 insn = insn_last;
3948 while (insn != NULL_RTX)
3949 {
3950 /* JUMP_P insns have already been "marked" above. */
3951 if (NONJUMP_INSN_P (insn))
3952 mark_label_nuses (x: PATTERN (insn));
3953
3954 insn = PREV_INSN (insn);
3955 }
3956 }
3957
3958 before = PREV_INSN (insn: trial);
3959 after = NEXT_INSN (insn: trial);
3960
3961 emit_insn_after_setloc (seq, trial, INSN_LOCATION (insn: trial));
3962
3963 delete_insn (trial);
3964
3965 /* Recursively call try_split for each new insn created; by the
3966 time control returns here that insn will be fully split, so
3967 set LAST and continue from the insn after the one returned.
3968 We can't use next_active_insn here since AFTER may be a note.
3969 Ignore deleted insns, which can occur if not optimizing.  */
3970 for (tem = NEXT_INSN (insn: before); tem != after; tem = NEXT_INSN (insn: tem))
3971 if (! tem->deleted () && INSN_P (tem))
3972 tem = try_split (pat: PATTERN (insn: tem), trial: tem, last: 1);
3973
3974 /* Return either the first or the last insn, depending on which was
3975 requested. */
3976 return last
3977 ? (after ? PREV_INSN (insn: after) : get_last_insn ())
3978 : NEXT_INSN (insn: before);
3979}
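
/* A typical caller walks the whole insn stream and asks for a full
   recursive split of each insn, in the spirit of split_all_insns
   (a minimal sketch that ignores CFG bookkeeping; with LAST nonzero the
   returned insn is the last insn of the replacement, so the loop simply
   continues after it):

     for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (INSN_P (insn))
         insn = try_split (PATTERN (insn), insn, 1);  */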
3980
3981/* Make and return an INSN rtx, initializing all its slots.
3982 Store PATTERN in the pattern slots. */
3983
3984rtx_insn *
3985make_insn_raw (rtx pattern)
3986{
3987 rtx_insn *insn;
3988
3989 insn = as_a <rtx_insn *> (p: rtx_alloc (INSN));
3990
3991 INSN_UID (insn) = cur_insn_uid++;
3992 PATTERN (insn) = pattern;
3993 INSN_CODE (insn) = -1;
3994 REG_NOTES (insn) = NULL;
3995 INSN_LOCATION (insn) = curr_insn_location ();
3996 BLOCK_FOR_INSN (insn) = NULL;
3997
3998#ifdef ENABLE_RTL_CHECKING
3999 if (insn
4000 && INSN_P (insn)
4001 && (returnjump_p (insn)
4002 || (GET_CODE (insn) == SET
4003 && SET_DEST (insn) == pc_rtx)))
4004 {
4005 warning (0, "ICE: %<emit_insn%> used where %<emit_jump_insn%> needed:");
4006 debug_rtx (insn);
4007 }
4008#endif
4009
4010 return insn;
4011}
4012
4013/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
4014
4015static rtx_insn *
4016make_debug_insn_raw (rtx pattern)
4017{
4018 rtx_debug_insn *insn;
4019
4020 insn = as_a <rtx_debug_insn *> (p: rtx_alloc (DEBUG_INSN));
4021 INSN_UID (insn) = cur_debug_insn_uid++;
4022 if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
4023 INSN_UID (insn) = cur_insn_uid++;
4024
4025 PATTERN (insn) = pattern;
4026 INSN_CODE (insn) = -1;
4027 REG_NOTES (insn) = NULL;
4028 INSN_LOCATION (insn) = curr_insn_location ();
4029 BLOCK_FOR_INSN (insn) = NULL;
4030
4031 return insn;
4032}
4033
4034/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
4035
4036static rtx_insn *
4037make_jump_insn_raw (rtx pattern)
4038{
4039 rtx_jump_insn *insn;
4040
4041 insn = as_a <rtx_jump_insn *> (p: rtx_alloc (JUMP_INSN));
4042 INSN_UID (insn) = cur_insn_uid++;
4043
4044 PATTERN (insn) = pattern;
4045 INSN_CODE (insn) = -1;
4046 REG_NOTES (insn) = NULL;
4047 JUMP_LABEL (insn) = NULL;
4048 INSN_LOCATION (insn) = curr_insn_location ();
4049 BLOCK_FOR_INSN (insn) = NULL;
4050
4051 return insn;
4052}
4053
4054/* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
4055
4056static rtx_insn *
4057make_call_insn_raw (rtx pattern)
4058{
4059 rtx_call_insn *insn;
4060
4061 insn = as_a <rtx_call_insn *> (p: rtx_alloc (CALL_INSN));
4062 INSN_UID (insn) = cur_insn_uid++;
4063
4064 PATTERN (insn) = pattern;
4065 INSN_CODE (insn) = -1;
4066 REG_NOTES (insn) = NULL;
4067 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4068 INSN_LOCATION (insn) = curr_insn_location ();
4069 BLOCK_FOR_INSN (insn) = NULL;
4070
4071 return insn;
4072}
4073
4074/* Like `make_insn_raw' but make a NOTE instead of an insn. */
4075
4076static rtx_note *
4077make_note_raw (enum insn_note subtype)
4078{
4079 /* Some notes are never created this way at all. These notes are
4080 only created by patching out insns. */
4081 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4082 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4083
4084 rtx_note *note = as_a <rtx_note *> (p: rtx_alloc (NOTE));
4085 INSN_UID (insn: note) = cur_insn_uid++;
4086 NOTE_KIND (note) = subtype;
4087 BLOCK_FOR_INSN (insn: note) = NULL;
4088 memset (s: &NOTE_DATA (note), c: 0, n: sizeof (NOTE_DATA (note)));
4089 return note;
4090}
4091
4092/* Link INSN into the doubly-linked list between PREV and NEXT.
4093 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4094 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
4095
4096static inline void
4097link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4098{
4099 SET_PREV_INSN (insn) = prev;
4100 SET_NEXT_INSN (insn) = next;
4101 if (prev != NULL)
4102 {
4103 SET_NEXT_INSN (prev) = insn;
4104 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4105 {
4106 rtx_sequence *sequence = as_a <rtx_sequence *> (p: PATTERN (insn: prev));
4107 SET_NEXT_INSN (sequence->insn (index: sequence->len () - 1)) = insn;
4108 }
4109 }
4110 if (next != NULL)
4111 {
4112 SET_PREV_INSN (next) = insn;
4113 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4114 {
4115 rtx_sequence *sequence = as_a <rtx_sequence *> (p: PATTERN (insn: next));
4116 SET_PREV_INSN (sequence->insn (index: 0)) = insn;
4117 }
4118 }
4119
4120 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4121 {
4122 rtx_sequence *sequence = as_a <rtx_sequence *> (p: PATTERN (insn));
4123 SET_PREV_INSN (sequence->insn (index: 0)) = prev;
4124 SET_NEXT_INSN (sequence->insn (index: sequence->len () - 1)) = next;
4125 }
4126}
4127
4128/* Add INSN to the end of the doubly-linked list.
4129 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
4130
4131void
4132add_insn (rtx_insn *insn)
4133{
4134 rtx_insn *prev = get_last_insn ();
4135 link_insn_into_chain (insn, prev, NULL);
4136 if (get_insns () == NULL)
4137 set_first_insn (insn);
4138 set_last_insn (insn);
4139}
4140
4141/* Add INSN into the doubly-linked list after insn AFTER. */
4142
4143static void
4144add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4145{
4146 rtx_insn *next = NEXT_INSN (insn: after);
4147
4148 gcc_assert (!optimize || !after->deleted ());
4149
4150 link_insn_into_chain (insn, prev: after, next);
4151
4152 if (next == NULL)
4153 {
4154 struct sequence_stack *seq;
4155
4156 for (seq = get_current_sequence (); seq; seq = seq->next)
4157 if (after == seq->last)
4158 {
4159 seq->last = insn;
4160 break;
4161 }
4162 }
4163}
4164
4165/* Add INSN into the doubly-linked list before insn BEFORE. */
4166
4167static void
4168add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4169{
4170 rtx_insn *prev = PREV_INSN (insn: before);
4171
4172 gcc_assert (!optimize || !before->deleted ());
4173
4174 link_insn_into_chain (insn, prev, next: before);
4175
4176 if (prev == NULL)
4177 {
4178 struct sequence_stack *seq;
4179
4180 for (seq = get_current_sequence (); seq; seq = seq->next)
4181 if (before == seq->first)
4182 {
4183 seq->first = insn;
4184 break;
4185 }
4186
4187 gcc_assert (seq);
4188 }
4189}
4190
4191/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4192 If BB is NULL, an attempt is made to infer the bb from AFTER.
4193
4194 This and the next function should be the only functions called
4195 to insert an insn once delay slots have been filled since only
4196 they know how to update a SEQUENCE. */
4197
4198void
4199add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4200{
4201 add_insn_after_nobb (insn, after);
4202 if (!BARRIER_P (after)
4203 && !BARRIER_P (insn)
4204 && (bb = BLOCK_FOR_INSN (insn: after)))
4205 {
4206 set_block_for_insn (insn, bb);
4207 if (INSN_P (insn))
4208 df_insn_rescan (insn);
4209 /* Should not happen as first in the BB is always
4210 either NOTE or LABEL. */
4211 if (BB_END (bb) == after
4212 /* Avoid clobbering of structure when creating new BB. */
4213 && !BARRIER_P (insn)
4214 && !NOTE_INSN_BASIC_BLOCK_P (insn))
4215 BB_END (bb) = insn;
4216 }
4217}
4218
4219/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4220 If BB is NULL, an attempt is made to infer the bb from BEFORE.
4221
4222 This and the previous function should be the only functions called
4223 to insert an insn once delay slots have been filled since only
4224 they know how to update a SEQUENCE. */
4225
4226void
4227add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4228{
4229 add_insn_before_nobb (insn, before);
4230
4231 if (!bb
4232 && !BARRIER_P (before)
4233 && !BARRIER_P (insn))
4234 bb = BLOCK_FOR_INSN (insn: before);
4235
4236 if (bb)
4237 {
4238 set_block_for_insn (insn, bb);
4239 if (INSN_P (insn))
4240 df_insn_rescan (insn);
4241 /* Should not happen as first in the BB is always either NOTE or
4242 LABEL. */
4243 gcc_assert (BB_HEAD (bb) != insn
4244 /* Avoid clobbering of structure when creating new BB. */
4245 || BARRIER_P (insn)
4246 || NOTE_INSN_BASIC_BLOCK_P (insn));
4247 }
4248}
4249
4250/* Replace INSN with a deleted instruction note.  */
4251
4252void
4253set_insn_deleted (rtx_insn *insn)
4254{
4255 if (INSN_P (insn))
4256 df_insn_delete (insn);
4257 PUT_CODE (insn, NOTE);
4258 NOTE_KIND (insn) = NOTE_INSN_DELETED;
4259}
4260
4261
4262/* Unlink INSN from the insn chain.
4263
4264 This function knows how to handle sequences.
4265
4266 This function does not invalidate data flow information associated with
4267 INSN (i.e. does not call df_insn_delete). That makes this function
4268 usable for merely disconnecting an insn from the chain so that it
4269 can be re-emitted elsewhere later.
4270
4271 To later insert INSN elsewhere in the insn chain via add_insn and
4272 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4273 the caller. Nullifying them here breaks many insn chain walks.
4274
4275 To really delete an insn and related DF information, use delete_insn. */
4276
4277void
4278remove_insn (rtx_insn *insn)
4279{
4280 rtx_insn *next = NEXT_INSN (insn);
4281 rtx_insn *prev = PREV_INSN (insn);
4282 basic_block bb;
4283
4284 if (prev)
4285 {
4286 SET_NEXT_INSN (prev) = next;
4287 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4288 {
4289 rtx_sequence *sequence = as_a <rtx_sequence *> (p: PATTERN (insn: prev));
4290 SET_NEXT_INSN (sequence->insn (index: sequence->len () - 1)) = next;
4291 }
4292 }
4293 else
4294 {
4295 struct sequence_stack *seq;
4296
4297 for (seq = get_current_sequence (); seq; seq = seq->next)
4298 if (insn == seq->first)
4299 {
4300 seq->first = next;
4301 break;
4302 }
4303
4304 gcc_assert (seq);
4305 }
4306
4307 if (next)
4308 {
4309 SET_PREV_INSN (next) = prev;
4310 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4311 {
4312 rtx_sequence *sequence = as_a <rtx_sequence *> (p: PATTERN (insn: next));
4313 SET_PREV_INSN (sequence->insn (index: 0)) = prev;
4314 }
4315 }
4316 else
4317 {
4318 struct sequence_stack *seq;
4319
4320 for (seq = get_current_sequence (); seq; seq = seq->next)
4321 if (insn == seq->last)
4322 {
4323 seq->last = prev;
4324 break;
4325 }
4326
4327 gcc_assert (seq);
4328 }
4329
4330 /* Fix up basic block boundaries, if necessary. */
4331 if (!BARRIER_P (insn)
4332 && (bb = BLOCK_FOR_INSN (insn)))
4333 {
4334 if (BB_HEAD (bb) == insn)
4335 {
4336 /* Never ever delete the basic block note without deleting whole
4337 basic block. */
4338 gcc_assert (!NOTE_P (insn));
4339 BB_HEAD (bb) = next;
4340 }
4341 if (BB_END (bb) == insn)
4342 BB_END (bb) = prev;
4343 }
4344}
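
/* For example, to move INSN so that it immediately follows AFTER while
   keeping its dataflow information (a minimal sketch; note the explicit
   nullification of the chain pointers required by the comment above):

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, after, NULL);  */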
4345
4346/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4347
4348void
4349add_function_usage_to (rtx call_insn, rtx call_fusage)
4350{
4351 gcc_assert (call_insn && CALL_P (call_insn));
4352
4353 /* Put the register usage information on the CALL. If there is already
4354 some usage information, put ours at the end. */
4355 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4356 {
4357 rtx link;
4358
4359 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4360 link = XEXP (link, 1))
4361 ;
4362
4363 XEXP (link, 1) = call_fusage;
4364 }
4365 else
4366 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4367}
4368
4369/* Delete all insns made since FROM.
4370 FROM becomes the new last instruction. */
4371
4372void
4373delete_insns_since (rtx_insn *from)
4374{
4375 if (from == 0)
4376 set_first_insn (0);
4377 else
4378 SET_NEXT_INSN (from) = 0;
4379 set_last_insn (from);
4380}
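
/* This supports the usual trial-expansion idiom: remember the current
   last insn, try to emit something, and discard the attempt on failure
   (a minimal sketch; expand_something is a hypothetical expander):

     rtx_insn *last = get_last_insn ();
     rtx result = expand_something (target, op0, op1);
     if (result == NULL_RTX)
       delete_insns_since (last);  */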
4381
4382/* This function is deprecated, please use sequences instead.
4383
4384 Move a consecutive bunch of insns to a different place in the chain.
4385 The insns to be moved are those between FROM and TO.
4386 They are moved to a new position after the insn AFTER.
4387 AFTER must not be FROM or TO or any insn in between.
4388
4389 This function does not know about SEQUENCEs and hence should not be
4390 called after delay-slot filling has been done. */
4391
4392void
4393reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4394{
4395 if (flag_checking)
4396 {
4397 for (rtx_insn *x = from; x != to; x = NEXT_INSN (insn: x))
4398 gcc_assert (after != x);
4399 gcc_assert (after != to);
4400 }
4401
4402 /* Splice this bunch out of where it is now. */
4403 if (PREV_INSN (insn: from))
4404 SET_NEXT_INSN (PREV_INSN (insn: from)) = NEXT_INSN (insn: to);
4405 if (NEXT_INSN (insn: to))
4406 SET_PREV_INSN (NEXT_INSN (insn: to)) = PREV_INSN (insn: from);
4407 if (get_last_insn () == to)
4408 set_last_insn (PREV_INSN (insn: from));
4409 if (get_insns () == from)
4410 set_first_insn (NEXT_INSN (insn: to));
4411
4412 /* Make the new neighbors point to it and it to them. */
4413 if (NEXT_INSN (insn: after))
4414 SET_PREV_INSN (NEXT_INSN (insn: after)) = to;
4415
4416 SET_NEXT_INSN (to) = NEXT_INSN (insn: after);
4417 SET_PREV_INSN (from) = after;
4418 SET_NEXT_INSN (after) = from;
4419 if (after == get_last_insn ())
4420 set_last_insn (to);
4421}
4422
4423/* Same as the function above, but take care to update BB boundaries.  */
4424void
4425reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4426{
4427 rtx_insn *prev = PREV_INSN (insn: from);
4428 basic_block bb, bb2;
4429
4430 reorder_insns_nobb (from, to, after);
4431
4432 if (!BARRIER_P (after)
4433 && (bb = BLOCK_FOR_INSN (insn: after)))
4434 {
4435 rtx_insn *x;
4436 df_set_bb_dirty (bb);
4437
4438 if (!BARRIER_P (from)
4439 && (bb2 = BLOCK_FOR_INSN (insn: from)))
4440 {
4441 if (BB_END (bb2) == to)
4442 BB_END (bb2) = prev;
4443 df_set_bb_dirty (bb2);
4444 }
4445
4446 if (BB_END (bb) == after)
4447 BB_END (bb) = to;
4448
4449 for (x = from; x != NEXT_INSN (insn: to); x = NEXT_INSN (insn: x))
4450 if (!BARRIER_P (x))
4451 df_insn_change_bb (x, bb);
4452 }
4453}
4454
4455
4456/* Emit insn(s) of given code and pattern
4457 at a specified place within the doubly-linked list.
4458
4459 All of the emit_foo global entry points accept an object
4460 X which is either an insn list or a PATTERN of a single
4461 instruction.
4462
4463 There are thus a few canonical ways to generate code and
4464 emit it at a specific place in the instruction stream. For
4465 example, consider the instruction named SPOT and the fact that
4466 we would like to emit some instructions before SPOT. We might
4467 do it like this:
4468
4469 start_sequence ();
4470 ... emit the new instructions ...
4471 insns_head = get_insns ();
4472 end_sequence ();
4473
4474 emit_insn_before (insns_head, SPOT);
4475
4476 It used to be common to generate SEQUENCE rtl instead, but that
4477 is a relic of the past that no longer occurs. The reason is that
4478 SEQUENCE rtl results in badly fragmented RTL memory: the SEQUENCE
4479 generated would almost certainly die right after it was created. */
4480
4481static rtx_insn *
4482emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4483 basic_block bb,
4484 rtx_insn *(*make_raw) (rtx))
4485{
4486 rtx_insn *insn;
4487
4488 gcc_assert (before);
4489
4490 if (x == NULL_RTX)
4491 return last;
4492
4493 switch (GET_CODE (x))
4494 {
4495 case DEBUG_INSN:
4496 case INSN:
4497 case JUMP_INSN:
4498 case CALL_INSN:
4499 case CODE_LABEL:
4500 case BARRIER:
4501 case NOTE:
4502 insn = as_a <rtx_insn *> (p: x);
4503 while (insn)
4504 {
4505 rtx_insn *next = NEXT_INSN (insn);
4506 add_insn_before (insn, before, bb);
4507 last = insn;
4508 insn = next;
4509 }
4510 break;
4511
4512#ifdef ENABLE_RTL_CHECKING
4513 case SEQUENCE:
4514 gcc_unreachable ();
4515 break;
4516#endif
4517
4518 default:
4519 last = (*make_raw) (x);
4520 add_insn_before (insn: last, before, bb);
4521 break;
4522 }
4523
4524 return last;
4525}
4526
4527/* Make X be output before the instruction BEFORE. */
4528
4529rtx_insn *
4530emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4531{
4532 return emit_pattern_before_noloc (x, before, last: before, bb, make_raw: make_insn_raw);
4533}
4534
4535/* Make an instruction with body X and code JUMP_INSN
4536 and output it before the instruction BEFORE. */
4537
4538rtx_jump_insn *
4539emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4540{
4541 return as_a <rtx_jump_insn *> (
4542 p: emit_pattern_before_noloc (x, before, NULL, NULL,
4543 make_raw: make_jump_insn_raw));
4544}
4545
4546/* Make an instruction with body X and code CALL_INSN
4547 and output it before the instruction BEFORE. */
4548
4549rtx_insn *
4550emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4551{
4552 return emit_pattern_before_noloc (x, before, NULL, NULL,
4553 make_raw: make_call_insn_raw);
4554}
4555
4556/* Make an instruction with body X and code DEBUG_INSN
4557 and output it before the instruction BEFORE. */
4558
4559rtx_insn *
4560emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4561{
4562 return emit_pattern_before_noloc (x, before, NULL, NULL,
4563 make_raw: make_debug_insn_raw);
4564}
4565
4566/* Make an insn of code BARRIER
4567 and output it before the insn BEFORE. */
4568
4569rtx_barrier *
4570emit_barrier_before (rtx_insn *before)
4571{
4572 rtx_barrier *insn = as_a <rtx_barrier *> (p: rtx_alloc (BARRIER));
4573
4574 INSN_UID (insn) = cur_insn_uid++;
4575
4576 add_insn_before (insn, before, NULL);
4577 return insn;
4578}
4579
4580/* Emit the label LABEL before the insn BEFORE. */
4581
4582rtx_code_label *
4583emit_label_before (rtx_code_label *label, rtx_insn *before)
4584{
4585 gcc_checking_assert (INSN_UID (label) == 0);
4586 INSN_UID (insn: label) = cur_insn_uid++;
4587 add_insn_before (insn: label, before, NULL);
4588 return label;
4589}
4590
4591/* Helper for emit_insn_after, handles lists of instructions
4592 efficiently. */
4593
4594static rtx_insn *
4595emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4596{
4597 rtx_insn *last;
4598 rtx_insn *after_after;
4599 if (!bb && !BARRIER_P (after))
4600 bb = BLOCK_FOR_INSN (insn: after);
4601
4602 if (bb)
4603 {
4604 df_set_bb_dirty (bb);
4605 for (last = first; NEXT_INSN (insn: last); last = NEXT_INSN (insn: last))
4606 if (!BARRIER_P (last))
4607 {
4608 set_block_for_insn (insn: last, bb);
4609 df_insn_rescan (last);
4610 }
4611 if (!BARRIER_P (last))
4612 {
4613 set_block_for_insn (insn: last, bb);
4614 df_insn_rescan (last);
4615 }
4616 if (BB_END (bb) == after)
4617 BB_END (bb) = last;
4618 }
4619 else
4620 for (last = first; NEXT_INSN (insn: last); last = NEXT_INSN (insn: last))
4621 continue;
4622
4623 after_after = NEXT_INSN (insn: after);
4624
4625 SET_NEXT_INSN (after) = first;
4626 SET_PREV_INSN (first) = after;
4627 SET_NEXT_INSN (last) = after_after;
4628 if (after_after)
4629 SET_PREV_INSN (after_after) = last;
4630
4631 if (after == get_last_insn ())
4632 set_last_insn (last);
4633
4634 return last;
4635}
4636
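/* Insert X after AFTER, or return AFTER if X is null.  BB and MAKE_RAW
   have the same meaning as for emit_pattern_before_noloc.  */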
4637static rtx_insn *
4638emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4639 rtx_insn *(*make_raw)(rtx))
4640{
4641 rtx_insn *last = after;
4642
4643 gcc_assert (after);
4644
4645 if (x == NULL_RTX)
4646 return last;
4647
4648 switch (GET_CODE (x))
4649 {
4650 case DEBUG_INSN:
4651 case INSN:
4652 case JUMP_INSN:
4653 case CALL_INSN:
4654 case CODE_LABEL:
4655 case BARRIER:
4656 case NOTE:
4657 last = emit_insn_after_1 (first: as_a <rtx_insn *> (p: x), after, bb);
4658 break;
4659
4660#ifdef ENABLE_RTL_CHECKING
4661 case SEQUENCE:
4662 gcc_unreachable ();
4663 break;
4664#endif
4665
4666 default:
4667 last = (*make_raw) (x);
4668 add_insn_after (insn: last, after, bb);
4669 break;
4670 }
4671
4672 return last;
4673}
4674
4675/* Make X be output after the insn AFTER and set the BB of insn. If
4676 BB is NULL, an attempt is made to infer the BB from AFTER. */
4677
4678rtx_insn *
4679emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4680{
4681 return emit_pattern_after_noloc (x, after, bb, make_raw: make_insn_raw);
4682}
4683
4684
4685/* Make an insn of code JUMP_INSN with body X
4686 and output it after the insn AFTER. */
4687
4688rtx_jump_insn *
4689emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4690{
4691 return as_a <rtx_jump_insn *> (
4692 p: emit_pattern_after_noloc (x, after, NULL, make_raw: make_jump_insn_raw));
4693}
4694
4695/* Make an instruction with body X and code CALL_INSN
4696 and output it after the instruction AFTER. */
4697
4698rtx_insn *
4699emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4700{
4701 return emit_pattern_after_noloc (x, after, NULL, make_raw: make_call_insn_raw);
4702}
4703
4704/* Make an instruction with body X and code DEBUG_INSN
4705 and output it after the instruction AFTER. */
4706
4707rtx_insn *
4708emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4709{
4710 return emit_pattern_after_noloc (x, after, NULL, make_raw: make_debug_insn_raw);
4711}
4712
4713/* Make an insn of code BARRIER
4714 and output it after the insn AFTER. */
4715
4716rtx_barrier *
4717emit_barrier_after (rtx_insn *after)
4718{
4719 rtx_barrier *insn = as_a <rtx_barrier *> (p: rtx_alloc (BARRIER));
4720
4721 INSN_UID (insn) = cur_insn_uid++;
4722
4723 add_insn_after (insn, after, NULL);
4724 return insn;
4725}
4726
4727/* Emit the label LABEL after the insn AFTER. */
4728
4729rtx_insn *
4730emit_label_after (rtx_insn *label, rtx_insn *after)
4731{
4732 gcc_checking_assert (INSN_UID (label) == 0);
4733 INSN_UID (insn: label) = cur_insn_uid++;
4734 add_insn_after (insn: label, after, NULL);
4735 return label;
4736}
4737
4738/* Notes require a bit of special handling: Some notes need to have their
4739 BLOCK_FOR_INSN set, others should never have it set, and some should
4740 have it set or clear depending on the context. */
4741
4742/* Return true iff a note of kind SUBTYPE should be emitted with routines
4743 that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
4744 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4745
4746static bool
4747note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4748{
4749 switch (subtype)
4750 {
4751 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4752 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4753 return true;
4754
4755 /* Notes for var tracking and EH region markers can appear between or
4756 inside basic blocks. If the caller is emitting on the basic block
4757 boundary, do not set BLOCK_FOR_INSN on the new note. */
4758 case NOTE_INSN_VAR_LOCATION:
4759 case NOTE_INSN_EH_REGION_BEG:
4760 case NOTE_INSN_EH_REGION_END:
4761 return on_bb_boundary_p;
4762
4763 /* Otherwise, BLOCK_FOR_INSN must be set. */
4764 default:
4765 return false;
4766 }
4767}
4768
4769/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4770
4771rtx_note *
4772emit_note_after (enum insn_note subtype, rtx_insn *after)
4773{
4774 rtx_note *note = make_note_raw (subtype);
4775 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (insn: after);
4776 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4777
4778 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4779 add_insn_after_nobb (insn: note, after);
4780 else
4781 add_insn_after (insn: note, after, bb);
4782 return note;
4783}
4784
4785/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4786
4787rtx_note *
4788emit_note_before (enum insn_note subtype, rtx_insn *before)
4789{
4790 rtx_note *note = make_note_raw (subtype);
4791 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (insn: before);
4792 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4793
4794 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4795 add_insn_before_nobb (insn: note, before);
4796 else
4797 add_insn_before (insn: note, before, bb);
4798 return note;
4799}
4800
4801/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4802 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4803
4804static rtx_insn *
4805emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4806 rtx_insn *(*make_raw) (rtx))
4807{
4808 rtx_insn *last = emit_pattern_after_noloc (x: pattern, after, NULL, make_raw);
4809
4810 if (pattern == NULL_RTX || !loc)
4811 return last;
4812
4813 after = NEXT_INSN (insn: after);
4814 while (1)
4815 {
4816 if (active_insn_p (insn: after)
4817 && !JUMP_TABLE_DATA_P (after) /* FIXME */
4818 && !INSN_LOCATION (insn: after))
4819 INSN_LOCATION (insn: after) = loc;
4820 if (after == last)
4821 break;
4822 after = NEXT_INSN (insn: after);
4823 }
4824 return last;
4825}
4826
4827/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4828 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4829 any DEBUG_INSNs. */
4830
4831static rtx_insn *
4832emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4833 rtx_insn *(*make_raw) (rtx))
4834{
4835 rtx_insn *prev = after;
4836
4837 if (skip_debug_insns)
4838 while (DEBUG_INSN_P (prev))
4839 prev = PREV_INSN (insn: prev);
4840
4841 if (INSN_P (prev))
4842 return emit_pattern_after_setloc (pattern, after, loc: INSN_LOCATION (insn: prev),
4843 make_raw);
4844 else
4845 return emit_pattern_after_noloc (x: pattern, after, NULL, make_raw);
4846}
4847
4848/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4849rtx_insn *
4850emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4851{
4852 return emit_pattern_after_setloc (pattern, after, loc, make_raw: make_insn_raw);
4853}
4854
4855/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4856rtx_insn *
4857emit_insn_after (rtx pattern, rtx_insn *after)
4858{
4859 return emit_pattern_after (pattern, after, skip_debug_insns: true, make_raw: make_insn_raw);
4860}
4861
4862/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4863rtx_jump_insn *
4864emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4865{
4866 return as_a <rtx_jump_insn *> (
4867 p: emit_pattern_after_setloc (pattern, after, loc, make_raw: make_jump_insn_raw));
4868}
4869
4870/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4871rtx_jump_insn *
4872emit_jump_insn_after (rtx pattern, rtx_insn *after)
4873{
4874 return as_a <rtx_jump_insn *> (
4875 p: emit_pattern_after (pattern, after, skip_debug_insns: true, make_raw: make_jump_insn_raw));
4876}
4877
4878/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4879rtx_insn *
4880emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4881{
4882 return emit_pattern_after_setloc (pattern, after, loc, make_raw: make_call_insn_raw);
4883}
4884
4885/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4886rtx_insn *
4887emit_call_insn_after (rtx pattern, rtx_insn *after)
4888{
4889 return emit_pattern_after (pattern, after, skip_debug_insns: true, make_raw: make_call_insn_raw);
4890}
4891
4892/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4893rtx_insn *
4894emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4895{
4896 return emit_pattern_after_setloc (pattern, after, loc, make_raw: make_debug_insn_raw);
4897}
4898
4899/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4900rtx_insn *
4901emit_debug_insn_after (rtx pattern, rtx_insn *after)
4902{
4903 return emit_pattern_after (pattern, after, skip_debug_insns: false, make_raw: make_debug_insn_raw);
4904}
4905
4906/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4907 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4908 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4909 CALL_INSN, etc. */
4910
4911static rtx_insn *
4912emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4913 bool insnp, rtx_insn *(*make_raw) (rtx))
4914{
4915 rtx_insn *first = PREV_INSN (insn: before);
4916 rtx_insn *last = emit_pattern_before_noloc (x: pattern, before,
4917 last: insnp ? before : NULL,
4918 NULL, make_raw);
4919
4920 if (pattern == NULL_RTX || !loc)
4921 return last;
4922
4923 if (!first)
4924 first = get_insns ();
4925 else
4926 first = NEXT_INSN (insn: first);
4927 while (1)
4928 {
4929 if (active_insn_p (insn: first)
4930 && !JUMP_TABLE_DATA_P (first) /* FIXME */
4931 && !INSN_LOCATION (insn: first))
4932 INSN_LOCATION (insn: first) = loc;
4933 if (first == last)
4934 break;
4935 first = NEXT_INSN (insn: first);
4936 }
4937 return last;
4938}
4939
4940/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4941 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4942 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4943 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4944
4945static rtx_insn *
4946emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
4947 bool insnp, rtx_insn *(*make_raw) (rtx))
4948{
4949 rtx_insn *next = before;
4950
4951 if (skip_debug_insns)
4952 while (DEBUG_INSN_P (next))
4953 next = PREV_INSN (insn: next);
4954
4955 if (INSN_P (next))
4956 return emit_pattern_before_setloc (pattern, before, loc: INSN_LOCATION (insn: next),
4957 insnp, make_raw);
4958 else
4959 return emit_pattern_before_noloc (x: pattern, before,
4960 last: insnp ? before : NULL,
4961 NULL, make_raw);
4962}
4963
4964/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4965rtx_insn *
4966emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4967{
4968 return emit_pattern_before_setloc (pattern, before, loc, insnp: true,
4969 make_raw: make_insn_raw);
4970}
4971
4972/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4973rtx_insn *
4974emit_insn_before (rtx pattern, rtx_insn *before)
4975{
4976 return emit_pattern_before (pattern, before, skip_debug_insns: true, insnp: true, make_raw: make_insn_raw);
4977}
4978
4979/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4980rtx_jump_insn *
4981emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4982{
4983 return as_a <rtx_jump_insn *> (
4984 p: emit_pattern_before_setloc (pattern, before, loc, insnp: false,
4985 make_raw: make_jump_insn_raw));
4986}
4987
4988/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4989rtx_jump_insn *
4990emit_jump_insn_before (rtx pattern, rtx_insn *before)
4991{
4992 return as_a <rtx_jump_insn *> (
4993 p: emit_pattern_before (pattern, before, skip_debug_insns: true, insnp: false,
4994 make_raw: make_jump_insn_raw));
4995}
4996
4997/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4998rtx_insn *
4999emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5000{
5001 return emit_pattern_before_setloc (pattern, before, loc, insnp: false,
5002 make_raw: make_call_insn_raw);
5003}
5004
5005/* Like emit_call_insn_before_noloc,
5006 but set INSN_LOCATION according to BEFORE. */
5007rtx_insn *
5008emit_call_insn_before (rtx pattern, rtx_insn *before)
5009{
5010 return emit_pattern_before (pattern, before, skip_debug_insns: true, insnp: false,
5011 make_raw: make_call_insn_raw);
5012}
5013
5014/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
5015rtx_insn *
5016emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5017{
5018 return emit_pattern_before_setloc (pattern, before, loc, insnp: false,
5019 make_raw: make_debug_insn_raw);
5020}
5021
5022/* Like emit_debug_insn_before_noloc,
5023 but set INSN_LOCATION according to BEFORE. */
5024rtx_insn *
5025emit_debug_insn_before (rtx pattern, rtx_insn *before)
5026{
5027 return emit_pattern_before (pattern, before, skip_debug_insns: false, insnp: false,
5028 make_raw: make_debug_insn_raw);
5029}
5030
5031/* Take X and emit it at the end of the doubly-linked
5032 INSN list.
5033
5034 Returns the last insn emitted. */
5035
5036rtx_insn *
5037emit_insn (rtx x)
5038{
5039 rtx_insn *last = get_last_insn ();
5040 rtx_insn *insn;
5041
5042 if (x == NULL_RTX)
5043 return last;
5044
5045 switch (GET_CODE (x))
5046 {
5047 case DEBUG_INSN:
5048 case INSN:
5049 case JUMP_INSN:
5050 case CALL_INSN:
5051 case CODE_LABEL:
5052 case BARRIER:
5053 case NOTE:
5054 insn = as_a <rtx_insn *> (p: x);
5055 while (insn)
5056 {
5057 rtx_insn *next = NEXT_INSN (insn);
5058 add_insn (insn);
5059 last = insn;
5060 insn = next;
5061 }
5062 break;
5063
5064#ifdef ENABLE_RTL_CHECKING
5065 case JUMP_TABLE_DATA:
5066 case SEQUENCE:
5067 gcc_unreachable ();
5068 break;
5069#endif
5070
5071 default:
5072 last = make_insn_raw (pattern: x);
5073 add_insn (insn: last);
5074 break;
5075 }
5076
5077 return last;
5078}
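
/* For example, to emit a simple register addition whose pattern is built
   directly with the rtx generators (a minimal sketch; TARGET, OP0 and OP1
   are assumed to be existing REGs of mode MODE):

     emit_insn (gen_rtx_SET (target, gen_rtx_PLUS (mode, op0, op1)));  */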
5079
5080/* Make an insn of code DEBUG_INSN with pattern X
5081 and add it to the end of the doubly-linked list. */
5082
5083rtx_insn *
5084emit_debug_insn (rtx x)
5085{
5086 rtx_insn *last = get_last_insn ();
5087 rtx_insn *insn;
5088
5089 if (x == NULL_RTX)
5090 return last;
5091
5092 switch (GET_CODE (x))
5093 {
5094 case DEBUG_INSN:
5095 case INSN:
5096 case JUMP_INSN:
5097 case CALL_INSN:
5098 case CODE_LABEL:
5099 case BARRIER:
5100 case NOTE:
5101 insn = as_a <rtx_insn *> (p: x);
5102 while (insn)
5103 {
5104 rtx_insn *next = NEXT_INSN (insn);
5105 add_insn (insn);
5106 last = insn;
5107 insn = next;
5108 }
5109 break;
5110
5111#ifdef ENABLE_RTL_CHECKING
5112 case JUMP_TABLE_DATA:
5113 case SEQUENCE:
5114 gcc_unreachable ();
5115 break;
5116#endif
5117
5118 default:
5119 last = make_debug_insn_raw (pattern: x);
5120 add_insn (insn: last);
5121 break;
5122 }
5123
5124 return last;
5125}
5126
5127/* Make an insn of code JUMP_INSN with pattern X
5128 and add it to the end of the doubly-linked list. */
5129
5130rtx_insn *
5131emit_jump_insn (rtx x)
5132{
5133 rtx_insn *last = NULL;
5134 rtx_insn *insn;
5135
5136 switch (GET_CODE (x))
5137 {
5138 case DEBUG_INSN:
5139 case INSN:
5140 case JUMP_INSN:
5141 case CALL_INSN:
5142 case CODE_LABEL:
5143 case BARRIER:
5144 case NOTE:
5145 insn = as_a <rtx_insn *> (p: x);
5146 while (insn)
5147 {
5148 rtx_insn *next = NEXT_INSN (insn);
5149 add_insn (insn);
5150 last = insn;
5151 insn = next;
5152 }
5153 break;
5154
5155#ifdef ENABLE_RTL_CHECKING
5156 case JUMP_TABLE_DATA:
5157 case SEQUENCE:
5158 gcc_unreachable ();
5159 break;
5160#endif
5161
5162 default:
5163 last = make_jump_insn_raw (pattern: x);
5164 add_insn (insn: last);
5165 break;
5166 }
5167
5168 return last;
5169}
5170
5171/* Make an insn of code JUMP_INSN with pattern X,
5172 add a REG_BR_PROB note that indicates very likely probability,
5173 and add it to the end of the doubly-linked list. */
5174
5175rtx_insn *
5176emit_likely_jump_insn (rtx x)
5177{
5178 rtx_insn *jump = emit_jump_insn (x);
5179 add_reg_br_prob_note (jump, profile_probability::very_likely ());
5180 return jump;
5181}
5182
5183/* Make an insn of code JUMP_INSN with pattern X,
5184 add a REG_BR_PROB note that indicates very unlikely probability,
5185 and add it to the end of the doubly-linked list. */
5186
5187rtx_insn *
5188emit_unlikely_jump_insn (rtx x)
5189{
5190 rtx_insn *jump = emit_jump_insn (x);
5191 add_reg_br_prob_note (jump, profile_probability::very_unlikely ());
5192 return jump;
5193}
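
/* For example, a target expander emitting a conditional branch that is
   expected to be taken only rarely might do (a minimal sketch; COND is an
   already-built comparison rtx and LABEL an existing code label):

     rtx x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond,
                                   gen_rtx_LABEL_REF (VOIDmode, label),
                                   pc_rtx);
     emit_unlikely_jump_insn (gen_rtx_SET (pc_rtx, x));  */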
5194
5195/* Make an insn of code CALL_INSN with pattern X
5196 and add it to the end of the doubly-linked list. */
5197
5198rtx_insn *
5199emit_call_insn (rtx x)
5200{
5201 rtx_insn *insn;
5202
5203 switch (GET_CODE (x))
5204 {
5205 case DEBUG_INSN:
5206 case INSN:
5207 case JUMP_INSN:
5208 case CALL_INSN:
5209 case CODE_LABEL:
5210 case BARRIER:
5211 case NOTE:
5212 insn = emit_insn (x);
5213 break;
5214
5215#ifdef ENABLE_RTL_CHECKING
5216 case SEQUENCE:
5217 case JUMP_TABLE_DATA:
5218 gcc_unreachable ();
5219 break;
5220#endif
5221
5222 default:
5223 insn = make_call_insn_raw (pattern: x);
5224 add_insn (insn);
5225 break;
5226 }
5227
5228 return insn;
5229}
5230
5231/* Add the label LABEL to the end of the doubly-linked list. */
5232
5233rtx_code_label *
5234emit_label (rtx uncast_label)
5235{
5236 rtx_code_label *label = as_a <rtx_code_label *> (p: uncast_label);
5237
5238 gcc_checking_assert (INSN_UID (label) == 0);
5239 INSN_UID (insn: label) = cur_insn_uid++;
5240 add_insn (insn: label);
5241 return label;
5242}
5243
5244/* Make an insn of code JUMP_TABLE_DATA
5245 and add it to the end of the doubly-linked list. */
5246
5247rtx_jump_table_data *
5248emit_jump_table_data (rtx table)
5249{
5250 rtx_jump_table_data *jump_table_data =
5251 as_a <rtx_jump_table_data *> (p: rtx_alloc (JUMP_TABLE_DATA));
5252 INSN_UID (insn: jump_table_data) = cur_insn_uid++;
5253 PATTERN (insn: jump_table_data) = table;
5254 BLOCK_FOR_INSN (insn: jump_table_data) = NULL;
5255 add_insn (insn: jump_table_data);
5256 return jump_table_data;
5257}
5258
5259/* Make an insn of code BARRIER
5260 and add it to the end of the doubly-linked list. */
5261
5262rtx_barrier *
5263emit_barrier (void)
5264{
5265 rtx_barrier *barrier = as_a <rtx_barrier *> (p: rtx_alloc (BARRIER));
5266 INSN_UID (insn: barrier) = cur_insn_uid++;
5267 add_insn (insn: barrier);
5268 return barrier;
5269}
5270
5271/* Emit a copy of note ORIG. */
5272
5273rtx_note *
5274emit_note_copy (rtx_note *orig)
5275{
5276 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5277 rtx_note *note = make_note_raw (subtype: kind);
5278 NOTE_DATA (note) = NOTE_DATA (orig);
5279 add_insn (insn: note);
5280 return note;
5281}
5282
5283/* Make an insn of code NOTE with kind KIND
5284 and add it to the end of the doubly-linked list. */
5285
5286rtx_note *
5287emit_note (enum insn_note kind)
5288{
5289 rtx_note *note = make_note_raw (subtype: kind);
5290 add_insn (insn: note);
5291 return note;
5292}
5293
5294/* Emit a clobber of lvalue X. */
5295
5296rtx_insn *
5297emit_clobber (rtx x)
5298{
5299 /* CONCATs should not appear in the insn stream. */
5300 if (GET_CODE (x) == CONCAT)
5301 {
5302 emit_clobber (XEXP (x, 0));
5303 return emit_clobber (XEXP (x, 1));
5304 }
5305 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5306}
5307
5308/* Return a sequence of insns to clobber lvalue X. */
5309
5310rtx_insn *
5311gen_clobber (rtx x)
5312{
5313 rtx_insn *seq;
5314
5315 start_sequence ();
5316 emit_clobber (x);
5317 seq = get_insns ();
5318 end_sequence ();
5319 return seq;
5320}
5321
5322/* Emit a use of rvalue X. */
5323
5324rtx_insn *
5325emit_use (rtx x)
5326{
5327 /* CONCATs should not appear in the insn stream. */
5328 if (GET_CODE (x) == CONCAT)
5329 {
5330 emit_use (XEXP (x, 0));
5331 return emit_use (XEXP (x, 1));
5332 }
5333 return emit_insn (gen_rtx_USE (VOIDmode, x));
5334}
5335
5336/* Return a sequence of insns to use rvalue X. */
5337
5338rtx_insn *
5339gen_use (rtx x)
5340{
5341 rtx_insn *seq;
5342
5343 start_sequence ();
5344 emit_use (x);
5345 seq = get_insns ();
5346 end_sequence ();
5347 return seq;
5348}
5349
5350/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5351 Return the set in INSN that such notes describe, or NULL if the notes
5352 have no meaning for INSN. */
5353
5354rtx
5355set_for_reg_notes (rtx insn)
5356{
5357 rtx pat, reg;
5358
5359 if (!INSN_P (insn))
5360 return NULL_RTX;
5361
5362 pat = PATTERN (insn);
5363 if (GET_CODE (pat) == PARALLEL)
5364 {
5365 /* We do not use single_set because that ignores SETs of unused
5366 registers. REG_EQUAL and REG_EQUIV notes really do require the
5367 PARALLEL to have a single SET. */
5368 if (multiple_sets (insn))
5369 return NULL_RTX;
5370 pat = XVECEXP (pat, 0, 0);
5371 }
5372
5373 if (GET_CODE (pat) != SET)
5374 return NULL_RTX;
5375
5376 reg = SET_DEST (pat);
5377
5378 /* Notes apply to the contents of a STRICT_LOW_PART. */
5379 if (GET_CODE (reg) == STRICT_LOW_PART
5380 || GET_CODE (reg) == ZERO_EXTRACT)
5381 reg = XEXP (reg, 0);
5382
5383 /* Check that we have a register. */
5384 if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5385 return NULL_RTX;
5386
5387 return pat;
5388}
5389
5390/* Place a note of KIND on insn INSN with DATUM as the datum. If a
5391 note of this type already exists, remove it first. */
5392
5393rtx
5394set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5395{
5396 rtx note = find_reg_note (insn, kind, NULL_RTX);
5397
5398 switch (kind)
5399 {
5400 case REG_EQUAL:
5401 case REG_EQUIV:
5402 /* We need to support the REG_EQUAL on USE trick of find_reloads. */
5403 if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5404 return NULL_RTX;
5405
5406 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5407 It serves no useful purpose and breaks eliminate_regs. */
5408 if (GET_CODE (datum) == ASM_OPERANDS)
5409 return NULL_RTX;
5410
5411 /* Notes with side effects are dangerous. Even if the side-effect
5412 initially mirrors one in PATTERN (INSN), later optimizations
5413 might alter the way that the final register value is calculated
5414 and so move or alter the side-effect in some way. The note would
5415 then no longer be a valid substitution for SET_SRC. */
5416 if (side_effects_p (datum))
5417 return NULL_RTX;
5418 break;
5419
5420 default:
5421 break;
5422 }
5423
5424 if (note)
5425 XEXP (note, 0) = datum;
5426 else
5427 {
5428 add_reg_note (insn, kind, datum);
5429 note = REG_NOTES (insn);
5430 }
5431
5432 switch (kind)
5433 {
5434 case REG_EQUAL:
5435 case REG_EQUIV:
5436 df_notes_rescan (as_a <rtx_insn *> (p: insn));
5437 break;
5438 default:
5439 break;
5440 }
5441
5442 return note;
5443}
5444
5445/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5446rtx
5447set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5448{
5449 rtx set = set_for_reg_notes (insn);
5450
5451 if (set && SET_DEST (set) == dst)
5452 return set_unique_reg_note (insn, kind, datum);
5453 return NULL_RTX;
5454}
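
/* For example, after expanding a multiplication by a constant as a
   shift/add sequence, an expander can record what the final insn computes
   so that later passes still see it as a multiplication (a minimal sketch;
   TARGET, OP0, MODE and COEFF are assumed to come from the caller):

     rtx_insn *insn = get_last_insn ();
     set_dst_reg_note (insn, REG_EQUAL,
                       gen_rtx_MULT (mode, op0, GEN_INT (coeff)),
                       target);  */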
5455
5456/* Emit the rtl pattern X as an appropriate kind of insn. Also emit a
5457 following barrier if the instruction needs one and if ALLOW_BARRIER_P
5458 is true.
5459
5460 If X is a label, it is simply added into the insn chain. */
5461
5462rtx_insn *
5463emit (rtx x, bool allow_barrier_p)
5464{
5465 enum rtx_code code = classify_insn (x);
5466
5467 switch (code)
5468 {
5469 case CODE_LABEL:
5470 return emit_label (uncast_label: x);
5471 case INSN:
5472 return emit_insn (x);
5473 case JUMP_INSN:
5474 {
5475 rtx_insn *insn = emit_jump_insn (x);
5476 if (allow_barrier_p
5477 && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5478 return emit_barrier ();
5479 return insn;
5480 }
5481 case CALL_INSN:
5482 return emit_call_insn (x);
5483 case DEBUG_INSN:
5484 return emit_debug_insn (x);
5485 default:
5486 gcc_unreachable ();
5487 }
5488}
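
/* For example, code that re-emits a saved pattern without knowing what
   kind of insn it describes can simply do (a minimal sketch; PAT is the
   saved pattern):

     emit (pat, true);

   and classify_insn routes it to emit_insn, emit_jump_insn, emit_call_insn
   or emit_debug_insn, adding a barrier after an unconditional jump.  */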
5489
5490/* Space for free sequence stack entries. */
5491static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5492
5493/* Begin emitting insns to a sequence. If this sequence will contain
5494 something that might cause the compiler to pop arguments to function
5495 calls (because those pops have previously been deferred; see
5496 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5497 before calling this function. That will ensure that the deferred
5498 pops are not accidentally emitted in the middle of this sequence. */
5499
5500void
5501start_sequence (void)
5502{
5503 struct sequence_stack *tem;
5504
5505 if (free_sequence_stack != NULL)
5506 {
5507 tem = free_sequence_stack;
5508 free_sequence_stack = tem->next;
5509 }
5510 else
5511 tem = ggc_alloc<sequence_stack> ();
5512
5513 tem->next = get_current_sequence ()->next;
5514 tem->first = get_insns ();
5515 tem->last = get_last_insn ();
5516 get_current_sequence ()->next = tem;
5517
5518 set_first_insn (0);
5519 set_last_insn (0);
5520}
5521
5522/* Set up the insn chain starting with FIRST as the current sequence,
5523 saving the previously current one. See the documentation for
5524 start_sequence for more information about how to use this function. */
5525
5526void
5527push_to_sequence (rtx_insn *first)
5528{
5529 rtx_insn *last;
5530
5531 start_sequence ();
5532
5533 for (last = first; last && NEXT_INSN (insn: last); last = NEXT_INSN (insn: last))
5534 ;
5535
5536 set_first_insn (first);
5537 set_last_insn (last);
5538}
5539
5540/* Like push_to_sequence, but take the last insn as an argument to avoid
5541 looping through the list. */
5542
5543void
5544push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5545{
5546 start_sequence ();
5547
5548 set_first_insn (first);
5549 set_last_insn (last);
5550}
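
/* For example, to append more insns to a detached chain whose first and
   last insns were saved earlier (a minimal sketch; remember to re-read the
   chain ends before end_sequence, as documented below):

     push_to_sequence2 (saved_first, saved_last);
     emit_insn (pat);
     saved_first = get_insns ();
     saved_last = get_last_insn ();
     end_sequence ();  */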
5551
5552/* Set up the outer-level insn chain
5553 as the current sequence, saving the previously current one. */
5554
5555void
5556push_topmost_sequence (void)
5557{
5558 struct sequence_stack *top;
5559
5560 start_sequence ();
5561
5562 top = get_topmost_sequence ();
5563 set_first_insn (top->first);
5564 set_last_insn (top->last);
5565}
5566
5567/* After emitting to the outer-level insn chain, update the outer-level
5568 insn chain, and restore the previous saved state. */
5569
5570void
5571pop_topmost_sequence (void)
5572{
5573 struct sequence_stack *top;
5574
5575 top = get_topmost_sequence ();
5576 top->first = get_insns ();
5577 top->last = get_last_insn ();
5578
5579 end_sequence ();
5580}
5581
5582/* After emitting to a sequence, restore previous saved state.
5583
5584 To get the contents of the sequence just made, you must call
5585 `get_insns' *before* calling here.
5586
5587 If the compiler might have deferred popping arguments while
5588 generating this sequence, and this sequence will not be immediately
5589 inserted into the instruction stream, use do_pending_stack_adjust
5590 before calling get_insns. That will ensure that the deferred
5591 pops are inserted into this sequence, and not into some random
5592 location in the instruction stream. See INHIBIT_DEFER_POP for more
5593 information about deferred popping of arguments. */
5594
5595void
5596end_sequence (void)
5597{
5598 struct sequence_stack *tem = get_current_sequence ()->next;
5599
5600 set_first_insn (tem->first);
5601 set_last_insn (tem->last);
5602 get_current_sequence ()->next = tem->next;
5603
5604 memset (s: tem, c: 0, n: sizeof (*tem));
5605 tem->next = free_sequence_stack;
5606 free_sequence_stack = tem;
5607}
5608
5609/* Return true if currently emitting into a sequence. */
5610
5611bool
5612in_sequence_p (void)
5613{
5614 return get_current_sequence ()->next != 0;
5615}
5616
5617/* Put the various virtual registers into REGNO_REG_RTX. */
5618
5619static void
5620init_virtual_regs (void)
5621{
5622 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5623 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5624 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5625 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5626 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5627 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5628 = virtual_preferred_stack_boundary_rtx;
5629}
5630
5631
5632/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5633static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5634static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5635static int copy_insn_n_scratches;
5636
5637/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5638 copied an ASM_OPERANDS.
5639 In that case, it is the original input-operand vector. */
5640static rtvec orig_asm_operands_vector;
5641
5642/* When an insn is being copied by copy_insn_1, this is nonzero if we have
5643 copied an ASM_OPERANDS.
5644 In that case, it is the copied input-operand vector. */
5645static rtvec copy_asm_operands_vector;
5646
5647/* Likewise for the constraints vector. */
5648static rtvec orig_asm_constraints_vector;
5649static rtvec copy_asm_constraints_vector;
5650
5651/* Recursively create a new copy of an rtx for copy_insn.
5652 This function differs from copy_rtx in that it handles SCRATCHes and
5653 ASM_OPERANDs properly.
5654 Normally, this function is not used directly; use copy_insn as front end.
5655 However, you could first copy an insn pattern with copy_insn and then use
5656 this function afterwards to properly copy any REG_NOTEs containing
5657 SCRATCHes. */
5658
5659rtx
5660copy_insn_1 (rtx orig)
5661{
5662 rtx copy;
5663 int i, j;
5664 RTX_CODE code;
5665 const char *format_ptr;
5666
5667 if (orig == NULL)
5668 return NULL;
5669
5670 code = GET_CODE (orig);
5671
5672 switch (code)
5673 {
5674 case REG:
5675 case DEBUG_EXPR:
5676 CASE_CONST_ANY:
5677 case SYMBOL_REF:
5678 case CODE_LABEL:
5679 case PC:
5680 case RETURN:
5681 case SIMPLE_RETURN:
5682 return orig;
5683 case CLOBBER:
5684 /* Share clobbers of hard registers, but do not share pseudo reg
5685 clobbers or clobbers of hard registers that originated as pseudos.
5686 This is needed to allow safe register renaming. */
5687 if (REG_P (XEXP (orig, 0))
5688 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5689 && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5690 return orig;
5691 break;
5692
5693 case SCRATCH:
5694 for (i = 0; i < copy_insn_n_scratches; i++)
5695 if (copy_insn_scratch_in[i] == orig)
5696 return copy_insn_scratch_out[i];
5697 break;
5698
5699 case CONST:
5700 if (shared_const_p (orig))
5701 return orig;
5702 break;
5703
5704 /* A MEM with a constant address is not sharable. The problem is that
5705 the constant address may need to be reloaded. If the mem is shared,
5706 then reloading one copy of this mem will cause all copies to appear
5707 to have been reloaded. */
5708
5709 default:
5710 break;
5711 }
5712
5713 /* Copy the various flags, fields, and other information. We assume
5714 that all fields need copying, and then clear the fields that should
5715 not be copied. That is the sensible default behavior, and forces
5716 us to explicitly document why we are *not* copying a flag. */
5717 copy = shallow_copy_rtx (orig);
5718
5719 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5720 if (INSN_P (orig))
5721 {
5722 RTX_FLAG (copy, jump) = 0;
5723 RTX_FLAG (copy, call) = 0;
5724 RTX_FLAG (copy, frame_related) = 0;
5725 }
5726
5727 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5728
5729 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5730 switch (*format_ptr++)
5731 {
5732 case 'e':
5733 if (XEXP (orig, i) != NULL)
5734 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5735 break;
5736
5737 case 'E':
5738 case 'V':
5739 if (XVEC (orig, i) == orig_asm_constraints_vector)
5740 XVEC (copy, i) = copy_asm_constraints_vector;
5741 else if (XVEC (orig, i) == orig_asm_operands_vector)
5742 XVEC (copy, i) = copy_asm_operands_vector;
5743 else if (XVEC (orig, i) != NULL)
5744 {
5745 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5746 for (j = 0; j < XVECLEN (copy, i); j++)
5747 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5748 }
5749 break;
5750
5751 case 't':
5752 case 'w':
5753 case 'i':
5754 case 'p':
5755 case 's':
5756 case 'S':
5757 case 'u':
5758 case '0':
5759 /* These are left unchanged. */
5760 break;
5761
5762 default:
5763 gcc_unreachable ();
5764 }
5765
5766 if (code == SCRATCH)
5767 {
5768 i = copy_insn_n_scratches++;
5769 gcc_assert (i < MAX_RECOG_OPERANDS);
5770 copy_insn_scratch_in[i] = orig;
5771 copy_insn_scratch_out[i] = copy;
5772 }
5773 else if (code == ASM_OPERANDS)
5774 {
5775 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5776 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5777 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5778 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5779 }
5780
5781 return copy;
5782}
5783
5784/* Create a new copy of an rtx.
5785 This function differs from copy_rtx in that it handles SCRATCHes and
5786 ASM_OPERANDs properly.
5787 INSN doesn't really have to be a full INSN; it could be just the
5788 pattern. */
5789rtx
5790copy_insn (rtx insn)
5791{
5792 copy_insn_n_scratches = 0;
5793 orig_asm_operands_vector = 0;
5794 orig_asm_constraints_vector = 0;
5795 copy_asm_operands_vector = 0;
5796 copy_asm_constraints_vector = 0;
5797 return copy_insn_1 (insn);
5798}
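
/* A typical use is copy_insn (PATTERN (insn)) when duplicating an existing
   insn; see emit_copy_of_insn_after later in this file.  */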
5799
5800/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5801 on the assumption that INSN itself remains in its original place. */
5802
5803rtx_insn *
5804copy_delay_slot_insn (rtx_insn *insn)
5805{
5806 /* Copy INSN with its rtx_code, all its notes, location etc. */
5807 insn = as_a <rtx_insn *> (copy_rtx (insn));
5808 INSN_UID (insn) = cur_insn_uid++;
5809 return insn;
5810}
5811
5812/* Initialize data structures and variables in this file
5813 before generating rtl for each function. */
5814
5815void
5816init_emit (void)
5817{
5818 set_first_insn (NULL);
5819 set_last_insn (NULL);
5820 if (param_min_nondebug_insn_uid)
5821 cur_insn_uid = param_min_nondebug_insn_uid;
5822 else
5823 cur_insn_uid = 1;
5824 cur_debug_insn_uid = 1;
5825 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5826 first_label_num = label_num;
5827 get_current_sequence ()->next = NULL;
5828
5829 /* Init the tables that describe all the pseudo regs. */
5830
5831 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 1;
5832
5833 crtl->emit.regno_pointer_align
5834 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5835
5836 regno_reg_rtx
5837 = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5838
5839 /* Put copies of all the hard registers into regno_reg_rtx. */
5840 memcpy (regno_reg_rtx,
5841 initial_regno_reg_rtx,
5842 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5843
5844 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5845 init_virtual_regs ();
5846
5847 /* Indicate that the virtual registers and stack locations are
5848 all pointers. */
5849 REG_POINTER (stack_pointer_rtx) = 1;
5850 REG_POINTER (frame_pointer_rtx) = 1;
5851 REG_POINTER (hard_frame_pointer_rtx) = 1;
5852 REG_POINTER (arg_pointer_rtx) = 1;
5853
5854 REG_POINTER (virtual_incoming_args_rtx) = 1;
5855 REG_POINTER (virtual_stack_vars_rtx) = 1;
5856 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5857 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5858 REG_POINTER (virtual_cfa_rtx) = 1;
5859
5860#ifdef STACK_BOUNDARY
5861 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5862 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5863 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5864 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5865
5866 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5867 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5868 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5869 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5870
5871 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5872#endif
5873
5874#ifdef INIT_EXPANDERS
5875 INIT_EXPANDERS;
5876#endif
5877}
5878
5879/* Return the value of element I of CONST_VECTOR X as a wide_int. */
5880
5881wide_int
5882const_vector_int_elt (const_rtx x, unsigned int i)
5883{
5884 /* First handle elements that are directly encoded. */
5885 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5886 if (i < (unsigned int) XVECLEN (x, 0))
5887 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5888
5889 /* Identify the pattern that contains element I and work out the index of
5890 the last encoded element for that pattern. */
5891 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5892 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5893 unsigned int count = i / npatterns;
5894 unsigned int pattern = i % npatterns;
5895 unsigned int final_i = encoded_nelts - npatterns + pattern;
5896
5897 /* If there are no steps, the final encoded value is the right one. */
5898 if (!CONST_VECTOR_STEPPED_P (x))
5899 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5900
5901 /* Otherwise work out the value from the last two encoded elements. */
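  /* Illustration (assuming a single stepped pattern, npatterns == 1, with
     encoded elements B, B + S and B + 2 * S): DIFF below is S and the
     result for element I is (B + 2 * S) + (I - 2) * S == B + I * S.  */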
5902 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5903 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5904 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5905 rtx_mode_t (v1, elt_mode));
5906 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5907}
5908
5909/* Return the value of element I of CONST_VECTOR X. */
5910
5911rtx
5912const_vector_elt (const_rtx x, unsigned int i)
5913{
5914 /* First handle elements that are directly encoded. */
5915 if (i < (unsigned int) XVECLEN (x, 0))
5916 return CONST_VECTOR_ENCODED_ELT (x, i);
5917
5918 /* If there are no steps, the final encoded value is the right one. */
5919 if (!CONST_VECTOR_STEPPED_P (x))
5920 {
5921 /* Identify the pattern that contains element I and work out the index of
5922 the last encoded element for that pattern. */
5923 unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5924 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5925 unsigned int pattern = i % npatterns;
5926 unsigned int final_i = encoded_nelts - npatterns + pattern;
5927 return CONST_VECTOR_ENCODED_ELT (x, final_i);
5928 }
5929
5930 /* Otherwise work out the value from the last two encoded elements. */
5931 return immed_wide_int_const (const_vector_int_elt (x, i),
5932 GET_MODE_INNER (GET_MODE (x)));
5933}
5934
5935/* Return true if X is a valid element for a CONST_VECTOR of the given
5936 mode. */
5937
5938bool
5939valid_for_const_vector_p (machine_mode, rtx x)
5940{
5941 return (CONST_SCALAR_INT_P (x)
5942 || CONST_POLY_INT_P (x)
5943 || CONST_DOUBLE_AS_FLOAT_P (x)
5944 || CONST_FIXED_P (x));
5945}
5946
5947/* Generate a vector constant of mode MODE in which every element has
5948 value ELT. */
5949
5950rtx
5951gen_const_vec_duplicate (machine_mode mode, rtx elt)
5952{
5953 rtx_vector_builder builder (mode, 1, 1);
5954 builder.quick_push (elt);
5955 return builder.build ();
5956}
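
/* For example, on a target providing V4SImode,
   gen_const_vec_duplicate (V4SImode, const1_rtx) yields a CONST_VECTOR
   whose four elements are all const1_rtx, encoded as a single
   one-element pattern.  */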
5957
5958/* Return a vector rtx of mode MODE in which every element has value X.
5959 The result will be a constant if X is constant. */
5960
5961rtx
5962gen_vec_duplicate (machine_mode mode, rtx x)
5963{
5964 if (valid_for_const_vector_p (mode, x))
5965 return gen_const_vec_duplicate (mode, x);
5966 return gen_rtx_VEC_DUPLICATE (mode, x);
5967}
5968
5969/* A subroutine of const_vec_series_p that handles the case in which:
5970
5971 (GET_CODE (X) == CONST_VECTOR
5972 && CONST_VECTOR_NPATTERNS (X) == 1
5973 && !CONST_VECTOR_DUPLICATE_P (X))
5974
5975 is known to hold. */
5976
5977bool
5978const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5979{
5980 /* Stepped sequences are only defined for integers, to avoid specifying
5981 rounding behavior. */
5982 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
5983 return false;
5984
5985 /* A non-duplicated vector with two elements can always be seen as a
5986 series with a nonzero step. Longer vectors must have a stepped
5987 encoding. */
5988 if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
5989 && !CONST_VECTOR_STEPPED_P (x))
5990 return false;
5991
5992 /* Calculate the step between the first and second elements. */
5993 scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5994 rtx base = CONST_VECTOR_ELT (x, 0);
5995 rtx step = simplify_binary_operation (MINUS, inner,
5996 CONST_VECTOR_ENCODED_ELT (x, 1), base);
5997 if (rtx_equal_p (step, CONST0_RTX (inner)))
5998 return false;
5999
6000 /* If we have a stepped encoding, check that the step between the
6001 second and third elements is the same as STEP. */
6002 if (CONST_VECTOR_STEPPED_P (x))
6003 {
6004 rtx diff = simplify_binary_operation (MINUS, inner,
6005 CONST_VECTOR_ENCODED_ELT (x, 2),
6006 CONST_VECTOR_ENCODED_ELT (x, 1));
6007 if (!rtx_equal_p (step, diff))
6008 return false;
6009 }
6010
6011 *base_out = base;
6012 *step_out = step;
6013 return true;
6014}
6015
6016/* Generate a vector constant of mode MODE in which element I has
6017 the value BASE + I * STEP. */
6018
6019rtx
6020gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6021{
6022 gcc_assert (valid_for_const_vector_p (mode, base)
6023 && valid_for_const_vector_p (mode, step));
6024
6025 rtx_vector_builder builder (mode, 1, 3);
6026 builder.quick_push (base);
6027 for (int i = 1; i < 3; ++i)
6028 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6029 builder[i - 1], step));
6030 return builder.build ();
6031}
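
/* For example, with an integer vector mode, BASE == const0_rtx and
   STEP == const1_rtx, the builder above encodes the single stepped
   pattern {0, 1, 2}, which decodes to element I having value I.  */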
6032
6033/* Generate a vector of mode MODE in which element I has the value
6034 BASE + I * STEP. The result will be a constant if BASE and STEP
6035 are both constants. */
6036
6037rtx
6038gen_vec_series (machine_mode mode, rtx base, rtx step)
6039{
6040 if (step == const0_rtx)
6041 return gen_vec_duplicate (mode, base);
6042 if (valid_for_const_vector_p (mode, base)
6043 && valid_for_const_vector_p (mode, step))
6044 return gen_const_vec_series (mode, base, step);
6045 return gen_rtx_VEC_SERIES (mode, base, step);
6046}
6047
6048/* Generate a new vector constant for mode MODE and constant value
6049 CONSTANT. */
6050
6051static rtx
6052gen_const_vector (machine_mode mode, int constant)
6053{
6054 machine_mode inner = GET_MODE_INNER (mode);
6055
6056 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6057
6058 rtx el = const_tiny_rtx[constant][(int) inner];
6059 gcc_assert (el);
6060
6061 return gen_const_vec_duplicate (mode, el);
6062}
6063
6064/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6065 all elements are zero, and the one vector when all elements are one. */
6066rtx
6067gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6068{
6069 gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6070
6071 /* If the values are all the same, check to see if we can use one of the
6072 standard constant vectors. */
6073 if (rtvec_all_equal_p (v))
6074 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6075
6076 unsigned int nunits = GET_NUM_ELEM (v);
6077 rtx_vector_builder builder (mode, nunits, 1);
6078 for (unsigned int i = 0; i < nunits; ++i)
6079 builder.quick_push (RTVEC_ELT (v, i));
6080 return builder.build (v);
6081}
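
/* Note that an rtvec whose elements are all equal is handled by
   gen_const_vec_duplicate above rather than being pushed into the
   builder element by element.  */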
6082
6083/* Initialise global register information required by all functions. */
6084
6085void
6086init_emit_regs (void)
6087{
6088 int i;
6089 machine_mode mode;
6090 mem_attrs *attrs;
6091
6092 /* Reset register attributes */
6093 reg_attrs_htab->empty ();
6094
6095 /* We need reg_raw_mode, so initialize the modes now. */
6096 init_reg_modes_target ();
6097
6098 /* Assign register numbers to the globally defined register rtx. */
6099 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6100 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6101 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6102 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6103 virtual_incoming_args_rtx =
6104 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6105 virtual_stack_vars_rtx =
6106 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6107 virtual_stack_dynamic_rtx =
6108 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6109 virtual_outgoing_args_rtx =
6110 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6111 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6112 virtual_preferred_stack_boundary_rtx =
6113 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6114
6115 /* Initialize RTL for commonly used hard registers. These are
6116 copied into regno_reg_rtx as we begin to compile each function. */
6117 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6118 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6119
6120#ifdef RETURN_ADDRESS_POINTER_REGNUM
6121 return_address_pointer_rtx
6122 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6123#endif
6124
6125 pic_offset_table_rtx = NULL_RTX;
6126 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6127 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6128
6129 /* Process stack-limiting command-line options. */
6130 if (opt_fstack_limit_symbol_arg != NULL)
6131 stack_limit_rtx
6132 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6133 if (opt_fstack_limit_register_no >= 0)
6134 stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6135
6136 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6137 {
6138 mode = (machine_mode) i;
6139 attrs = ggc_cleared_alloc<mem_attrs> ();
6140 attrs->align = BITS_PER_UNIT;
6141 attrs->addrspace = ADDR_SPACE_GENERIC;
6142 if (mode != BLKmode && mode != VOIDmode)
6143 {
6144 attrs->size_known_p = true;
6145 attrs->size = GET_MODE_SIZE (mode);
6146 if (STRICT_ALIGNMENT)
6147 attrs->align = GET_MODE_ALIGNMENT (mode);
6148 }
6149 mode_mem_attrs[i] = attrs;
6150 }
6151
6152 split_branch_probability = profile_probability::uninitialized ();
6153}
6154
6155/* Initialize global machine_mode variables. */
6156
6157void
6158init_derived_machine_modes (void)
6159{
6160 opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6161 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6162 {
6163 scalar_int_mode mode = mode_iter.require ();
6164
6165 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6166 && !opt_byte_mode.exists ())
6167 opt_byte_mode = mode;
6168
6169 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6170 && !opt_word_mode.exists ())
6171 opt_word_mode = mode;
6172 }
6173
6174 byte_mode = opt_byte_mode.require ();
6175 word_mode = opt_word_mode.require ();
6176 ptr_mode = as_a <scalar_int_mode>
6177 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6178}
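
/* On a typical 64-bit target with BITS_PER_UNIT == 8 and
   BITS_PER_WORD == 64, the loop above selects QImode for byte_mode and
   DImode for word_mode; ptr_mode becomes the integer mode whose width is
   POINTER_SIZE (e.g. DImode for 64-bit pointers).  */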
6179
6180/* Create some permanent unique rtl objects shared between all functions. */
6181
6182void
6183init_emit_once (void)
6184{
6185 int i;
6186 machine_mode mode;
6187 scalar_float_mode double_mode;
6188 opt_scalar_mode smode_iter;
6189
6190 /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6191 CONST_FIXED, and memory attribute hash tables. */
6192 const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6193
6194#if TARGET_SUPPORTS_WIDE_INT
6195 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6196#endif
6197 const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6198
6199 if (NUM_POLY_INT_COEFFS > 1)
6200 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6201
6202 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6203
6204 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6205
6206#ifdef INIT_EXPANDERS
6207 /* This is to initialize {init|mark|free}_machine_status before the first
6208 call to push_function_context_to. This is needed by the Chill front
6209 end which calls push_function_context_to before the first call to
6210 init_function_start. */
6211 INIT_EXPANDERS;
6212#endif
6213
6214 /* Create the unique rtx's for certain rtx codes and operand values. */
6215
6216 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
6217 tries to use these variables. */
6218 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6219 const_int_rtx[i + MAX_SAVED_CONST_INT] =
6220 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6221
6222 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6223 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6224 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6225 else
6226 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6227
6228 double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6229
6230 real_from_integer (&dconst0, double_mode, 0, SIGNED);
6231 real_from_integer (&dconst1, double_mode, 1, SIGNED);
6232 real_from_integer (&dconst2, double_mode, 2, SIGNED);
6233
6234 dconstm0 = dconst0;
6235 dconstm0.sign = 1;
6236
6237 dconstm1 = dconst1;
6238 dconstm1.sign = 1;
6239
6240 dconsthalf = dconst1;
6241 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6242
6243 real_inf (&dconstinf);
6244 real_inf (&dconstninf, true);
6245
6246 for (i = 0; i < 3; i++)
6247 {
6248 const REAL_VALUE_TYPE *const r =
6249 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6250
6251 FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6252 const_tiny_rtx[i][(int) mode] =
6253 const_double_from_real_value (*r, mode);
6254
6255 FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6256 const_tiny_rtx[i][(int) mode] =
6257 const_double_from_real_value (*r, mode);
6258
6259 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6260
6261 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6262 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6263
6264 for (mode = MIN_MODE_PARTIAL_INT;
6265 mode <= MAX_MODE_PARTIAL_INT;
6266 mode = (machine_mode)((int)(mode) + 1))
6267 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6268 }
6269
6270 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6271
6272 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6273 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6274
6275 /* For BImode, 1 and -1 are unsigned and signed interpretations
6276 of the same value. */
6277 for (mode = MIN_MODE_BOOL;
6278 mode <= MAX_MODE_BOOL;
6279 mode = (machine_mode)((int)(mode) + 1))
6280 {
6281 const_tiny_rtx[0][(int) mode] = const0_rtx;
6282 if (mode == BImode)
6283 {
6284 const_tiny_rtx[1][(int) mode] = const_true_rtx;
6285 const_tiny_rtx[3][(int) mode] = const_true_rtx;
6286 }
6287 else
6288 {
6289 const_tiny_rtx[1][(int) mode] = const1_rtx;
6290 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6291 }
6292 }
6293
6294 for (mode = MIN_MODE_PARTIAL_INT;
6295 mode <= MAX_MODE_PARTIAL_INT;
6296 mode = (machine_mode)((int)(mode) + 1))
6297 const_tiny_rtx[3][(int) mode] = constm1_rtx;
6298
6299 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6300 {
6301 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6302 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6303 }
6304
6305 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6306 {
6307 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6308 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6309 }
6310
6311 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6312 {
6313 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6314 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6315 if (GET_MODE_INNER (mode) == BImode)
6316 /* As for BImode, "all 1" and "all -1" are unsigned and signed
6317 interpretations of the same value. */
6318 const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6319 else
6320 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6321 }
6322
6323 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6324 {
6325 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6326 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6327 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6328 }
6329
6330 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6331 {
6332 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6333 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6334 }
6335
6336 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6337 {
6338 scalar_mode smode = smode_iter.require ();
6339 FCONST0 (smode).data.high = 0;
6340 FCONST0 (smode).data.low = 0;
6341 FCONST0 (smode).mode = smode;
6342 const_tiny_rtx[0][(int) smode]
6343 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6344 }
6345
6346 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6347 {
6348 scalar_mode smode = smode_iter.require ();
6349 FCONST0 (smode).data.high = 0;
6350 FCONST0 (smode).data.low = 0;
6351 FCONST0 (smode).mode = smode;
6352 const_tiny_rtx[0][(int) smode]
6353 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6354 }
6355
6356 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6357 {
6358 scalar_mode smode = smode_iter.require ();
6359 FCONST0 (smode).data.high = 0;
6360 FCONST0 (smode).data.low = 0;
6361 FCONST0 (smode).mode = smode;
6362 const_tiny_rtx[0][(int) smode]
6363 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6364
6365 /* We store the value 1. */
6366 FCONST1 (smode).data.high = 0;
6367 FCONST1 (smode).data.low = 0;
6368 FCONST1 (smode).mode = smode;
6369 FCONST1 (smode).data
6370 = double_int_one.lshift (GET_MODE_FBIT (smode),
6371 HOST_BITS_PER_DOUBLE_INT,
6372 SIGNED_FIXED_POINT_MODE_P (smode));
6373 const_tiny_rtx[1][(int) smode]
6374 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6375 }
6376
6377 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6378 {
6379 scalar_mode smode = smode_iter.require ();
6380 FCONST0 (smode).data.high = 0;
6381 FCONST0 (smode).data.low = 0;
6382 FCONST0 (smode).mode = smode;
6383 const_tiny_rtx[0][(int) smode]
6384 = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6385
6386 /* We store the value 1. */
6387 FCONST1 (smode).data.high = 0;
6388 FCONST1 (smode).data.low = 0;
6389 FCONST1 (smode).mode = smode;
6390 FCONST1 (smode).data
6391 = double_int_one.lshift (GET_MODE_FBIT (smode),
6392 HOST_BITS_PER_DOUBLE_INT,
6393 SIGNED_FIXED_POINT_MODE_P (smode));
6394 const_tiny_rtx[1][(int) smode]
6395 = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6396 }
6397
6398 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6399 {
6400 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6401 }
6402
6403 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6404 {
6405 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6406 }
6407
6408 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6409 {
6410 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6411 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6412 }
6413
6414 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6415 {
6416 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6417 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6418 }
6419
6420 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6421 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6422 const_tiny_rtx[0][i] = const0_rtx;
6423
6424 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6425 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6426 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6427 invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6428 /*prev_insn=*/NULL,
6429 /*next_insn=*/NULL,
6430 /*bb=*/NULL,
6431 /*pattern=*/NULL_RTX,
6432 /*location=*/-1,
6433 CODE_FOR_nothing,
6434 /*reg_notes=*/NULL_RTX);
6435}
6436
6437/* Produce exact duplicate of insn INSN after AFTER.
6438 Take care to update libcall regions if present. */
6439
6440rtx_insn *
6441emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6442{
6443 rtx_insn *new_rtx;
6444 rtx link;
6445
6446 switch (GET_CODE (insn))
6447 {
6448 case INSN:
6449 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6450 break;
6451
6452 case JUMP_INSN:
6453 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6454 CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6455 break;
6456
6457 case DEBUG_INSN:
6458 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6459 break;
6460
6461 case CALL_INSN:
6462 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6463 if (CALL_INSN_FUNCTION_USAGE (insn))
6464 CALL_INSN_FUNCTION_USAGE (new_rtx)
6465 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6466 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6467 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6468 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6469 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6470 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6471 break;
6472
6473 default:
6474 gcc_unreachable ();
6475 }
6476
6477 /* Update LABEL_NUSES. */
6478 if (NONDEBUG_INSN_P (insn))
6479 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6480
6481 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6482
6483 /* If the old insn is frame related, then so is the new one. This is
6484 primarily needed for IA-64 unwind info which marks epilogue insns,
6485 which may be duplicated by the basic block reordering code. */
6486 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6487
6488 /* Locate the end of existing REG_NOTES in NEW_RTX. */
6489 rtx *ptail = &REG_NOTES (new_rtx);
6490 while (*ptail != NULL_RTX)
6491 ptail = &XEXP (*ptail, 1);
6492
6493 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6494 will make them. REG_LABEL_TARGETs are created there too, but are
6495 supposed to be sticky, so we copy them. */
6496 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6497 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6498 {
6499 *ptail = duplicate_reg_note (link);
6500 ptail = &XEXP (*ptail, 1);
6501 }
6502
6503 INSN_CODE (new_rtx) = INSN_CODE (insn);
6504 return new_rtx;
6505}
6506
6507static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
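/* Return a (clobber (reg:MODE REGNO)) rtx for hard register REGNO,
   creating it on first use and reusing the cached copy on later calls.  */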
6508rtx
6509gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6510{
6511 if (hard_reg_clobbers[mode][regno])
6512 return hard_reg_clobbers[mode][regno];
6513 else
6514 return (hard_reg_clobbers[mode][regno] =
6515 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6516}
6517
6518location_t prologue_location;
6519location_t epilogue_location;
6520
6521/* Hold the current and last location information, so that the location
6522 data structures are built lazily only when instructions at a given
6523 place actually need them. */
6524static location_t curr_location;
6525
6526/* Allocate insn location datastructure. */
6527void
6528insn_locations_init (void)
6529{
6530 prologue_location = epilogue_location = 0;
6531 curr_location = UNKNOWN_LOCATION;
6532}
6533
6534/* At the end of emit stage, clear current location. */
6535void
6536insn_locations_finalize (void)
6537{
6538 epilogue_location = curr_location;
6539 curr_location = UNKNOWN_LOCATION;
6540}
6541
6542/* Set current location. */
6543void
6544set_curr_insn_location (location_t location)
6545{
6546 curr_location = location;
6547}
6548
6549/* Get current location. */
6550location_t
6551curr_insn_location (void)
6552{
6553 return curr_location;
6554}
6555
6556/* Set the location of the insn chain starting at INSN to LOC. */
6557void
6558set_insn_locations (rtx_insn *insn, location_t loc)
6559{
6560 while (insn)
6561 {
6562 if (INSN_P (insn))
6563 INSN_LOCATION (insn) = loc;
6564 insn = NEXT_INSN (insn);
6565 }
6566}
6567
6568/* Return the lexical scope block that INSN belongs to. */
6569tree
6570insn_scope (const rtx_insn *insn)
6571{
6572 return LOCATION_BLOCK (INSN_LOCATION (insn));
6573}
6574
6575/* Return line number of the statement that produced this insn. */
6576int
6577insn_line (const rtx_insn *insn)
6578{
6579 return LOCATION_LINE (INSN_LOCATION (insn));
6580}
6581
6582/* Return source file of the statement that produced this insn. */
6583const char *
6584insn_file (const rtx_insn *insn)
6585{
6586 return LOCATION_FILE (INSN_LOCATION (insn));
6587}
6588
6589/* Return expanded location of the statement that produced this insn. */
6590expanded_location
6591insn_location (const rtx_insn *insn)
6592{
6593 return expand_location (INSN_LOCATION (insn));
6594}
6595
6596/* Return true if memory model MODEL requires a pre-operation (release-style)
6597 barrier or a post-operation (acquire-style) barrier. While not universal,
6598 this function matches the behavior of several targets. */
6599
6600bool
6601need_atomic_barrier_p (enum memmodel model, bool pre)
6602{
6603 switch (model & MEMMODEL_BASE_MASK)
6604 {
6605 case MEMMODEL_RELAXED:
6606 case MEMMODEL_CONSUME:
6607 return false;
6608 case MEMMODEL_RELEASE:
6609 return pre;
6610 case MEMMODEL_ACQUIRE:
6611 return !pre;
6612 case MEMMODEL_ACQ_REL:
6613 case MEMMODEL_SEQ_CST:
6614 return true;
6615 default:
6616 gcc_unreachable ();
6617 }
6618}
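
/* For example, MEMMODEL_RELEASE only needs the pre-operation barrier
   (PRE == true), MEMMODEL_ACQUIRE only the post-operation barrier
   (PRE == false), and MEMMODEL_SEQ_CST needs both.  */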
6619
6620/* Return a constant shift amount for shifting a value of mode MODE
6621 by VALUE bits. */
6622
6623rtx
6624gen_int_shift_amount (machine_mode, poly_int64 value)
6625{
6626 /* Use a 64-bit mode, to avoid any truncation.
6627
6628 ??? Perhaps this should be automatically derived from the .md files
6629 instead, or perhaps have a target hook. */
6630 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6631 ? DImode
6632 : int_mode_for_size (64, 0).require ());
6633 return gen_int_mode (value, shift_mode);
6634}
6635
6636/* Initialize fields of rtl_data related to stack alignment. */
6637
6638void
6639rtl_data::init_stack_alignment ()
6640{
6641 stack_alignment_needed = STACK_BOUNDARY;
6642 max_used_stack_slot_alignment = STACK_BOUNDARY;
6643 stack_alignment_estimated = 0;
6644 preferred_stack_boundary = STACK_BOUNDARY;
6645}
6646
6647
6648#include "gt-emit-rtl.h"
6649
