1/* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
22
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
28
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
33
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
36
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
40
41 The code for the function prologue and epilogue is generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
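/* For orientation, the rest of the compiler is expected to drive this pass
   roughly as follows (a simplified sketch of the call order described
   above, not the exact driver code):

     final_start_function (insns, asm_out_file, optimize);
     final (insns, asm_out_file, optimize);
     final_end_function ();

   with `final' possibly invoked several times when a function is output
   in several pieces.  */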
44
45#include "config.h"
46#define INCLUDE_ALGORITHM /* reverse */
47#include "system.h"
48#include "coretypes.h"
49#include "backend.h"
50#include "target.h"
51#include "rtl.h"
52#include "tree.h"
53#include "cfghooks.h"
54#include "df.h"
55#include "memmodel.h"
56#include "tm_p.h"
57#include "insn-config.h"
58#include "regs.h"
59#include "emit-rtl.h"
60#include "recog.h"
61#include "cgraph.h"
62#include "tree-pretty-print.h" /* for dump_function_header */
63#include "varasm.h"
64#include "insn-attr.h"
65#include "conditions.h"
66#include "flags.h"
67#include "output.h"
68#include "except.h"
69#include "rtl-error.h"
70#include "toplev.h" /* exact_log2, floor_log2 */
71#include "reload.h"
72#include "intl.h"
73#include "cfgrtl.h"
74#include "debug.h"
75#include "tree-pass.h"
76#include "tree-ssa.h"
77#include "cfgloop.h"
78#include "stringpool.h"
79#include "attribs.h"
80#include "asan.h"
81#include "rtl-iter.h"
82#include "print-rtl.h"
83#include "function-abi.h"
84#include "common/common-target.h"
85
86#include "dwarf2out.h"
87
88/* Most ports don't need to define CC_STATUS_INIT.
89 So define a null default for it to save conditionalization later. */
90#ifndef CC_STATUS_INIT
91#define CC_STATUS_INIT
92#endif
93
94/* Is the given character a logical line separator for the assembler? */
95#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
96#define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
97#endif
98
99#ifndef JUMP_TABLES_IN_TEXT_SECTION
100#define JUMP_TABLES_IN_TEXT_SECTION 0
101#endif
102
103/* Bitflags used by final_scan_insn. */
104#define SEEN_NOTE 1
105#define SEEN_EMITTED 2
106#define SEEN_NEXT_VIEW 4
107
108/* Last insn processed by final_scan_insn. */
109static rtx_insn *debug_insn;
110rtx_insn *current_output_insn;
111
112/* Line number of last NOTE. */
113static int last_linenum;
114
115/* Column number of last NOTE. */
116static int last_columnnum;
117
118/* Discriminator written to assembly. */
119static int last_discriminator;
120
121/* Compute discriminator to be written to assembly for current instruction.
122 Note: actual usage depends on loc_discriminator_kind setting. */
123static inline int compute_discriminator (location_t loc);
124
125/* Highest line number in current block. */
126static int high_block_linenum;
127
128/* Likewise for function. */
129static int high_function_linenum;
130
131/* Filename of last NOTE. */
132static const char *last_filename;
133
134/* Override filename, line and column number. */
135static const char *override_filename;
136static int override_linenum;
137static int override_columnnum;
138static int override_discriminator;
139
140/* Whether to force emission of a line note before the next insn. */
141static bool force_source_line = false;
142
143extern const int length_unit_log; /* This is defined in insn-attrtab.cc. */
144
145/* Nonzero while outputting an `asm' with operands.
146 This means that inconsistencies are the user's fault, so don't die.
147 The precise value is the insn being output, to pass to error_for_asm. */
148const rtx_insn *this_is_asm_operands;
149
150/* Number of operands of this insn, for an `asm' with operands. */
151static unsigned int insn_noperands;
152
153/* Compare optimization flag. */
154
155static rtx last_ignored_compare = 0;
156
157/* Assign a unique number to each insn that is output.
158 This can be used to generate unique local labels. */
159
160static int insn_counter = 0;
161
162/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
163
164static int block_depth;
165
166/* True if we have enabled APP processing of our assembler output. */
167
168static bool app_on;
169
170/* If we are outputting an insn sequence, this contains the sequence rtx.
171 Zero otherwise. */
172
173rtx_sequence *final_sequence;
174
175#ifdef ASSEMBLER_DIALECT
176
177/* Number of the assembler dialect to use, starting at 0. */
178static int dialect_number;
179#endif
180
181/* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
182rtx current_insn_predicate;
183
184/* True if printing into -fdump-final-insns= dump. */
185bool final_insns_dump_p;
186
187/* True if profile_function should be called, but hasn't been called yet. */
188static bool need_profile_function;
189
190static int asm_insn_count (rtx);
191static void profile_function (FILE *);
192static void profile_after_prologue (FILE *);
193static bool notice_source_line (rtx_insn *, bool *);
194static rtx walk_alter_subreg (rtx *, bool *);
195static void output_asm_name (void);
196static void output_alternate_entry_point (FILE *, rtx_insn *);
197static tree get_mem_expr_from_op (rtx, int *);
198static void output_asm_operand_names (rtx *, int *, int);
199#ifdef LEAF_REGISTERS
200static void leaf_renumber_regs (rtx_insn *);
201#endif
202static int align_fuzz (rtx, rtx, int, unsigned);
203static void collect_fn_hard_reg_usage (void);
204
205/* Initialize data in final at the beginning of a compilation. */
206
207void
208init_final (const char *filename ATTRIBUTE_UNUSED)
209{
210 app_on = 0;
211 final_sequence = 0;
212
213#ifdef ASSEMBLER_DIALECT
214 dialect_number = ASSEMBLER_DIALECT;
215#endif
216}
217
218/* Default target function prologue and epilogue assembler output.
219
220 If not overridden for epilogue code, then the function body itself
221 contains return instructions wherever needed. */
222void
223default_function_pro_epilogue (FILE *)
224{
225}
226
227void
228default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
229 tree decl ATTRIBUTE_UNUSED,
230 bool new_is_cold ATTRIBUTE_UNUSED)
231{
232}
233
234/* Default target hook that outputs nothing to a stream. */
235void
236no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
237{
238}
239
240/* Enable APP processing of subsequent output.
241 Used before the output from an `asm' statement. */
242
243void
244app_enable (void)
245{
246 if (! app_on)
247 {
248 fputs (ASM_APP_ON, asm_out_file);
249 app_on = 1;
250 }
251}
252
253/* Disable APP processing of subsequent output.
254 Called from varasm.cc before most kinds of output. */
255
256void
257app_disable (void)
258{
259 if (app_on)
260 {
261 fputs (ASM_APP_OFF, asm_out_file);
262 app_on = 0;
263 }
264}
265
266/* Return the number of slots filled in the current
267 delayed branch sequence (we don't count the insn needing the
268 delay slot). Zero if not in a delayed branch sequence. */
269
270int
271dbr_sequence_length (void)
272{
273 if (final_sequence != 0)
274 return XVECLEN (final_sequence, 0) - 1;
275 else
276 return 0;
277}
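/* For example, on a target with a single delay slot, a filled delay branch
   SEQUENCE has XVECLEN == 2 (the branch plus one filled slot), so
   dbr_sequence_length above returns 1; outside a delay slot sequence it
   returns 0.  */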
278
279/* The next two pages contain routines used to compute the length of an insn
280 and to shorten branches. */
281
282/* Arrays for insn lengths, and addresses. The latter is referenced by
283 `insn_current_length'. */
284
285static int *insn_lengths;
286
287vec<int> insn_addresses_;
288
289/* Max uid for which the above arrays are valid. */
290static int insn_lengths_max_uid;
291
292/* Address of insn being processed. Used by `insn_current_length'. */
293int insn_current_address;
294
295/* Address of insn being processed in previous iteration. */
296int insn_last_address;
297
298/* known invariant alignment of insn being processed. */
299int insn_current_align;
300
301/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
302 gives the next following alignment insn that increases the known
303 alignment, or NULL_RTX if there is no such insn.
304 For any alignment obtained this way, we can again index uid_align with
305 its uid to obtain the next following align that in turn increases the
306 alignment, till we reach NULL_RTX; the sequence obtained this way
307 for each insn we'll call the alignment chain of this insn in the following
308 comments. */
309
310static rtx *uid_align;
311static int *uid_shuid;
312static vec<align_flags> label_align;
313
314/* Indicate that branch shortening hasn't yet been done. */
315
316void
317init_insn_lengths (void)
318{
319 if (uid_shuid)
320 {
321 free (uid_shuid);
322 uid_shuid = 0;
323 }
324 if (insn_lengths)
325 {
326 free (insn_lengths);
327 insn_lengths = 0;
328 insn_lengths_max_uid = 0;
329 }
330 if (HAVE_ATTR_length)
331 INSN_ADDRESSES_FREE ();
332 if (uid_align)
333 {
334 free (uid_align);
335 uid_align = 0;
336 }
337}
338
339/* Obtain the current length of an insn. If branch shortening has been done,
340 get its actual length. Otherwise, use FALLBACK_FN to calculate the
341 length. */
342static int
343get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
344{
345 rtx body;
346 int i;
347 int length = 0;
348
349 if (!HAVE_ATTR_length)
350 return 0;
351
352 if (insn_lengths_max_uid > INSN_UID (insn))
353 return insn_lengths[INSN_UID (insn)];
354 else
355 switch (GET_CODE (insn))
356 {
357 case NOTE:
358 case BARRIER:
359 case CODE_LABEL:
360 case DEBUG_INSN:
361 return 0;
362
363 case CALL_INSN:
364 case JUMP_INSN:
365 length = fallback_fn (insn);
366 break;
367
368 case INSN:
369 body = PATTERN (insn);
370 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
371 return 0;
372
373 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
374 length = asm_insn_count (body) * fallback_fn (insn);
375 else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
376 for (i = 0; i < seq->len (); i++)
377 length += get_attr_length_1 (seq->insn (i), fallback_fn);
378 else
379 length = fallback_fn (insn);
380 break;
381
382 default:
383 break;
384 }
385
386#ifdef ADJUST_INSN_LENGTH
387 ADJUST_INSN_LENGTH (insn, length);
388#endif
389 return length;
390}
391
392/* Obtain the current length of an insn. If branch shortening has been done,
393 get its actual length. Otherwise, get its maximum length. */
394int
395get_attr_length (rtx_insn *insn)
396{
397 return get_attr_length_1 (insn, insn_default_length);
398}
399
400/* Obtain the current length of an insn. If branch shortening has been done,
401 get its actual length. Otherwise, get its minimum length. */
402int
403get_attr_min_length (rtx_insn *insn)
404{
405 return get_attr_length_1 (insn, insn_min_length);
406}
407
408/* Code to handle alignment inside shorten_branches. */
409
410/* Here is an explanation how the algorithm in align_fuzz can give
411 proper results:
412
413 Call a sequence of instructions beginning with alignment point X
414 and continuing until the next alignment point `block X'. When `X'
415 is used in an expression, it means the alignment value of the
416 alignment point.
417
418 Call the distance between the start of the first insn of block X, and
419 the end of the last insn of block X `IX', for the `inner size of X'.
420 This is clearly the sum of the instruction lengths.
421
422 Likewise with the next alignment-delimited block following X, which we
423 shall call block Y.
424
425 Call the distance between the start of the first insn of block X, and
426 the start of the first insn of block Y `OX', for the `outer size of X'.
427
428 The estimated padding is then OX - IX.
429
430 OX can be safely estimated as
431
432 if (X >= Y)
433 OX = round_up(IX, Y)
434 else
435 OX = round_up(IX, X) + Y - X
436
437 Clearly est(IX) >= real(IX), because that only depends on the
438 instruction lengths, and those being overestimated is a given.
439
440 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
441 we needn't worry about that when thinking about OX.
442
443 When X >= Y, the alignment provided by Y adds no uncertainty factor
444 for branch ranges starting before X, so we can just round what we have.
445 But when X < Y, we don't know anything about the, so to speak,
446 `middle bits', so we have to assume the worst when aligning up from an
447 address mod X to one mod Y, which is Y - X. */
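/* A small worked example of the estimate above, for illustration only:
   with IX = 10 bytes, X a 4-byte alignment and Y an 8-byte alignment,
   X < Y gives OX = round_up (10, 4) + 8 - 4 = 16, i.e. an estimated
   padding of OX - IX = 6 bytes; had X been >= Y, we would instead use
   OX = round_up (10, Y) directly.  */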
448
449#ifndef LABEL_ALIGN
450#define LABEL_ALIGN(LABEL) align_labels
451#endif
452
453#ifndef LOOP_ALIGN
454#define LOOP_ALIGN(LABEL) align_loops
455#endif
456
457#ifndef LABEL_ALIGN_AFTER_BARRIER
458#define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
459#endif
460
461#ifndef JUMP_ALIGN
462#define JUMP_ALIGN(LABEL) align_jumps
463#endif
464
465#ifndef ADDR_VEC_ALIGN
466static int
467final_addr_vec_align (rtx_jump_table_data *addr_vec)
468{
469 int align = GET_MODE_SIZE (addr_vec->get_data_mode ());
470
471 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
472 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
473 return exact_log2 (align);
474
475}
476
477#define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
478#endif
479
480#ifndef INSN_LENGTH_ALIGNMENT
481#define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
482#endif
483
484#define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
485
486static int min_labelno, max_labelno;
487
488#define LABEL_TO_ALIGNMENT(LABEL) \
489 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno])
490
491/* For the benefit of port specific code do this also as a function. */
492
493align_flags
494label_to_alignment (rtx label)
495{
496 if (CODE_LABEL_NUMBER (label) <= max_labelno)
497 return LABEL_TO_ALIGNMENT (label);
498 return align_flags ();
499}
500
501/* The differences in addresses
502 between a branch and its target might grow or shrink depending on
503 the alignment the start insn of the range (the branch for a forward
504 branch or the label for a backward branch) starts out on; if these
505 differences are used naively, they can even oscillate infinitely.
506 We therefore want to compute a 'worst case' address difference that
507 is independent of the alignment the start insn of the range ends
508 up on, and that is at least as large as the actual difference.
509 The function align_fuzz calculates the amount we have to add to the
510 naively computed difference, by traversing the part of the alignment
511 chain of the start insn of the range that is in front of the end insn
512 of the range, and considering for each alignment the maximum amount
513 that it might contribute to a size increase.
514
515 For casesi tables, we also want to know worst case minimum amounts of
516 address difference, in case a machine description wants to introduce
517 some common offset that is added to all offsets in a table.
518 For this purpose, align_fuzz with a growth argument of 0 computes the
519 appropriate adjustment. */
520
521/* Compute the maximum delta by which the difference of the addresses of
522 START and END might grow / shrink due to a different address for start
523 which changes the size of alignment insns between START and END.
524 KNOWN_ALIGN_LOG is the alignment known for START.
525 GROWTH should be ~0 if the objective is to compute potential code size
526 increase, and 0 if the objective is to compute potential shrink.
527 The return value is undefined for any other value of GROWTH. */
528
529static int
530align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
531{
532 int uid = INSN_UID (start);
533 rtx align_label;
534 int known_align = 1 << known_align_log;
535 int end_shuid = INSN_SHUID (end);
536 int fuzz = 0;
537
538 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
539 {
540 int align_addr, new_align;
541
542 uid = INSN_UID (align_label);
543 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
544 if (uid_shuid[uid] > end_shuid)
545 break;
546 align_flags alignment = LABEL_TO_ALIGNMENT (align_label);
547 new_align = 1 << alignment.levels[0].log;
548 if (new_align < known_align)
549 continue;
550 fuzz += (-align_addr ^ growth) & (new_align - known_align);
551 known_align = new_align;
552 }
553 return fuzz;
554}
555
556/* Compute a worst-case reference address of a branch so that it
557 can be safely used in the presence of aligned labels. Since the
558 size of the branch itself is unknown, the size of the branch is
559 not included in the range. I.e. for a forward branch, the reference
560 address is the end address of the branch as known from the previous
561 branch shortening pass, minus a value to account for possible size
562 increase due to alignment. For a backward branch, it is the start
563 address of the branch as known from the current pass, plus a value
564 to account for possible size increase due to alignment.
565 NB.: Therefore, the maximum offset allowed for backward branches needs
566 to exclude the branch size. */
567
568int
569insn_current_reference_address (rtx_insn *branch)
570{
571 rtx dest;
572 int seq_uid;
573
574 if (! INSN_ADDRESSES_SET_P ())
575 return 0;
576
577 rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
578 seq_uid = INSN_UID (seq);
579 if (!jump_to_label_p (branch))
580 /* This can happen for example on the PA; the objective is to know the
581 offset to address something in front of the start of the function.
582 Thus, we can treat it like a backward branch.
583 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
584 any alignment we'd encounter, so we skip the call to align_fuzz. */
585 return insn_current_address;
586 dest = JUMP_LABEL (branch);
587
588 /* BRANCH has no proper alignment chain set, so use SEQ.
589 BRANCH also has no INSN_SHUID. */
590 if (INSN_SHUID (seq) < INSN_SHUID (dest))
591 {
592 /* Forward branch. */
593 return (insn_last_address + insn_lengths[seq_uid]
594 - align_fuzz (seq, dest, length_unit_log, ~0));
595 }
596 else
597 {
598 /* Backward branch. */
599 return (insn_current_address
600 + align_fuzz (dest, seq, length_unit_log, ~0));
601 }
602}
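/* Purely illustrative example with made-up numbers: if the previous pass
   placed the end of a forward branch at address 104 (start 100, length 4)
   and align_fuzz reports up to 6 bytes of extra alignment padding before
   the target, the reference address used is 104 - 6 = 98, so the branch
   distance is over- rather than under-estimated.  */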
603
604/* Compute branch alignments based on CFG profile. */
605
606void
607compute_alignments (void)
608{
609 basic_block bb;
610 align_flags max_alignment;
611
612 label_align.truncate (0);
613
614 max_labelno = max_label_num ();
615 min_labelno = get_first_label_num ();
616 label_align.safe_grow_cleared (max_labelno - min_labelno + 1, true);
617
618 /* If not optimizing or optimizing for size, don't assign any alignments. */
619 if (! optimize || optimize_function_for_size_p (cfun))
620 return;
621
622 if (dump_file)
623 {
624 dump_reg_info (dump_file);
625 dump_flow_info (dump_file, TDF_DETAILS);
626 flow_loops_dump (dump_file, NULL, 1);
627 }
628 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
629 profile_count count_threshold = cfun->cfg->count_max / param_align_threshold;
630
631 if (dump_file)
632 {
633 fprintf (dump_file, "count_max: ");
634 cfun->cfg->count_max.dump (dump_file);
635 fprintf (dump_file, "\n");
636 }
637 FOR_EACH_BB_FN (bb, cfun)
638 {
639 rtx_insn *label = BB_HEAD (bb);
640 bool has_fallthru = 0;
641 edge e;
642 edge_iterator ei;
643
644 if (!LABEL_P (label)
645 || optimize_bb_for_size_p (bb))
646 {
647 if (dump_file)
648 fprintf (dump_file,
649 "BB %4i loop %2i loop_depth %2i skipped.\n",
650 bb->index,
651 bb->loop_father->num,
652 bb_loop_depth (bb));
653 continue;
654 }
655 max_alignment = LABEL_ALIGN (label);
656 profile_count fallthru_count = profile_count::zero ();
657 profile_count branch_count = profile_count::zero ();
658
659 FOR_EACH_EDGE (e, ei, bb->preds)
660 {
661 if (e->flags & EDGE_FALLTHRU)
662 has_fallthru = 1, fallthru_count += e->count ();
663 else
664 branch_count += e->count ();
665 }
666 if (dump_file)
667 {
668 fprintf (dump_file, "BB %4i loop %2i loop_depth"
669 " %2i fall ",
670 bb->index, bb->loop_father->num,
671 bb_loop_depth (bb));
672 fallthru_count.dump (dump_file);
673 fprintf (dump_file, " branch ");
674 branch_count.dump (dump_file);
675 if (!bb->loop_father->inner && bb->loop_father->num)
676 fprintf (dump_file, " inner_loop");
677 if (bb->loop_father->header == bb)
678 fprintf (dump_file, " loop_header");
679 fprintf (dump_file, "\n");
680 }
681 if (!fallthru_count.initialized_p () || !branch_count.initialized_p ())
682 continue;
683
684 /* There are two purposes for aligning a block with no fallthru incoming edge:
685 1) to avoid fetch stalls when the branch destination is near a cache boundary
686 2) to improve cache efficiency in case the previous block is not executed
687 (so it does not need to be in the cache).
688
689 To catch the first case, we align frequently executed blocks.
690 To catch the second, we align blocks that are executed more frequently
691 than the predecessor and the predecessor is likely not to be executed
692 when the function is called. */
693
694 if (!has_fallthru
695 && (branch_count > count_threshold
696 || (bb->count > bb->prev_bb->count * 10
697 && (bb->prev_bb->count
698 <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count / 2))))
699 {
700 align_flags alignment = JUMP_ALIGN (label);
701 if (dump_file)
702 fprintf (dump_file, " jump alignment added.\n");
703 max_alignment = align_flags::max (max_alignment, alignment);
704 }
705 /* In case the block is frequent and reached mostly by non-fallthru edges,
706 align it. It is most likely the first block of a loop. */
707 if (has_fallthru
708 && !(single_succ_p (bb)
709 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
710 && optimize_bb_for_speed_p (bb)
711 && branch_count + fallthru_count > count_threshold
712 && (branch_count > fallthru_count * param_align_loop_iterations))
713 {
714 align_flags alignment = LOOP_ALIGN (label);
715 if (dump_file)
716 fprintf (dump_file, " internal loop alignment added.\n");
717 max_alignment = align_flags::max (max_alignment, alignment);
718 }
719 LABEL_TO_ALIGNMENT (label) = max_alignment;
720 }
721
722 loop_optimizer_finalize ();
723 free_dominance_info (CDI_DOMINATORS);
724}
725
726/* Grow the LABEL_ALIGN array after new labels are created. */
727
728static void
729grow_label_align (void)
730{
731 int old = max_labelno;
732 int n_labels;
733 int n_old_labels;
734
735 max_labelno = max_label_num ();
736
737 n_labels = max_labelno - min_labelno + 1;
738 n_old_labels = old - min_labelno + 1;
739
740 label_align.safe_grow_cleared (n_labels, true);
741
742 /* Range of labels grows monotonically in the function. Failing here
743 means that the initialization of array got lost. */
744 gcc_assert (n_old_labels <= n_labels);
745}
746
747/* Update the already computed alignment information. LABEL_PAIRS is a vector
748 made up of pairs of labels for which the alignment information of the first
749 element will be copied from that of the second element. */
750
751void
752update_alignments (vec<rtx> &label_pairs)
753{
754 unsigned int i = 0;
755 rtx iter, label = NULL_RTX;
756
757 if (max_labelno != max_label_num ())
758 grow_label_align ();
759
760 FOR_EACH_VEC_ELT (label_pairs, i, iter)
761 if (i & 1)
762 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
763 else
764 label = iter;
765}
766
767namespace {
768
769const pass_data pass_data_compute_alignments =
770{
771 RTL_PASS, /* type */
772 "alignments", /* name */
773 OPTGROUP_NONE, /* optinfo_flags */
774 TV_NONE, /* tv_id */
775 0, /* properties_required */
776 0, /* properties_provided */
777 0, /* properties_destroyed */
778 0, /* todo_flags_start */
779 0, /* todo_flags_finish */
780};
781
782class pass_compute_alignments : public rtl_opt_pass
783{
784public:
785 pass_compute_alignments (gcc::context *ctxt)
786 : rtl_opt_pass (pass_data_compute_alignments, ctxt)
787 {}
788
789 /* opt_pass methods: */
790 unsigned int execute (function *) final override
791 {
792 compute_alignments ();
793 return 0;
794 }
795
796}; // class pass_compute_alignments
797
798} // anon namespace
799
800rtl_opt_pass *
801make_pass_compute_alignments (gcc::context *ctxt)
802{
803 return new pass_compute_alignments (ctxt);
804}
805
806
807/* Make a pass over all insns and compute their actual lengths by shortening
808 any branches of variable length if possible. */
809
810/* shorten_branches might be called multiple times: for example, the SH
811 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
812 In order to do this, it needs proper length information, which it obtains
813 by calling shorten_branches. This cannot be collapsed with
814 shorten_branches itself into a single pass unless we also want to integrate
815 reorg.cc, since the branch splitting exposes new instructions with delay
816 slots. */
817
818void
819shorten_branches (rtx_insn *first)
820{
821 rtx_insn *insn;
822 int max_uid;
823 int i;
824 rtx_insn *seq;
825 bool something_changed = true;
826 char *varying_length;
827 rtx body;
828 int uid;
829 rtx align_tab[MAX_CODE_ALIGN + 1];
830
831 /* Compute maximum UID and allocate label_align / uid_shuid. */
832 max_uid = get_max_uid ();
833
834 /* Free uid_shuid before reallocating it. */
835 free (uid_shuid);
836
837 uid_shuid = XNEWVEC (int, max_uid);
838
839 if (max_labelno != max_label_num ())
840 grow_label_align ();
841
842 /* Initialize label_align and set up uid_shuid to be strictly
843 monotonically rising with insn order. */
844 /* We use alignment here to keep track of the maximum alignment we want to
845 impose on the next CODE_LABEL (or the current one if we are processing
846 the CODE_LABEL itself). */
847
848 align_flags max_alignment;
849
850 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
851 {
852 INSN_SHUID (insn) = i++;
853 if (INSN_P (insn))
854 continue;
855
856 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
857 {
858 /* Merge in alignments computed by compute_alignments. */
859 align_flags alignment = LABEL_TO_ALIGNMENT (label);
860 max_alignment = align_flags::max (max_alignment, alignment);
861
862 rtx_jump_table_data *table = jump_table_for_label (label);
863 if (!table)
864 {
865 align_flags alignment = LABEL_ALIGN (label);
866 max_alignment = align_flags::max (max_alignment, alignment);
867 }
868 /* ADDR_VECs only take room if read-only data goes into the text
869 section. */
870 if ((JUMP_TABLES_IN_TEXT_SECTION
871 || readonly_data_section == text_section)
872 && table)
873 {
874 align_flags alignment = align_flags (ADDR_VEC_ALIGN (table));
875 max_alignment = align_flags::max (max_alignment, alignment);
876 }
877 LABEL_TO_ALIGNMENT (label) = max_alignment;
878 max_alignment = align_flags ();
879 }
880 else if (BARRIER_P (insn))
881 {
882 rtx_insn *label;
883
884 for (label = insn; label && ! INSN_P (label);
885 label = NEXT_INSN (label))
886 if (LABEL_P (label))
887 {
888 align_flags alignment
889 = align_flags (LABEL_ALIGN_AFTER_BARRIER (insn));
890 max_alignment = align_flags::max (max_alignment, alignment);
891 break;
892 }
893 }
894 }
895 if (!HAVE_ATTR_length)
896 return;
897
898 /* Allocate the rest of the arrays. */
899 insn_lengths = XNEWVEC (int, max_uid);
900 insn_lengths_max_uid = max_uid;
901 /* Syntax errors can lead to labels being outside of the main insn stream.
902 Initialize insn_addresses, so that we get reproducible results. */
903 INSN_ADDRESSES_ALLOC (max_uid);
904
905 varying_length = XCNEWVEC (char, max_uid);
906
907 /* Initialize uid_align. We scan instructions
908 from end to start, and keep in align_tab[n] the last seen insn
909 that does an alignment of at least n+1, i.e. the successor
910 in the alignment chain for an insn that does / has a known
911 alignment of n. */
912 uid_align = XCNEWVEC (rtx, max_uid);
913
914 for (i = MAX_CODE_ALIGN + 1; --i >= 0;)
915 align_tab[i] = NULL_RTX;
916 seq = get_last_insn ();
917 for (; seq; seq = PREV_INSN (seq))
918 {
919 int uid = INSN_UID (seq);
920 int log;
921 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq).levels[0].log : 0);
922 uid_align[uid] = align_tab[0];
923 if (log)
924 {
925 /* Found an alignment label. */
926 gcc_checking_assert (log < MAX_CODE_ALIGN + 1);
927 uid_align[uid] = align_tab[log];
928 for (i = log - 1; i >= 0; i--)
929 align_tab[i] = seq;
930 }
931 }
932
933 /* When optimizing, we start assuming minimum length, and keep increasing
934 lengths as we find the need for this, till nothing changes.
935 When not optimizing, we start assuming maximum lengths, and
936 do a single pass to update the lengths. */
937 bool increasing = optimize != 0;
938
939#ifdef CASE_VECTOR_SHORTEN_MODE
940 if (optimize)
941 {
942 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
943 label fields. */
944
945 int min_shuid = INSN_SHUID (get_insns ()) - 1;
946 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
947 int rel;
948
949 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
950 {
951 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
952 int len, i, min, max, insn_shuid;
953 int min_align;
954 addr_diff_vec_flags flags;
955
956 if (! JUMP_TABLE_DATA_P (insn)
957 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
958 continue;
959 pat = PATTERN (insn);
960 len = XVECLEN (pat, 1);
961 gcc_assert (len > 0);
962 min_align = MAX_CODE_ALIGN;
963 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
964 {
965 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
966 int shuid = INSN_SHUID (lab);
967 if (shuid < min)
968 {
969 min = shuid;
970 min_lab = lab;
971 }
972 if (shuid > max)
973 {
974 max = shuid;
975 max_lab = lab;
976 }
977
978 int label_alignment = LABEL_TO_ALIGNMENT (lab).levels[0].log;
979 if (min_align > label_alignment)
980 min_align = label_alignment;
981 }
982 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
983 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
984 insn_shuid = INSN_SHUID (insn);
985 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
986 memset (&flags, 0, sizeof (flags));
987 flags.min_align = min_align;
988 flags.base_after_vec = rel > insn_shuid;
989 flags.min_after_vec = min > insn_shuid;
990 flags.max_after_vec = max > insn_shuid;
991 flags.min_after_base = min > rel;
992 flags.max_after_base = max > rel;
993 ADDR_DIFF_VEC_FLAGS (pat) = flags;
994
995 if (increasing)
996 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
997 }
998 }
999#endif /* CASE_VECTOR_SHORTEN_MODE */
1000
1001 /* Compute initial lengths, addresses, and varying flags for each insn. */
1002 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1003
1004 for (insn_current_address = 0, insn = first;
1005 insn != 0;
1006 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1007 {
1008 uid = INSN_UID (insn);
1009
1010 insn_lengths[uid] = 0;
1011
1012 if (LABEL_P (insn))
1013 {
1014 int log = LABEL_TO_ALIGNMENT (insn).levels[0].log;
1015 if (log)
1016 {
1017 int align = 1 << log;
1018 int new_address = (insn_current_address + align - 1) & -align;
1019 insn_lengths[uid] = new_address - insn_current_address;
1020 }
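/* For instance (illustration only), with insn_current_address == 13 and
   log == 3, new_address = (13 + 7) & -8 == 16, so the label is charged
   3 bytes of padding.  */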
1021 }
1022
1023 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1024
1025 if (NOTE_P (insn) || BARRIER_P (insn)
1026 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1027 continue;
1028 if (insn->deleted ())
1029 continue;
1030
1031 body = PATTERN (insn);
1032 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
1033 {
1034 /* This only takes room if read-only data goes into the text
1035 section. */
1036 if (JUMP_TABLES_IN_TEXT_SECTION
1037 || readonly_data_section == text_section)
1038 insn_lengths[uid] = (XVECLEN (body,
1039 GET_CODE (body) == ADDR_DIFF_VEC)
1040 * GET_MODE_SIZE (table->get_data_mode ()));
1041 /* Alignment is handled by ADDR_VEC_ALIGN. */
1042 }
1043 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1044 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1045 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1046 {
1047 int i;
1048 int const_delay_slots;
1049 if (DELAY_SLOTS)
1050 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1051 else
1052 const_delay_slots = 0;
1053
1054 int (*inner_length_fun) (rtx_insn *)
1055 = const_delay_slots ? length_fun : insn_default_length;
1056 /* Inside a delay slot sequence, we do not do any branch shortening
1057 if the shortening could change the number of delay slots
1058 of the branch. */
1059 for (i = 0; i < body_seq->len (); i++)
1060 {
1061 rtx_insn *inner_insn = body_seq->insn (i);
1062 int inner_uid = INSN_UID (inner_insn);
1063 int inner_length;
1064
1065 if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1066 || asm_noperands (PATTERN (inner_insn)) >= 0)
1067 inner_length = (asm_insn_count (PATTERN (inner_insn))
1068 * insn_default_length (inner_insn));
1069 else
1070 inner_length = inner_length_fun (inner_insn);
1071
1072 insn_lengths[inner_uid] = inner_length;
1073 if (const_delay_slots)
1074 {
1075 if ((varying_length[inner_uid]
1076 = insn_variable_length_p (inner_insn)) != 0)
1077 varying_length[uid] = 1;
1078 INSN_ADDRESSES (inner_uid) = (insn_current_address
1079 + insn_lengths[uid]);
1080 }
1081 else
1082 varying_length[inner_uid] = 0;
1083 insn_lengths[uid] += inner_length;
1084 }
1085 }
1086 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1087 {
1088 insn_lengths[uid] = length_fun (insn);
1089 varying_length[uid] = insn_variable_length_p (insn);
1090 }
1091
1092 /* If needed, do any adjustment. */
1093#ifdef ADJUST_INSN_LENGTH
1094 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1095 if (insn_lengths[uid] < 0)
1096 fatal_insn ("negative insn length", insn);
1097#endif
1098 }
1099
1100 /* Now loop over all the insns finding varying length insns. For each,
1101 get the current insn length. If it has changed, reflect the change.
1102 When nothing changes for a full pass, we are done. */
1103
1104 while (something_changed)
1105 {
1106 something_changed = false;
1107 insn_current_align = MAX_CODE_ALIGN - 1;
1108 for (insn_current_address = 0, insn = first;
1109 insn != 0;
1110 insn = NEXT_INSN (insn))
1111 {
1112 int new_length;
1113#ifdef ADJUST_INSN_LENGTH
1114 int tmp_length;
1115#endif
1116 int length_align;
1117
1118 uid = INSN_UID (insn);
1119
1120 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
1121 {
1122 int log = LABEL_TO_ALIGNMENT (label).levels[0].log;
1123
1124#ifdef CASE_VECTOR_SHORTEN_MODE
1125 /* If the mode of a following jump table was changed, we
1126 may need to update the alignment of this label. */
1127
1128 if (JUMP_TABLES_IN_TEXT_SECTION
1129 || readonly_data_section == text_section)
1130 {
1131 rtx_jump_table_data *table = jump_table_for_label (label);
1132 if (table)
1133 {
1134 int newlog = ADDR_VEC_ALIGN (table);
1135 if (newlog != log)
1136 {
1137 log = newlog;
1138 LABEL_TO_ALIGNMENT (insn) = log;
1139 something_changed = true;
1140 }
1141 }
1142 }
1143#endif
1144
1145 if (log > insn_current_align)
1146 {
1147 int align = 1 << log;
1148 int new_address= (insn_current_address + align - 1) & -align;
1149 insn_lengths[uid] = new_address - insn_current_address;
1150 insn_current_align = log;
1151 insn_current_address = new_address;
1152 }
1153 else
1154 insn_lengths[uid] = 0;
1155 INSN_ADDRESSES (uid) = insn_current_address;
1156 continue;
1157 }
1158
1159 length_align = INSN_LENGTH_ALIGNMENT (insn);
1160 if (length_align < insn_current_align)
1161 insn_current_align = length_align;
1162
1163 insn_last_address = INSN_ADDRESSES (uid);
1164 INSN_ADDRESSES (uid) = insn_current_address;
1165
1166#ifdef CASE_VECTOR_SHORTEN_MODE
1167 if (optimize
1168 && JUMP_TABLE_DATA_P (insn)
1169 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1170 {
1171 rtx_jump_table_data *table = as_a <rtx_jump_table_data *> (insn);
1172 rtx body = PATTERN (insn);
1173 int old_length = insn_lengths[uid];
1174 rtx_insn *rel_lab =
1175 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1176 rtx min_lab = XEXP (XEXP (body, 2), 0);
1177 rtx max_lab = XEXP (XEXP (body, 3), 0);
1178 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1179 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1180 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1181 rtx_insn *prev;
1182 int rel_align = 0;
1183 addr_diff_vec_flags flags;
1184 scalar_int_mode vec_mode;
1185
1186 /* Avoid automatic aggregate initialization. */
1187 flags = ADDR_DIFF_VEC_FLAGS (body);
1188
1189 /* Try to find a known alignment for rel_lab. */
1190 for (prev = rel_lab;
1191 prev
1192 && ! insn_lengths[INSN_UID (prev)]
1193 && ! (varying_length[INSN_UID (prev)] & 1);
1194 prev = PREV_INSN (prev))
1195 if (varying_length[INSN_UID (prev)] & 2)
1196 {
1197 rel_align = LABEL_TO_ALIGNMENT (prev).levels[0].log;
1198 break;
1199 }
1200
1201 /* See the comment on addr_diff_vec_flags in rtl.h for the
1202 meaning of the flags values. base: REL_LAB vec: INSN */
1203 /* Anything after INSN still has addresses from the last
1204 pass; adjust these so that they reflect our current
1205 estimate for this pass. */
1206 if (flags.base_after_vec)
1207 rel_addr += insn_current_address - insn_last_address;
1208 if (flags.min_after_vec)
1209 min_addr += insn_current_address - insn_last_address;
1210 if (flags.max_after_vec)
1211 max_addr += insn_current_address - insn_last_address;
1212 /* We want to know the worst case, i.e. lowest possible value
1213 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1214 its offset is positive, and we have to be wary of code shrink;
1215 otherwise, it is negative, and we have to be wary of code
1216 size increase. */
1217 if (flags.min_after_base)
1218 {
1219 /* If INSN is between REL_LAB and MIN_LAB, the size
1220 changes we are about to make can change the alignment
1221 within the observed offset, therefore we have to break
1222 it up into two parts that are independent. */
1223 if (! flags.base_after_vec && flags.min_after_vec)
1224 {
1225 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1226 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1227 }
1228 else
1229 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1230 }
1231 else
1232 {
1233 if (flags.base_after_vec && ! flags.min_after_vec)
1234 {
1235 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1236 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1237 }
1238 else
1239 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1240 }
1241 /* Likewise, determine the highest possible value
1242 for the offset of MAX_LAB. */
1243 if (flags.max_after_base)
1244 {
1245 if (! flags.base_after_vec && flags.max_after_vec)
1246 {
1247 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1248 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1249 }
1250 else
1251 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1252 }
1253 else
1254 {
1255 if (flags.base_after_vec && ! flags.max_after_vec)
1256 {
1257 max_addr += align_fuzz (max_lab, insn, 0, 0);
1258 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1259 }
1260 else
1261 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1262 }
1263 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1264 max_addr - rel_addr, body);
1265 if (!increasing
1266 || (GET_MODE_SIZE (vec_mode)
1267 >= GET_MODE_SIZE (table->get_data_mode ())))
1268 PUT_MODE (body, vec_mode);
1269 if (JUMP_TABLES_IN_TEXT_SECTION
1270 || readonly_data_section == text_section)
1271 {
1272 insn_lengths[uid]
1273 = (XVECLEN (body, 1)
1274 * GET_MODE_SIZE (table->get_data_mode ()));
1275 insn_current_address += insn_lengths[uid];
1276 if (insn_lengths[uid] != old_length)
1277 something_changed = true;
1278 }
1279
1280 continue;
1281 }
1282#endif /* CASE_VECTOR_SHORTEN_MODE */
1283
1284 if (! (varying_length[uid]))
1285 {
1286 if (NONJUMP_INSN_P (insn)
1287 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1288 {
1289 int i;
1290
1291 body = PATTERN (insn);
1292 for (i = 0; i < XVECLEN (body, 0); i++)
1293 {
1294 rtx inner_insn = XVECEXP (body, 0, i);
1295 int inner_uid = INSN_UID (inner_insn);
1296
1297 INSN_ADDRESSES (inner_uid) = insn_current_address;
1298
1299 insn_current_address += insn_lengths[inner_uid];
1300 }
1301 }
1302 else
1303 insn_current_address += insn_lengths[uid];
1304
1305 continue;
1306 }
1307
1308 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1309 {
1310 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1311 int i;
1312
1313 body = PATTERN (insn);
1314 new_length = 0;
1315 for (i = 0; i < seqn->len (); i++)
1316 {
1317 rtx_insn *inner_insn = seqn->insn (i);
1318 int inner_uid = INSN_UID (inner_insn);
1319 int inner_length;
1320
1321 INSN_ADDRESSES (inner_uid) = insn_current_address;
1322
1323 /* insn_current_length returns 0 for insns with a
1324 non-varying length. */
1325 if (! varying_length[inner_uid])
1326 inner_length = insn_lengths[inner_uid];
1327 else
1328 inner_length = insn_current_length (inner_insn);
1329
1330 if (inner_length != insn_lengths[inner_uid])
1331 {
1332 if (!increasing || inner_length > insn_lengths[inner_uid])
1333 {
1334 insn_lengths[inner_uid] = inner_length;
1335 something_changed = true;
1336 }
1337 else
1338 inner_length = insn_lengths[inner_uid];
1339 }
1340 insn_current_address += inner_length;
1341 new_length += inner_length;
1342 }
1343 }
1344 else
1345 {
1346 new_length = insn_current_length (insn);
1347 insn_current_address += new_length;
1348 }
1349
1350#ifdef ADJUST_INSN_LENGTH
1351 /* If needed, do any adjustment. */
1352 tmp_length = new_length;
1353 ADJUST_INSN_LENGTH (insn, new_length);
1354 insn_current_address += (new_length - tmp_length);
1355#endif
1356
1357 if (new_length != insn_lengths[uid]
1358 && (!increasing || new_length > insn_lengths[uid]))
1359 {
1360 insn_lengths[uid] = new_length;
1361 something_changed = true;
1362 }
1363 else
1364 insn_current_address += insn_lengths[uid] - new_length;
1365 }
1366 /* For a non-optimizing compile, do only a single pass. */
1367 if (!increasing)
1368 break;
1369 }
1370 crtl->max_insn_address = insn_current_address;
1371 free (varying_length);
1372}
1373
1374/* Given the body of an INSN known to be generated by an ASM statement, return
1375 the number of machine instructions likely to be generated for this insn.
1376 This is used to compute its length. */
1377
1378static int
1379asm_insn_count (rtx body)
1380{
1381 const char *templ;
1382
1383 if (GET_CODE (body) == ASM_INPUT)
1384 templ = XSTR (body, 0);
1385 else
1386 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1387
1388 return asm_str_count (templ);
1389}
1390
1391/* Return the number of machine instructions likely to be generated for the
1392 inline-asm template. */
1393int
1394asm_str_count (const char *templ)
1395{
1396 int count = 1;
1397
1398 if (!*templ)
1399 return 0;
1400
1401 for (; *templ; templ++)
1402 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1403 || *templ == '\n')
1404 count++;
1405
1406 return count;
1407}
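/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR above,
   asm_str_count ("") is 0, asm_str_count ("nop") is 1, and
   asm_str_count ("nop\n\tnop; nop") is 3, since both '\n' and ';' start
   a new logical assembler line.  */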
1408
1409/* Return true if DWARF2 debug info can be emitted for DECL. */
1410
1411static bool
1412dwarf2_debug_info_emitted_p (tree decl)
1413{
1414 /* When DWARF2 debug info is not generated internally. */
1415 if (!dwarf_debuginfo_p () && !dwarf_based_debuginfo_p ())
1416 return false;
1417
1418 if (DECL_IGNORED_P (decl))
1419 return false;
1420
1421 return true;
1422}
1423
1424/* Return scope resulting from combination of S1 and S2. */
1425static tree
1426choose_inner_scope (tree s1, tree s2)
1427{
1428 if (!s1)
1429 return s2;
1430 if (!s2)
1431 return s1;
1432 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1433 return s1;
1434 return s2;
1435}
1436
1437/* Emit lexical block notes needed to change scope from S1 to S2. */
1438
1439static void
1440change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1441{
1442 rtx_insn *insn = orig_insn;
1443 tree com = NULL_TREE;
1444 tree ts1 = s1, ts2 = s2;
1445 tree s;
1446
1447 while (ts1 != ts2)
1448 {
1449 gcc_assert (ts1 && ts2);
1450 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1451 ts1 = BLOCK_SUPERCONTEXT (ts1);
1452 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1453 ts2 = BLOCK_SUPERCONTEXT (ts2);
1454 else
1455 {
1456 ts1 = BLOCK_SUPERCONTEXT (ts1);
1457 ts2 = BLOCK_SUPERCONTEXT (ts2);
1458 }
1459 }
1460 com = ts1;
1461
1462 /* Close scopes. */
1463 s = s1;
1464 while (s != com)
1465 {
1466 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1467 NOTE_BLOCK (note) = s;
1468 s = BLOCK_SUPERCONTEXT (s);
1469 }
1470
1471 /* Open scopes. */
1472 s = s2;
1473 while (s != com)
1474 {
1475 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1476 NOTE_BLOCK (insn) = s;
1477 s = BLOCK_SUPERCONTEXT (s);
1478 }
1479}
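/* As an illustration: if S1 is a block B3 nested directly in B1 and S2 is
   a sibling block B5 also nested in B1, the common ancestor found above is
   B1, so a NOTE_INSN_BLOCK_END note is emitted for B3 and a
   NOTE_INSN_BLOCK_BEG note for B5, both just before ORIG_INSN.  */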
1480
1481/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1482 on the scope tree and the newly reordered instructions. */
1483
1484static void
1485reemit_insn_block_notes (void)
1486{
1487 tree cur_block = DECL_INITIAL (cfun->decl);
1488 rtx_insn *insn;
1489
1490 insn = get_insns ();
1491 for (; insn; insn = NEXT_INSN (insn))
1492 {
1493 tree this_block;
1494
1495 /* Prevent lexical blocks from straddling section boundaries. */
1496 if (NOTE_P (insn))
1497 switch (NOTE_KIND (insn))
1498 {
1499 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1500 {
1501 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1502 s = BLOCK_SUPERCONTEXT (s))
1503 {
1504 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1505 NOTE_BLOCK (note) = s;
1506 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1507 NOTE_BLOCK (note) = s;
1508 }
1509 }
1510 break;
1511
1512 case NOTE_INSN_BEGIN_STMT:
1513 case NOTE_INSN_INLINE_ENTRY:
1514 this_block = LOCATION_BLOCK (NOTE_MARKER_LOCATION (insn));
1515 goto set_cur_block_to_this_block;
1516
1517 default:
1518 continue;
1519 }
1520
1521 if (!active_insn_p (insn))
1522 continue;
1523
1524 /* Avoid putting scope notes between jump table and its label. */
1525 if (JUMP_TABLE_DATA_P (insn))
1526 continue;
1527
1528 this_block = insn_scope (insn);
1529 /* For sequences compute scope resulting from merging all scopes
1530 of instructions nested inside. */
1531 if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1532 {
1533 int i;
1534
1535 this_block = NULL;
1536 for (i = 0; i < body->len (); i++)
1537 this_block = choose_inner_scope (this_block,
1538 insn_scope (body->insn (i)));
1539 }
1540 set_cur_block_to_this_block:
1541 if (! this_block)
1542 {
1543 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1544 continue;
1545 else
1546 this_block = DECL_INITIAL (cfun->decl);
1547 }
1548
1549 if (this_block != cur_block)
1550 {
1551 change_scope (insn, cur_block, this_block);
1552 cur_block = this_block;
1553 }
1554 }
1555
1556 /* change_scope emits before the insn, not after. */
1557 rtx_note *note = emit_note (NOTE_INSN_DELETED);
1558 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1559 delete_insn (note);
1560
1561 reorder_blocks ();
1562}
1563
1564static const char *some_local_dynamic_name;
1565
1566/* Locate some local-dynamic symbol still in use by this function
1567 so that we can print its name in local-dynamic base patterns.
1568 Return null if there are no local-dynamic references. */
1569
1570const char *
1571get_some_local_dynamic_name ()
1572{
1573 subrtx_iterator::array_type array;
1574 rtx_insn *insn;
1575
1576 if (some_local_dynamic_name)
1577 return some_local_dynamic_name;
1578
1579 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1580 if (NONDEBUG_INSN_P (insn))
1581 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1582 {
1583 const_rtx x = *iter;
1584 if (GET_CODE (x) == SYMBOL_REF)
1585 {
1586 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1587 return some_local_dynamic_name = XSTR (x, 0);
1588 if (CONSTANT_POOL_ADDRESS_P (x))
1589 iter.substitute (get_pool_constant (x));
1590 }
1591 }
1592
1593 return 0;
1594}
1595
1596/* Arrange for us to emit a source location note before any further
1597 real insns or section changes, by setting the SEEN_NEXT_VIEW bit in
1598 *SEEN, as long as we are keeping track of location views. The bit
1599 indicates we have referenced the next view at the current PC, so we
1600 have to emit it. This should be called next to the var_location
1601 debug hook. */
1602
1603static inline void
1604set_next_view_needed (int *seen)
1605{
1606 if (debug_variable_location_views)
1607 *seen |= SEEN_NEXT_VIEW;
1608}
1609
1610/* Clear the flag in *SEEN indicating we need to emit the next view.
1611 This should be called next to the source_line debug hook. */
1612
1613static inline void
1614clear_next_view_needed (int *seen)
1615{
1616 *seen &= ~SEEN_NEXT_VIEW;
1617}
1618
1619/* Test whether we have a pending request to emit the next view in
1620 *SEEN, and emit it if needed, clearing the request bit. */
1621
1622static inline void
1623maybe_output_next_view (int *seen)
1624{
1625 if ((*seen & SEEN_NEXT_VIEW) != 0)
1626 {
1627 clear_next_view_needed (seen);
1628 (*debug_hooks->source_line) (last_linenum, last_columnnum,
1629 last_filename, last_discriminator,
1630 false);
1631 }
1632}
1633
1634/* We want to emit param bindings (before the first begin_stmt) in the
1635 initial view, if we are emitting views. To that end, we may
1636 consume initial notes in the function, processing them in
1637 final_start_function, before signaling the beginning of the
1638 prologue, rather than in final.
1639
1640 We don't test whether the DECLs are PARM_DECLs: the assumption is
1641 that there will be a NOTE_INSN_BEGIN_STMT marker before any
1642 non-parameter NOTE_INSN_VAR_LOCATION. It's ok if the marker is not
1643 there, we'll just have more variable locations bound in the initial
1644 view, which is consistent with their being bound without any code
1645 that would give them a value. */
1646
1647static inline bool
1648in_initial_view_p (rtx_insn *insn)
1649{
1650 return (!DECL_IGNORED_P (current_function_decl)
1651 && debug_variable_location_views
1652 && insn && GET_CODE (insn) == NOTE
1653 && (NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
1654 || NOTE_KIND (insn) == NOTE_INSN_DELETED));
1655}
1656
1657/* Output assembler code for the start of a function,
1658 and initialize some of the variables in this file
1659 for the new function. The label for the function and associated
1660 assembler pseudo-ops have already been output in `assemble_start_function'.
1661
1662 FIRST is the first insn of the rtl for the function being compiled.
1663 FILE is the file to write assembler code to.
1664 SEEN should be initially set to zero, and it may be updated to
1665 indicate we have references to the next location view, that would
1666 require us to emit it at the current PC.
1667 OPTIMIZE_P is nonzero if we should eliminate redundant
1668 test and compare insns. */
1669
1670static void
1671final_start_function_1 (rtx_insn **firstp, FILE *file, int *seen,
1672 int optimize_p ATTRIBUTE_UNUSED)
1673{
1674 block_depth = 0;
1675
1676 this_is_asm_operands = 0;
1677
1678 need_profile_function = false;
1679
1680 last_filename = LOCATION_FILE (prologue_location);
1681 last_linenum = LOCATION_LINE (prologue_location);
1682 last_columnnum = LOCATION_COLUMN (prologue_location);
1683 last_discriminator = 0;
1684 force_source_line = false;
1685
1686 high_block_linenum = high_function_linenum = last_linenum;
1687
1688 if (flag_sanitize & SANITIZE_ADDRESS)
1689 asan_function_start ();
1690
1691 rtx_insn *first = *firstp;
1692 if (in_initial_view_p (first))
1693 {
1694 do
1695 {
1696 final_scan_insn (first, file, 0, 0, seen);
1697 first = NEXT_INSN (first);
1698 }
1699 while (in_initial_view_p (first));
1700 *firstp = first;
1701 }
1702
1703 if (!DECL_IGNORED_P (current_function_decl))
1704 debug_hooks->begin_prologue (last_linenum, last_columnnum,
1705 last_filename);
1706
1707 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1708 dwarf2out_begin_prologue (0, 0, NULL);
1709
1710 if (DECL_IGNORED_P (current_function_decl) && last_linenum && last_filename)
1711 debug_hooks->set_ignored_loc (last_linenum, last_columnnum, last_filename);
1712
1713#ifdef LEAF_REG_REMAP
1714 if (crtl->uses_only_leaf_regs)
1715 leaf_renumber_regs (first);
1716#endif
1717
1718 /* The Sun386i and perhaps other machines don't work right
1719 if the profiling code comes after the prologue. */
1720 if (targetm.profile_before_prologue () && crtl->profile)
1721 {
1722 if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1723 && targetm.have_prologue ())
1724 {
1725 rtx_insn *insn;
1726 for (insn = first; insn; insn = NEXT_INSN (insn))
1727 if (!NOTE_P (insn))
1728 {
1729 insn = NULL;
1730 break;
1731 }
1732 else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1733 || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1734 break;
1735 else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1736 || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1737 continue;
1738 else
1739 {
1740 insn = NULL;
1741 break;
1742 }
1743
1744 if (insn)
1745 need_profile_function = true;
1746 else
1747 profile_function (file);
1748 }
1749 else
1750 profile_function (file);
1751 }
1752
1753 /* If debugging, assign block numbers to all of the blocks in this
1754 function. */
1755 if (write_symbols)
1756 {
1757 reemit_insn_block_notes ();
1758 number_blocks (current_function_decl);
1759 /* We never actually put out begin/end notes for the top-level
1760 block in the function. But, conceptually, that block is
1761 always needed. */
1762 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1763 }
1764
1765 unsigned HOST_WIDE_INT min_frame_size
1766 = constant_lower_bound (get_frame_size ());
1767 if (min_frame_size > (unsigned HOST_WIDE_INT) warn_frame_larger_than_size)
1768 {
1769 /* Issue a warning */
1770 warning (OPT_Wframe_larger_than_,
1771 "the frame size of %wu bytes is larger than %wu bytes",
1772 min_frame_size, warn_frame_larger_than_size);
1773 }
1774
1775 /* First output the function prologue: code to set up the stack frame. */
1776 targetm.asm_out.function_prologue (file);
1777
1778 /* If the machine represents the prologue as RTL, the profiling code must
1779 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1780 if (! targetm.have_prologue ())
1781 profile_after_prologue (file);
1782}
1783
1784/* This is an exported final_start_function_1, callable without SEEN. */
1785
1786void
1787final_start_function (rtx_insn *first, FILE *file,
1788 int optimize_p ATTRIBUTE_UNUSED)
1789{
1790 int seen = 0;
1791 final_start_function_1 (&first, file, &seen, optimize_p);
1792 gcc_assert (seen == 0);
1793}
1794
1795static void
1796profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1797{
1798 if (!targetm.profile_before_prologue () && crtl->profile)
1799 profile_function (file);
1800}
1801
1802static void
1803profile_function (FILE *file ATTRIBUTE_UNUSED)
1804{
1805#ifndef NO_PROFILE_COUNTERS
1806# define NO_PROFILE_COUNTERS 0
1807#endif
1808#ifdef ASM_OUTPUT_REG_PUSH
1809 rtx sval = NULL, chain = NULL;
1810
1811 if (cfun->returns_struct)
1812 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1813 true);
1814 if (cfun->static_chain_decl)
1815 chain = targetm.calls.static_chain (current_function_decl, true);
1816#endif /* ASM_OUTPUT_REG_PUSH */
1817
1818 if (! NO_PROFILE_COUNTERS)
1819 {
1820 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1821 switch_to_section (data_section);
1822 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1823 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1824 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1825 }
1826
1827 switch_to_section (current_function_section ());
1828
1829#ifdef ASM_OUTPUT_REG_PUSH
1830 if (sval && REG_P (sval))
1831 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1832 if (chain && REG_P (chain))
1833 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1834#endif
1835
1836 FUNCTION_PROFILER (file, current_function_funcdef_no);
1837
1838#ifdef ASM_OUTPUT_REG_PUSH
1839 if (chain && REG_P (chain))
1840 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1841 if (sval && REG_P (sval))
1842 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1843#endif
1844}
1845
1846/* Output assembler code for the end of a function.
1847 For clarity, args are same as those of `final_start_function'
1848 even though not all of them are needed. */
1849
1850void
1851final_end_function (void)
1852{
1853 app_disable ();
1854
1855 if (!DECL_IGNORED_P (current_function_decl))
1856 debug_hooks->end_function (high_function_linenum);
1857
1858 /* Finally, output the function epilogue:
1859 code to restore the stack frame and return to the caller. */
1860 targetm.asm_out.function_epilogue (asm_out_file);
1861
1862 /* And debug output. */
1863 if (!DECL_IGNORED_P (current_function_decl))
1864 debug_hooks->end_epilogue (last_linenum, last_filename);
1865
1866 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1867 && dwarf2out_do_frame ())
1868 dwarf2out_end_epilogue (last_linenum, last_filename);
1869
1870 some_local_dynamic_name = 0;
1871}
1872
1873
1874/* Dumper helper for basic block information. FILE is the assembly
1875 output file, and INSN is the instruction being emitted. */
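/* A sketch of the -dA annotation this produces, assuming "#" as
   ASM_COMMENT_START (the actual comment marker is target-defined): at
   the first insn of a block it emits lines such as
     # BLOCK 3, count:... seq:0
     # PRED: ...
   and at the last insn of a block a "# SUCC: ..." line. */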
1876
1877static void
1878dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1879 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1880{
1881 basic_block bb;
1882
1883 if (!flag_debug_asm)
1884 return;
1885
1886 if (INSN_UID (insn) < bb_map_size
1887 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1888 {
1889 edge e;
1890 edge_iterator ei;
1891
1892 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1893 if (bb->count.initialized_p ())
1894 {
1895 fprintf (file, ", count:");
1896 bb->count.dump (file);
1897 }
1898 fprintf (file, " seq:%d", (*bb_seqn)++);
1899 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1900 FOR_EACH_EDGE (e, ei, bb->preds)
1901 {
1902 dump_edge_info (file, e, TDF_DETAILS, 0);
1903 }
1904 fprintf (file, "\n");
1905 }
1906 if (INSN_UID (insn) < bb_map_size
1907 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1908 {
1909 edge e;
1910 edge_iterator ei;
1911
1912 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1913 FOR_EACH_EDGE (e, ei, bb->succs)
1914 {
1915 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1916 }
1917 fprintf (file, "\n");
1918 }
1919}
1920
1921/* Output assembler code for some insns: all or part of a function.
1922 For description of args, see `final_start_function', above. */
1923
1924static void
1925final_1 (rtx_insn *first, FILE *file, int seen, int optimize_p)
1926{
1927 rtx_insn *insn, *next;
1928
1929 /* Used for -dA dump. */
1930 basic_block *start_to_bb = NULL;
1931 basic_block *end_to_bb = NULL;
1932 int bb_map_size = 0;
1933 int bb_seqn = 0;
1934
1935 last_ignored_compare = 0;
1936
1937 init_recog ();
1938
1939 CC_STATUS_INIT;
1940
1941 if (flag_debug_asm)
1942 {
1943 basic_block bb;
1944
1945 bb_map_size = get_max_uid () + 1;
1946 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1947 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1948
1949 /* There is no cfg for a thunk. */
1950 if (!cfun->is_thunk)
1951 FOR_EACH_BB_REVERSE_FN (bb, cfun)
1952 {
1953 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1954 end_to_bb[INSN_UID (BB_END (bb))] = bb;
1955 }
1956 }
1957
1958 /* Output the insns. */
1959 for (insn = first; insn;)
1960 {
1961 if (HAVE_ATTR_length)
1962 {
1963 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1964 {
1965 /* This can be triggered by bugs elsewhere in the compiler if
1966 new insns are created after init_insn_lengths is called. */
1967 gcc_assert (NOTE_P (insn));
1968 insn_current_address = -1;
1969 }
1970 else
1971 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1972 /* final can be seen as an iteration of shorten_branches that
1973 does nothing (since a fixed point has already been reached). */
1974 insn_last_address = insn_current_address;
1975 }
1976
1977 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1978 bb_map_size, &bb_seqn);
1979 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1980 }
1981
1982 maybe_output_next_view (&seen);
1983
1984 if (flag_debug_asm)
1985 {
1986 free (start_to_bb);
1987 free (end_to_bb);
1988 }
1989
1990 /* Remove CFI notes, to avoid compare-debug failures. */
1991 for (insn = first; insn; insn = next)
1992 {
1993 next = NEXT_INSN (insn);
1994 if (NOTE_P (insn)
1995 && (NOTE_KIND (insn) == NOTE_INSN_CFI
1996 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
1997 delete_insn (insn);
1998 }
1999}
2000
2001/* This is an exported final_1, callable without SEEN. */
2002
2003void
2004final (rtx_insn *first, FILE *file, int optimize_p)
2005{
2006 /* Those that use the internal final_start_function_1/final_1 API
2007 skip initial debug bind notes in final_start_function_1, and pass
2008 the modified FIRST to final_1. But for callers of the public
2009 final_start_function/final APIs, final_start_function can't move
2010 FIRST because it's not passed by reference, so if the notes were
2011 skipped there, skip them again here. */
2012 while (in_initial_view_p (first))
2013 first = NEXT_INSN (first);
2014
2015 final_1 (first, file, 0, optimize_p);
2016}
2017
2018const char *
2019get_insn_template (int code, rtx_insn *insn)
2020{
2021 switch (insn_data[code].output_format)
2022 {
2023 case INSN_OUTPUT_FORMAT_SINGLE:
2024 return insn_data[code].output.single;
2025 case INSN_OUTPUT_FORMAT_MULTI:
2026 return insn_data[code].output.multi[which_alternative];
2027 case INSN_OUTPUT_FORMAT_FUNCTION:
2028 gcc_assert (insn);
2029 return (*insn_data[code].output.function) (recog_data.operand, insn);
2030
2031 default:
2032 gcc_unreachable ();
2033 }
2034}
2035
2036/* Emit the appropriate declaration for an alternate-entry-point
2037 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2038 LABEL_KIND != LABEL_NORMAL.
2039
2040 The case fall-through in this function is intentional. */
2041static void
2042output_alternate_entry_point (FILE *file, rtx_insn *insn)
2043{
2044 const char *name = LABEL_NAME (insn);
2045
2046 switch (LABEL_KIND (insn))
2047 {
2048 case LABEL_WEAK_ENTRY:
2049#ifdef ASM_WEAKEN_LABEL
2050 ASM_WEAKEN_LABEL (file, name);
2051 gcc_fallthrough ();
2052#endif
2053 case LABEL_GLOBAL_ENTRY:
2054 targetm.asm_out.globalize_label (file, name);
2055 gcc_fallthrough ();
2056 case LABEL_STATIC_ENTRY:
2057#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2058 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2059#endif
2060 ASM_OUTPUT_LABEL (file, name);
2061 break;
2062
2063 case LABEL_NORMAL:
2064 default:
2065 gcc_unreachable ();
2066 }
2067}
2068
2069/* Given a CALL_INSN, find and return the nested CALL. */
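/* For illustration only (hypothetical RTL, not from any particular
   target): given a CALL_INSN whose PATTERN is
     (set (reg:SI 0) (call (mem:QI (symbol_ref "foo")) (const_int 0)))
   the loop below strips the SET and returns the inner CALL rtx. */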
2070static rtx
2071call_from_call_insn (rtx_call_insn *insn)
2072{
2073 rtx x;
2074 gcc_assert (CALL_P (insn));
2075 x = PATTERN (insn);
2076
2077 while (GET_CODE (x) != CALL)
2078 {
2079 switch (GET_CODE (x))
2080 {
2081 default:
2082 gcc_unreachable ();
2083 case COND_EXEC:
2084 x = COND_EXEC_CODE (x);
2085 break;
2086 case PARALLEL:
2087 x = XVECEXP (x, 0, 0);
2088 break;
2089 case SET:
2090 x = XEXP (x, 1);
2091 break;
2092 }
2093 }
2094 return x;
2095}
2096
2097/* Print a comment into the asm showing FILENAME, LINENUM, and the
2098 corresponding source line, if available. */
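/* A sketch of the output, assuming "#" as ASM_COMMENT_START (the
   actual comment marker is target-defined):
     # foo.c:42: x = y + 1;
   i.e. the location followed by the text of that source line. */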
2099
2100static void
2101asm_show_source (const char *filename, int linenum)
2102{
2103 if (!filename)
2104 return;
2105
2106 char_span line = location_get_source_line (filename, linenum);
2107 if (!line)
2108 return;
2109
2110 fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
2111 /* "line" is not 0-terminated, so we must use its length. */
2112 fwrite (line.get_buffer (), 1, line.length (), asm_out_file);
2113 fputc ('\n', asm_out_file);
2114}
2115
2116/* Judge if an absolute jump table is relocatable. */
2117
2118bool
2119jumptable_relocatable (void)
2120{
2121 bool relocatable = false;
2122
2123 if (!CASE_VECTOR_PC_RELATIVE
2124 && !targetm.asm_out.generate_pic_addr_diff_vec ()
2125 && targetm_common.have_named_sections)
2126 relocatable = targetm.asm_out.reloc_rw_mask ();
2127
2128 return relocatable;
2129}
2130
2131/* The final scan for one insn, INSN.
2132 Args are same as in `final', except that INSN
2133 is the insn being scanned.
2134 Value returned is the next insn to be scanned.
2135
2136 NOPEEPHOLES is the flag to disallow peephole processing (currently
2137 used for within delayed branch sequence output).
2138
2139 SEEN is used to track the end of the prologue, for emitting
2140 debug information. We force the emission of a line note after
2141 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
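/* Concretely, the SEEN bits act as a small state machine: the first of
   NOTE_INSN_PROLOGUE_END / NOTE_INSN_FUNCTION_BEG to be scanned sets
   SEEN_NOTE; when the second arrives with SEEN_NOTE set but SEEN_EMITTED
   still clear, SEEN_EMITTED is set and force_source_line is raised so
   that the next insn with a location emits a fresh line note. */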
2142
2143static rtx_insn *
2144final_scan_insn_1 (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2145 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2146{
2147 rtx_insn *next;
2148 rtx_jump_table_data *table;
2149
2150 insn_counter++;
2151
2152 /* Ignore deleted insns. These can occur when we split insns (due to a
2153 template of "#") while not optimizing. */
2154 if (insn->deleted ())
2155 return NEXT_INSN (insn);
2156
2157 switch (GET_CODE (insn))
2158 {
2159 case NOTE:
2160 switch (NOTE_KIND (insn))
2161 {
2162 case NOTE_INSN_DELETED:
2163 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2164 break;
2165
2166 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2167 maybe_output_next_view (seen);
2168
2169 output_function_exception_table (0);
2170
2171 if (targetm.asm_out.unwind_emit)
2172 targetm.asm_out.unwind_emit (asm_out_file, insn);
2173
2174 in_cold_section_p = !in_cold_section_p;
2175
2176 gcc_checking_assert (in_cold_section_p);
2177 if (in_cold_section_p)
2178 cold_function_name
2179 = clone_function_name (current_function_decl, "cold");
2180
2181 if (dwarf2out_do_frame ())
2182 {
2183 dwarf2out_switch_text_section ();
2184 if (!dwarf2_debug_info_emitted_p (current_function_decl)
2185 && !DECL_IGNORED_P (current_function_decl))
2186 debug_hooks->switch_text_section ();
2187 }
2188 else if (!DECL_IGNORED_P (current_function_decl))
2189 debug_hooks->switch_text_section ();
2190 if (DECL_IGNORED_P (current_function_decl) && last_linenum
2191 && last_filename)
2192 debug_hooks->set_ignored_loc (last_linenum, last_columnnum,
2193 last_filename);
2194
2195 switch_to_section (current_function_section ());
2196 targetm.asm_out.function_switched_text_sections (asm_out_file,
2197 current_function_decl,
2198 in_cold_section_p);
2199 /* Emit a label for the split cold section. Form label name by
2200 suffixing "cold" to the original function's name. */
2201 if (in_cold_section_p)
2202 {
2203#ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2204 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2205 IDENTIFIER_POINTER
2206 (cold_function_name),
2207 current_function_decl);
2208#else
2209 ASM_OUTPUT_LABEL (asm_out_file,
2210 IDENTIFIER_POINTER (cold_function_name));
2211#endif
2212 if (dwarf2out_do_frame ()
2213 && cfun->fde->dw_fde_second_begin != NULL)
2214 ASM_OUTPUT_LABEL (asm_out_file, cfun->fde->dw_fde_second_begin);
2215 }
2216 break;
2217
2218 case NOTE_INSN_BASIC_BLOCK:
2219 if (need_profile_function)
2220 {
2221 profile_function (asm_out_file);
2222 need_profile_function = false;
2223 }
2224
2225 if (targetm.asm_out.unwind_emit)
2226 targetm.asm_out.unwind_emit (asm_out_file, insn);
2227
2228 break;
2229
2230 case NOTE_INSN_EH_REGION_BEG:
2231 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2232 NOTE_EH_HANDLER (insn));
2233 break;
2234
2235 case NOTE_INSN_EH_REGION_END:
2236 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2237 NOTE_EH_HANDLER (insn));
2238 break;
2239
2240 case NOTE_INSN_PROLOGUE_END:
2241 targetm.asm_out.function_end_prologue (file);
2242 profile_after_prologue (file);
2243
2244 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2245 {
2246 *seen |= SEEN_EMITTED;
2247 force_source_line = true;
2248 }
2249 else
2250 *seen |= SEEN_NOTE;
2251
2252 break;
2253
2254 case NOTE_INSN_EPILOGUE_BEG:
2255 if (!DECL_IGNORED_P (current_function_decl))
2256 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2257 targetm.asm_out.function_begin_epilogue (file);
2258 break;
2259
2260 case NOTE_INSN_CFI:
2261 dwarf2out_emit_cfi (NOTE_CFI (insn));
2262 break;
2263
2264 case NOTE_INSN_CFI_LABEL:
2265 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2266 NOTE_LABEL_NUMBER (insn));
2267 break;
2268
2269 case NOTE_INSN_FUNCTION_BEG:
2270 if (need_profile_function)
2271 {
2272 profile_function (asm_out_file);
2273 need_profile_function = false;
2274 }
2275
2276 app_disable ();
2277 if (!DECL_IGNORED_P (current_function_decl))
2278 debug_hooks->end_prologue (last_linenum, last_filename);
2279
2280 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2281 {
2282 *seen |= SEEN_EMITTED;
2283 force_source_line = true;
2284 }
2285 else
2286 *seen |= SEEN_NOTE;
2287
2288 break;
2289
2290 case NOTE_INSN_BLOCK_BEG:
2291 if (debug_info_level >= DINFO_LEVEL_NORMAL
2292 || dwarf_debuginfo_p ()
2293 || write_symbols == VMS_DEBUG)
2294 {
2295 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2296
2297 app_disable ();
2298 ++block_depth;
2299 high_block_linenum = last_linenum;
2300
2301 /* Output debugging info about the symbol-block beginning. */
2302 if (!DECL_IGNORED_P (current_function_decl))
2303 debug_hooks->begin_block (last_linenum, n);
2304
2305 /* Mark this block as output. */
2306 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2307 BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p;
2308 }
2309 break;
2310
2311 case NOTE_INSN_BLOCK_END:
2312 maybe_output_next_view (seen);
2313
2314 if (debug_info_level >= DINFO_LEVEL_NORMAL
2315 || dwarf_debuginfo_p ()
2316 || write_symbols == VMS_DEBUG)
2317 {
2318 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2319
2320 app_disable ();
2321
2322 /* End of a symbol-block. */
2323 --block_depth;
2324 gcc_assert (block_depth >= 0);
2325
2326 if (!DECL_IGNORED_P (current_function_decl))
2327 debug_hooks->end_block (high_block_linenum, n);
2328 gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))
2329 == in_cold_section_p);
2330 }
2331 break;
2332
2333 case NOTE_INSN_DELETED_LABEL:
2334 /* Emit the label. We may have deleted the CODE_LABEL because
2335 the label could be proved to be unreachable, though still
2336 referenced (in the form of having its address taken). */
2337 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2338 break;
2339
2340 case NOTE_INSN_DELETED_DEBUG_LABEL:
2341 /* Similarly, but need to use different namespace for it. */
2342 if (CODE_LABEL_NUMBER (insn) != -1)
2343 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2344 break;
2345
2346 case NOTE_INSN_VAR_LOCATION:
2347 if (!DECL_IGNORED_P (current_function_decl))
2348 {
2349 debug_hooks->var_location (insn);
2350 set_next_view_needed (seen);
2351 }
2352 break;
2353
2354 case NOTE_INSN_BEGIN_STMT:
2355 gcc_checking_assert (cfun->debug_nonbind_markers);
2356 if (!DECL_IGNORED_P (current_function_decl)
2357 && notice_source_line (insn, NULL))
2358 {
2359 output_source_line:
2360 (*debug_hooks->source_line) (last_linenum, last_columnnum,
2361 last_filename, last_discriminator,
2362 true);
2363 clear_next_view_needed (seen);
2364 }
2365 break;
2366
2367 case NOTE_INSN_INLINE_ENTRY:
2368 gcc_checking_assert (cfun->debug_nonbind_markers);
2369 if (!DECL_IGNORED_P (current_function_decl)
2370 && notice_source_line (insn, NULL))
2371 {
2372 (*debug_hooks->inline_entry) (LOCATION_BLOCK
2373 (NOTE_MARKER_LOCATION (insn)));
2374 goto output_source_line;
2375 }
2376 break;
2377
2378 default:
2379 gcc_unreachable ();
2380 break;
2381 }
2382 break;
2383
2384 case BARRIER:
2385 break;
2386
2387 case CODE_LABEL:
2388 /* The target port might emit labels in the output function for
2389 some insn, e.g. sh.cc output_branchy_insn. */
2390 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2391 {
2392 align_flags alignment = LABEL_TO_ALIGNMENT (insn);
2393 if (alignment.levels[0].log && NEXT_INSN (insn))
2394 {
2395#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2396 /* Output both primary and secondary alignment. */
2397 ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[0].log,
2398 alignment.levels[0].maxskip);
2399 ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[1].log,
2400 alignment.levels[1].maxskip);
2401#else
2402#ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2403 ASM_OUTPUT_ALIGN_WITH_NOP (file, alignment.levels[0].log);
2404#else
2405 ASM_OUTPUT_ALIGN (file, alignment.levels[0].log);
2406#endif
2407#endif
2408 }
2409 }
2410 CC_STATUS_INIT;
2411
2412 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2413 debug_hooks->label (as_a <rtx_code_label *> (insn));
2414
2415 app_disable ();
2416
2417 /* If this label is followed by a jump-table, make sure we put
2418 the label in the read-only section. Also possibly write the
2419 label and jump table together. */
2420 table = jump_table_for_label (as_a <rtx_code_label *> (insn));
2421 if (table)
2422 {
2423#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2424 /* In this case, the case vector is being moved by the
2425 target, so don't output the label at all. Leave that
2426 to the back end macros. */
2427#else
2428 if (! JUMP_TABLES_IN_TEXT_SECTION)
2429 {
2430 int log_align;
2431
2432 switch_to_section (targetm.asm_out.function_rodata_section
2433 (current_function_decl,
2434 jumptable_relocatable ()));
2435
2436#ifdef ADDR_VEC_ALIGN
2437 log_align = ADDR_VEC_ALIGN (table);
2438#else
2439 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2440#endif
2441 ASM_OUTPUT_ALIGN (file, log_align);
2442 }
2443 else
2444 switch_to_section (current_function_section ());
2445
2446#ifdef ASM_OUTPUT_CASE_LABEL
2447 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn), table);
2448#else
2449 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2450#endif
2451#endif
2452 break;
2453 }
2454 if (LABEL_ALT_ENTRY_P (insn))
2455 output_alternate_entry_point (file, insn);
2456 else
2457 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2458 break;
2459
2460 default:
2461 {
2462 rtx body = PATTERN (insn);
2463 int insn_code_number;
2464 const char *templ;
2465 bool is_stmt, *is_stmt_p;
2466
2467 if (MAY_HAVE_DEBUG_MARKER_INSNS && cfun->debug_nonbind_markers)
2468 {
2469 is_stmt = false;
2470 is_stmt_p = NULL;
2471 }
2472 else
2473 is_stmt_p = &is_stmt;
2474
2475 /* Reset this early so it is correct for ASM statements. */
2476 current_insn_predicate = NULL_RTX;
2477
2478 /* An INSN, JUMP_INSN or CALL_INSN.
2479 First check for special kinds that recog doesn't recognize. */
2480
2481 if (GET_CODE (body) == USE /* These are just declarations. */
2482 || GET_CODE (body) == CLOBBER)
2483 break;
2484
2485 /* Detect insns that are really jump-tables
2486 and output them as such. */
2487
2488 if (JUMP_TABLE_DATA_P (insn))
2489 {
2490#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2491 int vlen, idx;
2492#endif
2493
2494 if (! JUMP_TABLES_IN_TEXT_SECTION)
2495 switch_to_section (targetm.asm_out.function_rodata_section
2496 (current_function_decl,
2497 jumptable_relocatable ()));
2498 else
2499 switch_to_section (current_function_section ());
2500
2501 app_disable ();
2502
2503#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2504 if (GET_CODE (body) == ADDR_VEC)
2505 {
2506#ifdef ASM_OUTPUT_ADDR_VEC
2507 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2508#else
2509 gcc_unreachable ();
2510#endif
2511 }
2512 else
2513 {
2514#ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2515 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2516#else
2517 gcc_unreachable ();
2518#endif
2519 }
2520#else
2521 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2522 for (idx = 0; idx < vlen; idx++)
2523 {
2524 if (GET_CODE (body) == ADDR_VEC)
2525 {
2526#ifdef ASM_OUTPUT_ADDR_VEC_ELT
2527 ASM_OUTPUT_ADDR_VEC_ELT
2528 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2529#else
2530 gcc_unreachable ();
2531#endif
2532 }
2533 else
2534 {
2535#ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2536 ASM_OUTPUT_ADDR_DIFF_ELT
2537 (file,
2538 body,
2539 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2540 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2541#else
2542 gcc_unreachable ();
2543#endif
2544 }
2545 }
2546#ifdef ASM_OUTPUT_CASE_END
2547 ASM_OUTPUT_CASE_END (file,
2548 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2549 insn);
2550#endif
2551#endif
2552
2553 switch_to_section (current_function_section ());
2554
2555 if (debug_variable_location_views
2556 && !DECL_IGNORED_P (current_function_decl))
2557 debug_hooks->var_location (insn);
2558
2559 break;
2560 }
2561 /* Output this line note if it is the first or the last line
2562 note in a row. */
2563 if (!DECL_IGNORED_P (current_function_decl)
2564 && notice_source_line (insn, is_stmt_p))
2565 {
2566 if (flag_verbose_asm)
2567 asm_show_source (last_filename, last_linenum);
2568 (*debug_hooks->source_line) (last_linenum, last_columnnum,
2569 last_filename, last_discriminator,
2570 is_stmt);
2571 clear_next_view_needed (seen);
2572 }
2573 else
2574 maybe_output_next_view (seen);
2575
2576 gcc_checking_assert (!DEBUG_INSN_P (insn));
2577
2578 if (GET_CODE (body) == PARALLEL
2579 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2580 body = XVECEXP (body, 0, 0);
2581
2582 if (GET_CODE (body) == ASM_INPUT)
2583 {
2584 const char *string = XSTR (body, 0);
2585
2586 /* There's no telling what that did to the condition codes. */
2587 CC_STATUS_INIT;
2588
2589 if (string[0])
2590 {
2591 expanded_location loc;
2592
2593 app_enable ();
2594 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2595 if (*loc.file && loc.line)
2596 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2597 ASM_COMMENT_START, loc.line, loc.file);
2598 fprintf (asm_out_file, "\t%s\n", string);
2599#if HAVE_AS_LINE_ZERO
2600 if (*loc.file && loc.line)
2601 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2602#endif
2603 }
2604 break;
2605 }
2606
2607 /* Detect `asm' construct with operands. */
2608 if (asm_noperands (body) >= 0)
2609 {
2610 unsigned int noperands = asm_noperands (body);
2611 rtx *ops = XALLOCAVEC (rtx, noperands);
2612 const char *string;
2613 location_t loc;
2614 expanded_location expanded;
2615
2616 /* There's no telling what that did to the condition codes. */
2617 CC_STATUS_INIT;
2618
2619 /* Get out the operand values. */
2620 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2621 /* Inhibit dying on what would otherwise be compiler bugs. */
2622 insn_noperands = noperands;
2623 this_is_asm_operands = insn;
2624 expanded = expand_location (loc);
2625
2626#ifdef FINAL_PRESCAN_INSN
2627 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2628#endif
2629
2630 /* Output the insn using them. */
2631 if (string[0])
2632 {
2633 app_enable ();
2634 if (expanded.file && expanded.line)
2635 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2636 ASM_COMMENT_START, expanded.line, expanded.file);
2637 output_asm_insn (string, ops);
2638#if HAVE_AS_LINE_ZERO
2639 if (expanded.file && expanded.line)
2640 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2641#endif
2642 }
2643
2644 if (targetm.asm_out.final_postscan_insn)
2645 targetm.asm_out.final_postscan_insn (file, insn, ops,
2646 insn_noperands);
2647
2648 this_is_asm_operands = 0;
2649 break;
2650 }
2651
2652 app_disable ();
2653
2654 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2655 {
2656 /* A delayed-branch sequence. */
2657 int i;
2658
2659 final_sequence = seq;
2660
2661 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2662 force the restoration of a comparison that was previously
2663 thought unnecessary. If that happens, cancel this sequence
2664 and cause that insn to be restored. */
2665
2666 next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2667 if (next != seq->insn (1))
2668 {
2669 final_sequence = 0;
2670 return next;
2671 }
2672
2673 for (i = 1; i < seq->len (); i++)
2674 {
2675 rtx_insn *insn = seq->insn (i);
2676 rtx_insn *next = NEXT_INSN (insn);
2677 /* We loop in case any instruction in a delay slot gets
2678 split. */
2679 do
2680 insn = final_scan_insn (insn, file, 0, 1, seen);
2681 while (insn != next);
2682 }
2683#ifdef DBR_OUTPUT_SEQEND
2684 DBR_OUTPUT_SEQEND (file);
2685#endif
2686 final_sequence = 0;
2687
2688 /* If the insn requiring the delay slot was a CALL_INSN, the
2689 insns in the delay slot are actually executed before the
2690 called function. Hence we don't preserve any CC-setting
2691 actions in these insns and the CC must be marked as being
2692 clobbered by the function. */
2693 if (CALL_P (seq->insn (0)))
2694 {
2695 CC_STATUS_INIT;
2696 }
2697 break;
2698 }
2699
2700 /* We have a real machine instruction as rtl. */
2701
2702 body = PATTERN (insn);
2703
2704 /* Do machine-specific peephole optimizations if desired. */
2705
2706 if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2707 {
2708 rtx_insn *next = peephole (insn);
2709 /* When peepholing, if there were notes within the peephole,
2710 emit them before the peephole. */
2711 if (next != 0 && next != NEXT_INSN (insn))
2712 {
2713 rtx_insn *note, *prev = PREV_INSN (insn);
2714
2715 for (note = NEXT_INSN (insn); note != next;
2716 note = NEXT_INSN (note))
2717 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2718
2719 /* Put the notes in the proper position for a later
2720 rescan. For example, the SH target can do this
2721 when generating a far jump in a delayed branch
2722 sequence. */
2723 note = NEXT_INSN (insn);
2724 SET_PREV_INSN (note) = prev;
2725 SET_NEXT_INSN (prev) = note;
2726 SET_NEXT_INSN (PREV_INSN (next)) = insn;
2727 SET_PREV_INSN (insn) = PREV_INSN (next);
2728 SET_NEXT_INSN (insn) = next;
2729 SET_PREV_INSN (next) = insn;
2730 }
2731
2732 /* PEEPHOLE might have changed this. */
2733 body = PATTERN (insn);
2734 }
2735
2736 /* Try to recognize the instruction.
2737 If successful, verify that the operands satisfy the
2738 constraints for the instruction. Crash if they don't,
2739 since `reload' should have changed them so that they do. */
2740
2741 insn_code_number = recog_memoized (insn);
2742 cleanup_subreg_operands (insn);
2743
2744 /* Dump the insn in the assembly for debugging (-dAP).
2745 If the final dump is requested as slim RTL, dump slim
2746 RTL to the assembly file also. */
2747 if (flag_dump_rtl_in_asm)
2748 {
2749 print_rtx_head = ASM_COMMENT_START;
2750 if (! (dump_flags & TDF_SLIM))
2751 print_rtl_single (asm_out_file, insn);
2752 else
2753 dump_insn_slim (asm_out_file, insn);
2754 print_rtx_head = "";
2755 }
2756
2757 if (! constrain_operands_cached (insn, 1))
2758 fatal_insn_not_found (insn);
2759
2760 /* Some target machines need to prescan each insn before
2761 it is output. */
2762
2763#ifdef FINAL_PRESCAN_INSN
2764 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2765#endif
2766
2767 if (targetm.have_conditional_execution ()
2768 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2769 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2770
2771 current_output_insn = debug_insn = insn;
2772
2773 /* Find the proper template for this insn. */
2774 templ = get_insn_template (insn_code_number, insn);
2775
2776 /* If the C code returns 0, it means that it is a jump insn
2777 which follows a deleted test insn, and that test insn
2778 needs to be reinserted. */
2779 if (templ == 0)
2780 {
2781 rtx_insn *prev;
2782
2783 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2784
2785 /* We have already processed the notes between the setter and
2786 the user. Make sure we don't process them again, this is
2787 particularly important if one of the notes is a block
2788 scope note or an EH note. */
2789 for (prev = insn;
2790 prev != last_ignored_compare;
2791 prev = PREV_INSN (prev))
2792 {
2793 if (NOTE_P (prev))
2794 delete_insn (prev); /* Use delete_note. */
2795 }
2796
2797 return prev;
2798 }
2799
2800 /* If the template is the string "#", it means that this insn must
2801 be split. */
2802 if (templ[0] == '#' && templ[1] == '\0')
2803 {
2804 rtx_insn *new_rtx = try_split (body, insn, 0);
2805
2806 /* If we didn't split the insn, go away. */
2807 if (new_rtx == insn && PATTERN (new_rtx) == body)
2808 fatal_insn ("could not split insn", insn);
2809
2810 /* If we have a length attribute, this instruction should have
2811 been split in shorten_branches, to ensure that we would have
2812 valid length info for the splitees. */
2813 gcc_assert (!HAVE_ATTR_length);
2814
2815 return new_rtx;
2816 }
2817
2818 /* ??? This will put the directives in the wrong place if
2819 get_insn_template outputs assembly directly. However calling it
2820 before get_insn_template breaks if the insn is split. */
2821 if (targetm.asm_out.unwind_emit_before_insn
2822 && targetm.asm_out.unwind_emit)
2823 targetm.asm_out.unwind_emit (asm_out_file, insn);
2824
2825 rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
2826 if (call_insn != NULL)
2827 {
2828 rtx x = call_from_call_insn (call_insn);
2829 x = XEXP (x, 0);
2830 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2831 {
2832 tree t;
2833 x = XEXP (x, 0);
2834 t = SYMBOL_REF_DECL (x);
2835 if (t)
2836 assemble_external (t);
2837 }
2838 }
2839
2840 /* Output assembler code from the template. */
2841 output_asm_insn (templ, recog_data.operand);
2842
2843 /* Some target machines need to postscan each insn after
2844 it is output. */
2845 if (targetm.asm_out.final_postscan_insn)
2846 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2847 recog_data.n_operands);
2848
2849 if (!targetm.asm_out.unwind_emit_before_insn
2850 && targetm.asm_out.unwind_emit)
2851 targetm.asm_out.unwind_emit (asm_out_file, insn);
2852
2853 /* Let the debug info back-end know about this call. We do this only
2854 after the instruction has been emitted because labels that may be
2855 created to reference the call instruction must appear after it. */
2856 if ((debug_variable_location_views || call_insn != NULL)
2857 && !DECL_IGNORED_P (current_function_decl))
2858 debug_hooks->var_location (insn);
2859
2860 current_output_insn = debug_insn = 0;
2861 }
2862 }
2863 return NEXT_INSN (insn);
2864}
2865
2866/* This is a wrapper around final_scan_insn_1 that allows ports to
2867 call it recursively without a known value for SEEN. The value is
2868 saved at the outermost call, and recovered for recursive calls.
2869 Recursive calls MUST pass NULL, or the same pointer if they can
2870 otherwise get to it. */
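/* For example (sketch only, hypothetical variable names): a target's
   output routine that needs to emit another insn inline while the
   current one is being output could call
       final_scan_insn (other_insn, asm_out_file, optimize, 1, NULL);
   and the SEEN state saved by the outermost call is reused. */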
2871
2872rtx_insn *
2873final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p,
2874 int nopeepholes, int *seen)
2875{
2876 static int *enclosing_seen;
2877 static int recursion_counter;
2878
2879 gcc_assert (seen || recursion_counter);
2880 gcc_assert (!recursion_counter || !seen || seen == enclosing_seen);
2881
2882 if (!recursion_counter++)
2883 enclosing_seen = seen;
2884 else if (!seen)
2885 seen = enclosing_seen;
2886
2887 rtx_insn *ret = final_scan_insn_1 (insn, file, optimize_p, nopeepholes, seen);
2888
2889 if (!--recursion_counter)
2890 enclosing_seen = NULL;
2891
2892 return ret;
2893}
2894
2895
2896
2897/* Map DECLs to instance discriminators. This is allocated and
2898 defined in ada/gcc-interface/trans.cc, when compiling with -gnateS.
2899 Mappings from this table are saved and restored for LTO, so
2900 link-time compilation will have this map set, at least in
2901 partitions containing at least one DECL with an associated instance
2902 discriminator. */
2903
2904decl_to_instance_map_t *decl_to_instance_map;
2905
2906/* Return the instance number assigned to DECL. */
2907
2908static inline int
2909map_decl_to_instance (const_tree decl)
2910{
2911 int *inst;
2912
2913 if (!decl_to_instance_map || !decl || !DECL_P (decl))
2914 return 0;
2915
2916 inst = decl_to_instance_map->get (decl);
2917
2918 if (!inst)
2919 return 0;
2920
2921 return *inst;
2922}
2923
2924/* Compute and return the appropriate discriminator, possibly derived from LOC. */
2925
2926static inline int
2927compute_discriminator (location_t loc)
2928{
2929 int discriminator;
2930
2931 if (!decl_to_instance_map)
2932 discriminator = get_discriminator_from_loc (loc);
2933 else
2934 {
2935 tree block = LOCATION_BLOCK (loc);
2936
2937 while (block && TREE_CODE (block) == BLOCK
2938 && !inlined_function_outer_scope_p (block))
2939 block = BLOCK_SUPERCONTEXT (block);
2940
2941 tree decl;
2942
2943 if (!block)
2944 decl = current_function_decl;
2945 else if (DECL_P (block))
2946 decl = block;
2947 else
2948 decl = block_ultimate_origin (block);
2949
2950 discriminator = map_decl_to_instance (decl);
2951 }
2952
2953 return discriminator;
2954}
2955
2956/* Return discriminator of the statement that produced this insn. */
2957int
2958insn_discriminator (const rtx_insn *insn)
2959{
2960 return compute_discriminator (INSN_LOCATION (insn));
2961}
2962
2963/* Return whether a source line note needs to be emitted before INSN.
2964 Sets IS_STMT to TRUE if the line should be marked as a possible
2965 breakpoint location. */
2966
2967static bool
2968notice_source_line (rtx_insn *insn, bool *is_stmt)
2969{
2970 const char *filename;
2971 int linenum, columnnum;
2972 int discriminator;
2973
2974 if (NOTE_MARKER_P (insn))
2975 {
2976 location_t loc = NOTE_MARKER_LOCATION (insn);
2977 expanded_location xloc = expand_location (loc);
2978 if (xloc.line == 0
2979 && (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION
2980 || LOCATION_LOCUS (loc) == BUILTINS_LOCATION))
2981 return false;
2982
2983 filename = xloc.file;
2984 linenum = xloc.line;
2985 columnnum = xloc.column;
2986 discriminator = compute_discriminator (loc);
2987 force_source_line = true;
2988 }
2989 else if (override_filename)
2990 {
2991 filename = override_filename;
2992 linenum = override_linenum;
2993 columnnum = override_columnnum;
2994 discriminator = override_discriminator;
2995 }
2996 else if (INSN_HAS_LOCATION (insn))
2997 {
2998 expanded_location xloc = insn_location (insn);
2999 filename = xloc.file;
3000 linenum = xloc.line;
3001 columnnum = xloc.column;
3002 discriminator = insn_discriminator (insn);
3003 }
3004 else
3005 {
3006 filename = NULL;
3007 linenum = 0;
3008 columnnum = 0;
3009 discriminator = 0;
3010 }
3011
3012 if (filename == NULL)
3013 return false;
3014
3015 if (force_source_line
3016 || filename != last_filename
3017 || last_linenum != linenum
3018 || (debug_column_info && last_columnnum != columnnum))
3019 {
3020 force_source_line = false;
3021 last_filename = filename;
3022 last_linenum = linenum;
3023 last_columnnum = columnnum;
3024 last_discriminator = discriminator;
3025 if (is_stmt)
3026 *is_stmt = true;
3027 high_block_linenum = MAX (last_linenum, high_block_linenum);
3028 high_function_linenum = MAX (last_linenum, high_function_linenum);
3029 return true;
3030 }
3031
3032 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3033 {
3034 /* If the discriminator changed, but the line number did not,
3035 output the line table entry with is_stmt false so the
3036 debugger does not treat this as a breakpoint location. */
3037 last_discriminator = discriminator;
3038 if (is_stmt)
3039 *is_stmt = false;
3040 return true;
3041 }
3042
3043 return false;
3044}
3045
3046/* For each operand in INSN, simplify (subreg (reg)) so that it refers
3047 directly to the desired hard register. */
3048
3049void
3050cleanup_subreg_operands (rtx_insn *insn)
3051{
3052 int i;
3053 bool changed = false;
3054 extract_insn_cached (insn);
3055 for (i = 0; i < recog_data.n_operands; i++)
3056 {
3057 /* The following test cannot use recog_data.operand when testing
3058 for a SUBREG: the underlying object might have been changed
3059 already if we are inside a match_operator expression that
3060 matches the else clause. Instead we test the underlying
3061 expression directly. */
3062 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3063 {
3064 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3065 changed = true;
3066 }
3067 else if (GET_CODE (recog_data.operand[i]) == PLUS
3068 || GET_CODE (recog_data.operand[i]) == MULT
3069 || MEM_P (recog_data.operand[i]))
3070 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3071 }
3072
3073 for (i = 0; i < recog_data.n_dups; i++)
3074 {
3075 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3076 {
3077 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3078 changed = true;
3079 }
3080 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3081 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3082 || MEM_P (*recog_data.dup_loc[i]))
3083 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3084 }
3085 if (changed)
3086 df_insn_rescan (insn);
3087}
3088
3089/* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3090 the thing it is a subreg of. Do it anyway if FINAL_P. */
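/* For instance (illustrative only; exact results depend on the target
   and endianness): a hard-register subreg such as
   (subreg:SI (reg:DI 1) 0) is typically rewritten to (reg:SI 1), and a
   subreg of a MEM becomes a narrower MEM at the adjusted address. */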
3091
3092rtx
3093alter_subreg (rtx *xp, bool final_p)
3094{
3095 rtx x = *xp;
3096 rtx y = SUBREG_REG (x);
3097
3098 /* simplify_subreg does not remove subreg from volatile references.
3099 We are required to. */
3100 if (MEM_P (y))
3101 {
3102 poly_int64 offset = SUBREG_BYTE (x);
3103
3104 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3105 contains 0 instead of the proper offset. See simplify_subreg. */
3106 if (paradoxical_subreg_p (x))
3107 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3108
3109 if (final_p)
3110 *xp = adjust_address (y, GET_MODE (x), offset);
3111 else
3112 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3113 }
3114 else if (REG_P (y) && HARD_REGISTER_P (y))
3115 {
3116 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3117 SUBREG_BYTE (x));
3118
3119 if (new_rtx != 0)
3120 *xp = new_rtx;
3121 else if (final_p && REG_P (y))
3122 {
3123 /* Simplify_subreg can't handle some REG cases, but we have to. */
3124 unsigned int regno;
3125 poly_int64 offset;
3126
3127 regno = subreg_regno (x);
3128 if (subreg_lowpart_p (x))
3129 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3130 else
3131 offset = SUBREG_BYTE (x);
3132 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3133 }
3134 }
3135
3136 return *xp;
3137}
3138
3139/* Do alter_subreg on all the SUBREGs contained in X. */
3140
3141static rtx
3142walk_alter_subreg (rtx *xp, bool *changed)
3143{
3144 rtx x = *xp;
3145 switch (GET_CODE (x))
3146 {
3147 case PLUS:
3148 case MULT:
3149 case AND:
3150 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3151 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3152 break;
3153
3154 case MEM:
3155 case ZERO_EXTEND:
3156 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3157 break;
3158
3159 case SUBREG:
3160 *changed = true;
3161 return alter_subreg (xp, true);
3162
3163 default:
3164 break;
3165 }
3166
3167 return *xp;
3168}
3169
3170/* Report inconsistency between the assembler template and the operands.
3171 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3172
3173void
3174output_operand_lossage (const char *cmsgid, ...)
3175{
3176 char *fmt_string;
3177 char *new_message;
3178 const char *pfx_str;
3179 va_list ap;
3180
3181 va_start (ap, cmsgid);
3182
3183 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3184 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3185 new_message = xvasprintf (fmt_string, ap);
3186
3187 if (this_is_asm_operands)
3188 error_for_asm (this_is_asm_operands, "%s", new_message);
3189 else
3190 internal_error ("%s", new_message);
3191
3192 free (fmt_string);
3193 free (new_message);
3194 va_end (ap);
3195}
3196
3197/* Output of assembler code from a template, and its subroutines. */
3198
3199/* Annotate the assembly with a comment describing the pattern and
3200 alternative used. */
3201
3202static void
3203output_asm_name (void)
3204{
3205 if (debug_insn)
3206 {
3207 fprintf (asm_out_file, "\t%s %d\t",
3208 ASM_COMMENT_START, INSN_UID (debug_insn));
3209
3210 fprintf (asm_out_file, "[c=%d",
3211 insn_cost (debug_insn, optimize_insn_for_speed_p ()));
3212 if (HAVE_ATTR_length)
3213 fprintf (asm_out_file, " l=%d",
3214 get_attr_length (debug_insn));
3215 fprintf (asm_out_file, "] ");
3216
3217 int num = INSN_CODE (debug_insn);
3218 fprintf (asm_out_file, "%s", insn_data[num].name);
3219 if (insn_data[num].n_alternatives > 1)
3220 fprintf (asm_out_file, "/%d", which_alternative);
3221
3222 /* Clear this so only the first assembler insn
3223 of any rtl insn will get the special comment for -dp. */
3224 debug_insn = 0;
3225 }
3226}
3227
3228/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3229 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3230 corresponds to the address of the object and 0 if to the object. */
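/* For example (illustrative): for a MEM whose MEM_EXPR is the VAR_DECL
   of a variable "x", that decl is returned and *PADDRESSP is 0; if only
   the address expression can be traced back to "x", the decl is still
   returned but *PADDRESSP is 1. */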
3231
3232static tree
3233get_mem_expr_from_op (rtx op, int *paddressp)
3234{
3235 tree expr;
3236 int inner_addressp;
3237
3238 *paddressp = 0;
3239
3240 if (REG_P (op))
3241 return REG_EXPR (op);
3242 else if (!MEM_P (op))
3243 return 0;
3244
3245 if (MEM_EXPR (op) != 0)
3246 return MEM_EXPR (op);
3247
3248 /* Otherwise we have an address, so indicate it and look at the address. */
3249 *paddressp = 1;
3250 op = XEXP (op, 0);
3251
3252 /* First check if we have a decl for the address, then look at the right side
3253 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3254 But don't allow the address to itself be indirect. */
3255 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3256 return expr;
3257 else if (GET_CODE (op) == PLUS
3258 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3259 return expr;
3260
3261 while (UNARY_P (op)
3262 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3263 op = XEXP (op, 0);
3264
3265 expr = get_mem_expr_from_op (op, &inner_addressp);
3266 return inner_addressp ? 0 : expr;
3267}
3268
3269/* Output operand names for assembler instructions. OPERANDS is the
3270 operand vector, OPORDER is the order to write the operands, and NOPS
3271 is the number of operands to write. */
3272
3273static void
3274output_asm_operand_names (rtx *operands, int *oporder, int nops)
3275{
3276 int wrote = 0;
3277 int i;
3278
3279 for (i = 0; i < nops; i++)
3280 {
3281 int addressp;
3282 rtx op = operands[oporder[i]];
3283 tree expr = get_mem_expr_from_op (op, &addressp);
3284
3285 fprintf (asm_out_file, "%c%s",
3286 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3287 wrote = 1;
3288 if (expr)
3289 {
3290 fprintf (asm_out_file, "%s",
3291 addressp ? "*" : "");
3292 print_mem_expr (asm_out_file, expr);
3293 wrote = 1;
3294 }
3295 else if (REG_P (op) && ORIGINAL_REGNO (op)
3296 && ORIGINAL_REGNO (op) != REGNO (op))
3297 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3298 }
3299}
3300
3301#ifdef ASSEMBLER_DIALECT
3302/* Helper function to parse assembler dialects in the asm string.
3303 This is called from output_asm_insn and asm_fprintf. */
3304static const char *
3305do_assembler_dialects (const char *p, int *dialect)
3306{
3307 char c = *(p - 1);
3308
3309 switch (c)
3310 {
3311 case '{':
3312 {
3313 int i;
3314
3315 if (*dialect)
3316 output_operand_lossage ("nested assembly dialect alternatives");
3317 else
3318 *dialect = 1;
3319
3320 /* If we want the first dialect, do nothing. Otherwise, skip
3321 DIALECT_NUMBER of strings ending with '|'. */
3322 for (i = 0; i < dialect_number; i++)
3323 {
3324 while (*p && *p != '}')
3325 {
3326 if (*p == '|')
3327 {
3328 p++;
3329 break;
3330 }
3331
3332 /* Skip over any character after a percent sign. */
3333 if (*p == '%')
3334 p++;
3335 if (*p)
3336 p++;
3337 }
3338
3339 if (*p == '}')
3340 break;
3341 }
3342
3343 if (*p == '\0')
3344 output_operand_lossage ("unterminated assembly dialect alternative");
3345 }
3346 break;
3347
3348 case '|':
3349 if (*dialect)
3350 {
3351 /* Skip to close brace. */
3352 do
3353 {
3354 if (*p == '\0')
3355 {
3356 output_operand_lossage ("unterminated assembly dialect alternative");
3357 break;
3358 }
3359
3360 /* Skip over any character after a percent sign. */
3361 if (*p == '%' && p[1])
3362 {
3363 p += 2;
3364 continue;
3365 }
3366
3367 if (*p++ == '}')
3368 break;
3369 }
3370 while (1);
3371
3372 *dialect = 0;
3373 }
3374 else
3375 putc (c, asm_out_file);
3376 break;
3377
3378 case '}':
3379 if (! *dialect)
3380 putc (c, asm_out_file);
3381 *dialect = 0;
3382 break;
3383 default:
3384 gcc_unreachable ();
3385 }
3386
3387 return p;
3388}
3389#endif
3390
3391/* Output text from TEMPLATE to the assembler output file,
3392 obeying %-directions to substitute operands taken from
3393 the vector OPERANDS.
3394
3395 %N (for N a digit) means print operand N in usual manner.
3396 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3397 and print the label name with no punctuation.
3398 %cN means require operand N to be a constant
3399 and print the constant expression with no punctuation.
3400 %aN means expect operand N to be a memory address
3401 (not a memory reference!) and print a reference
3402 to that address.
3403 %nN means expect operand N to be a constant
3404 and print a constant expression for minus the value
3405 of the operand, with no other punctuation. */
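/* For illustration only (hypothetical template and operands, not tied
   to any real target): given the template "add %1,%0" with operands[0]
   a register and operands[1] a constant, this routine prints a tab, the
   literal text, and each %N replaced by the target's rendering of that
   operand, e.g. "\tadd $5,%eax" in an AT&T-style syntax. */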
3406
3407void
3408output_asm_insn (const char *templ, rtx *operands)
3409{
3410 const char *p;
3411 int c;
3412#ifdef ASSEMBLER_DIALECT
3413 int dialect = 0;
3414#endif
3415 int oporder[MAX_RECOG_OPERANDS];
3416 char opoutput[MAX_RECOG_OPERANDS];
3417 int ops = 0;
3418
3419 /* An insn may return a null string template
3420 in a case where no assembler code is needed. */
3421 if (*templ == 0)
3422 return;
3423
3424 memset (opoutput, 0, sizeof opoutput);
3425 p = templ;
3426 putc ('\t', asm_out_file);
3427
3428#ifdef ASM_OUTPUT_OPCODE
3429 ASM_OUTPUT_OPCODE (asm_out_file, p);
3430#endif
3431
3432 while ((c = *p++))
3433 switch (c)
3434 {
3435 case '\n':
3436 if (flag_verbose_asm)
3437 output_asm_operand_names (operands, oporder, ops);
3438 if (flag_print_asm_name)
3439 output_asm_name ();
3440
3441 ops = 0;
3442 memset (opoutput, 0, sizeof opoutput);
3443
3444 putc (c, asm_out_file);
3445#ifdef ASM_OUTPUT_OPCODE
3446 while ((c = *p) == '\t')
3447 {
3448 putc (c, asm_out_file);
3449 p++;
3450 }
3451 ASM_OUTPUT_OPCODE (asm_out_file, p);
3452#endif
3453 break;
3454
3455#ifdef ASSEMBLER_DIALECT
3456 case '{':
3457 case '}':
3458 case '|':
3459 p = do_assembler_dialects (p, &dialect);
3460 break;
3461#endif
3462
3463 case '%':
3464 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3465 if ASSEMBLER_DIALECT is defined and these characters have a special
3466 meaning as dialect delimiters. */
3467 if (*p == '%'
3468#ifdef ASSEMBLER_DIALECT
3469 || *p == '{' || *p == '}' || *p == '|'
3470#endif
3471 )
3472 {
3473 putc (*p, asm_out_file);
3474 p++;
3475 }
3476 /* %= outputs a number which is unique to each insn in the entire
3477 compilation. This is useful for making local labels that are
3478 referred to more than once in a given insn. */
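 /* E.g. (hypothetical template) "jne .L%=_skip\n\t...\n.L%=_skip:"
    expands both "%=" occurrences to the same per-insn counter value. */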
3479 else if (*p == '=')
3480 {
3481 p++;
3482 fprintf (asm_out_file, "%d", insn_counter);
3483 }
3484 /* % followed by a letter and some digits
3485 outputs an operand in a special way depending on the letter.
3486 Letters `acln' are implemented directly.
3487 Other letters are passed to `output_operand' so that
3488 the TARGET_PRINT_OPERAND hook can define them. */
3489 else if (ISALPHA (*p))
3490 {
3491 int letter = *p++;
3492 unsigned long opnum;
3493 char *endptr;
3494
3495 opnum = strtoul (p, &endptr, 10);
3496
3497 if (endptr == p)
3498 output_operand_lossage ("operand number missing "
3499 "after %%-letter");
3500 else if (this_is_asm_operands && opnum >= insn_noperands)
3501 output_operand_lossage ("operand number out of range");
3502 else if (letter == 'l')
3503 output_asm_label (operands[opnum]);
3504 else if (letter == 'a')
3505 output_address (VOIDmode, operands[opnum]);
3506 else if (letter == 'c')
3507 {
3508 if (CONSTANT_ADDRESS_P (operands[opnum]))
3509 output_addr_const (asm_out_file, operands[opnum]);
3510 else
3511 output_operand (operands[opnum], 'c');
3512 }
3513 else if (letter == 'n')
3514 {
3515 if (CONST_INT_P (operands[opnum]))
3516 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3517 - INTVAL (operands[opnum]));
3518 else
3519 {
3520 putc ('-', asm_out_file);
3521 output_addr_const (asm_out_file, operands[opnum]);
3522 }
3523 }
3524 else
3525 output_operand (operands[opnum], letter);
3526
3527 if (!opoutput[opnum])
3528 oporder[ops++] = opnum;
3529 opoutput[opnum] = 1;
3530
3531 p = endptr;
3532 c = *p;
3533 }
3534 /* % followed by a digit outputs an operand the default way. */
3535 else if (ISDIGIT (*p))
3536 {
3537 unsigned long opnum;
3538 char *endptr;
3539
3540 opnum = strtoul (p, &endptr, 10);
3541 if (this_is_asm_operands && opnum >= insn_noperands)
3542 output_operand_lossage ("operand number out of range");
3543 else
3544 output_operand (operands[opnum], 0);
3545
3546 if (!opoutput[opnum])
3547 oporder[ops++] = opnum;
3548 opoutput[opnum] = 1;
3549
3550 p = endptr;
3551 c = *p;
3552 }
3553 /* % followed by punctuation: output something for that
3554 punctuation character alone, with no operand. The
3555 TARGET_PRINT_OPERAND hook decides what is actually done. */
3556 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3557 output_operand (NULL_RTX, *p++);
3558 else
3559 output_operand_lossage ("invalid %%-code");
3560 break;
3561
3562 default:
3563 putc (c, asm_out_file);
3564 }
3565
3566 /* Try to keep the asm a bit more readable. */
3567 if ((flag_verbose_asm || flag_print_asm_name) && strlen (templ) < 9)
3568 putc ('\t', asm_out_file);
3569
3570 /* Write out the variable names for operands, if we know them. */
3571 if (flag_verbose_asm)
3572 output_asm_operand_names (operands, oporder, ops);
3573 if (flag_print_asm_name)
3574 output_asm_name ();
3575
3576 putc ('\n', asm_out_file);
3577}
3578
3579/* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3580
3581void
3582output_asm_label (rtx x)
3583{
3584 char buf[256];
3585
3586 if (GET_CODE (x) == LABEL_REF)
3587 x = label_ref_label (x);
3588 if (LABEL_P (x)
3589 || (NOTE_P (x)
3590 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3591 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3592 else
3593 output_operand_lossage ("'%%l' operand isn't a label");
3594
3595 assemble_name (asm_out_file, buf);
3596}
3597
3598/* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3599
3600void
3601mark_symbol_refs_as_used (rtx x)
3602{
3603 subrtx_iterator::array_type array;
3604 FOR_EACH_SUBRTX (iter, array, x, ALL)
3605 {
3606 const_rtx x = *iter;
3607 if (GET_CODE (x) == SYMBOL_REF)
3608 if (tree t = SYMBOL_REF_DECL (x))
3609 assemble_external (t);
3610 }
3611}
3612
3613/* Print operand X using machine-dependent assembler syntax.
3614 CODE is a non-digit that preceded the operand-number in the % spec,
3615 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3616 between the % and the digits.
3617 When CODE is a non-letter, X is 0.
3618
3619 The meanings of the letters are machine-dependent and controlled
3620 by TARGET_PRINT_OPERAND. */
3621
3622void
3623output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3624{
3625 if (x && GET_CODE (x) == SUBREG)
3626 x = alter_subreg (&x, true);
3627
3628 /* X must not be a pseudo reg. */
3629 if (!targetm.no_register_allocation)
3630 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3631
3632 targetm.asm_out.print_operand (asm_out_file, x, code);
3633
3634 if (x == NULL_RTX)
3635 return;
3636
3637 mark_symbol_refs_as_used (x);
3638}
3639
3640/* Print a memory reference operand for address X using
3641 machine-dependent assembler syntax. */
3642
3643void
3644output_address (machine_mode mode, rtx x)
3645{
3646 bool changed = false;
3647 walk_alter_subreg (&x, &changed);
3648 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3649}
3650
3651/* Print an integer constant expression in assembler syntax.
3652 Addition and subtraction are the only arithmetic
3653 that may appear in these expressions. */
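/* For example (with the default macros; the exact spelling is
   target-dependent): (const (plus (symbol_ref "foo") (const_int 4)))
   is printed as "foo+4". */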
3654
3655void
3656output_addr_const (FILE *file, rtx x)
3657{
3658 char buf[256];
3659
3660 restart:
3661 switch (GET_CODE (x))
3662 {
3663 case PC:
3664 putc ('.', file);
3665 break;
3666
3667 case SYMBOL_REF:
3668 if (SYMBOL_REF_DECL (x))
3669 assemble_external (SYMBOL_REF_DECL (x));
3670#ifdef ASM_OUTPUT_SYMBOL_REF
3671 ASM_OUTPUT_SYMBOL_REF (file, x);
3672#else
3673 assemble_name (file, XSTR (x, 0));
3674#endif
3675 break;
3676
3677 case LABEL_REF:
3678 x = label_ref_label (x);
3679 /* Fall through. */
3680 case CODE_LABEL:
3681 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3682#ifdef ASM_OUTPUT_LABEL_REF
3683 ASM_OUTPUT_LABEL_REF (file, buf);
3684#else
3685 assemble_name (file, buf);
3686#endif
3687 break;
3688
3689 case CONST_INT:
3690 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3691 break;
3692
3693 case CONST:
3694 /* This used to output parentheses around the expression,
3695 but that does not work on the 386 (either ATT or BSD assembler). */
3696 output_addr_const (file, XEXP (x, 0));
3697 break;
3698
3699 case CONST_WIDE_INT:
3700 /* We do not know the mode here so we have to use a roundabout
3701 way to build a wide-int to get it printed properly. */
3702 {
3703 wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
3704 CONST_WIDE_INT_NUNITS (x),
3705 CONST_WIDE_INT_NUNITS (x)
3706 * HOST_BITS_PER_WIDE_INT,
3707 false);
3708 print_decs (w, file);
3709 }
3710 break;
3711
3712 case CONST_DOUBLE:
3713 if (CONST_DOUBLE_AS_INT_P (x))
3714 {
3715 /* We can use %d if the number is one word and positive. */
3716 if (CONST_DOUBLE_HIGH (x))
3717 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3718 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3719 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3720 else if (CONST_DOUBLE_LOW (x) < 0)
3721 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3722 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3723 else
3724 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3725 }
3726 else
3727 /* We can't handle floating point constants;
3728 PRINT_OPERAND must handle them. */
3729 output_operand_lossage ("floating constant misused");
3730 break;
3731
3732 case CONST_FIXED:
3733 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3734 break;
3735
3736 case PLUS:
3737 /* Some assemblers need integer constants to appear last (e.g. masm). */
3738 if (CONST_INT_P (XEXP (x, 0)))
3739 {
3740 output_addr_const (file, XEXP (x, 1));
3741 if (INTVAL (XEXP (x, 0)) >= 0)
3742 fprintf (file, "+");
3743 output_addr_const (file, XEXP (x, 0));
3744 }
3745 else
3746 {
3747 output_addr_const (file, XEXP (x, 0));
3748 if (!CONST_INT_P (XEXP (x, 1))
3749 || INTVAL (XEXP (x, 1)) >= 0)
3750 fprintf (file, "+");
3751 output_addr_const (file, XEXP (x, 1));
3752 }
3753 break;
3754
3755 case MINUS:
3756 /* Avoid outputting things like x-x or x+5-x,
3757 since some assemblers can't handle that. */
3758 x = simplify_subtraction (x);
3759 if (GET_CODE (x) != MINUS)
3760 goto restart;
3761
3762 output_addr_const (file, XEXP (x, 0));
3763 fprintf (file, "-");
3764 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3765 || GET_CODE (XEXP (x, 1)) == PC
3766 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3767 output_addr_const (file, XEXP (x, 1));
3768 else
3769 {
3770 fputs (targetm.asm_out.open_paren, file);
3771 output_addr_const (file, XEXP (x, 1));
3772 fputs (targetm.asm_out.close_paren, file);
3773 }
3774 break;
3775
3776 case ZERO_EXTEND:
3777 case SIGN_EXTEND:
3778 case SUBREG:
3779 case TRUNCATE:
3780 output_addr_const (file, XEXP (x, 0));
3781 break;
3782
3783 default:
3784 if (targetm.asm_out.output_addr_const_extra (file, x))
3785 break;
3786
3787 output_operand_lossage (cmsgid: "invalid expression as operand");
3788 }
3789}
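
/* For illustration: given an address such as
   (const:DI (plus:DI (symbol_ref:DI ("foo")) (const_int 8))), the switch
   above recurses through CONST and PLUS and emits something like "foo+8";
   a difference of two labels comes out as e.g. ".L3-.L5" on a typical ELF
   target.  The RTL and the exact spelling here are only examples; they
   depend on ASM_GENERATE_INTERNAL_LABEL and the local label prefix.  */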

/* Output a quoted string.  */

void
output_quoted_string (FILE *asm_file, const char *string)
{
#ifdef OUTPUT_QUOTED_STRING
  OUTPUT_QUOTED_STRING (asm_file, string);
#else
  char c;

  putc ('\"', asm_file);
  while ((c = *string++) != 0)
    {
      if (ISPRINT (c))
        {
          if (c == '\"' || c == '\\')
            putc ('\\', asm_file);
          putc (c, asm_file);
        }
      else
        fprintf (asm_file, "\\%03o", (unsigned char) c);
    }
  putc ('\"', asm_file);
#endif
}
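
/* For example, given the C string a"b followed by a newline, the default
   loop above writes "a\"b\012": printable characters are copied with '"'
   and '\' escaped, and everything else becomes a three-digit octal
   escape.  */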

/* Write a HOST_WIDE_INT number in hex form 0x1234, fast.  */

void
fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
{
  char buf[2 + CHAR_BIT * sizeof (value) / 4];
  if (value == 0)
    putc ('0', f);
  else
    {
      char *p = buf + sizeof (buf);
      do
        *--p = "0123456789abcdef"[value % 16];
      while ((value /= 16) != 0);
      *--p = 'x';
      *--p = '0';
      fwrite (p, 1, buf + sizeof (buf) - p, f);
    }
}
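
/* For example, fprint_whex (f, 0x1a2b) writes "0x1a2b"; note that a zero
   VALUE is written as a bare "0" with no "0x" prefix, per the special case
   above.  */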

/* Internal function that prints an unsigned long in decimal in reverse.
   The output string IS NOT null-terminated.  */

static int
sprint_ul_rev (char *s, unsigned long value)
{
  int i = 0;
  do
    {
      s[i] = "0123456789"[value % 10];
      value /= 10;
      i++;
      /* alternate version, without modulo */
      /* oldval = value; */
      /* value /= 10; */
      /* s[i] = "0123456789" [oldval - 10*value]; */
      /* i++ */
    }
  while (value != 0);
  return i;
}

/* Write an unsigned long as decimal to a file, fast.  */

void
fprint_ul (FILE *f, unsigned long value)
{
  /* python says: len(str(2**64)) == 20 */
  char s[20];
  int i;

  i = sprint_ul_rev (s, value);

  /* It's probably too small to bother with string reversal and fputs.  */
  do
    {
      i--;
      putc (s[i], f);
    }
  while (i != 0);
}

/* Write an unsigned long as decimal to a string, fast.
   S must be wide enough to not overflow, at least 21 chars.
   Returns the length of the string (without terminating '\0').  */

int
sprint_ul (char *s, unsigned long value)
{
  int len = sprint_ul_rev (s, value);
  s[len] = '\0';

  std::reverse (s, s + len);
  return len;
}
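
/* For example, sprint_ul (buf, 90210) stores "90210" in BUF and returns 5,
   while fprint_ul (f, 90210) writes the same digits straight to F without
   materializing the string first.  */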

/* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
   %R prints the value of REGISTER_PREFIX.
   %L prints the value of LOCAL_LABEL_PREFIX.
   %U prints the value of USER_LABEL_PREFIX.
   %I prints the value of IMMEDIATE_PREFIX.
   %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
   Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.

   We handle alternate assembler dialects here, just like output_asm_insn.  */

void
asm_fprintf (FILE *file, const char *p, ...)
{
  char buf[10];
  char *q, c;
#ifdef ASSEMBLER_DIALECT
  int dialect = 0;
#endif
  va_list argptr;

  va_start (argptr, p);

  buf[0] = '%';

  while ((c = *p++))
    switch (c)
      {
#ifdef ASSEMBLER_DIALECT
      case '{':
      case '}':
      case '|':
        p = do_assembler_dialects (p, &dialect);
        break;
#endif

      case '%':
        c = *p++;
        q = &buf[1];
        while (strchr ("-+ #0", c))
          {
            *q++ = c;
            c = *p++;
          }
        while (ISDIGIT (c) || c == '.')
          {
            *q++ = c;
            c = *p++;
          }
        switch (c)
          {
          case '%':
            putc ('%', file);
            break;

          case 'd': case 'i': case 'u':
          case 'x': case 'X': case 'o':
          case 'c':
            *q++ = c;
            *q = 0;
            fprintf (file, buf, va_arg (argptr, int));
            break;

          case 'w':
            /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
               'o' cases, but we do not check for those cases.  It
               means that the value is a HOST_WIDE_INT, which may be
               either `long' or `long long'.  */
            memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
            q += strlen (HOST_WIDE_INT_PRINT);
            *q++ = *p++;
            *q = 0;
            fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
            break;

          case 'l':
            *q++ = c;
#ifdef HAVE_LONG_LONG
            if (*p == 'l')
              {
                *q++ = *p++;
                *q++ = *p++;
                *q = 0;
                fprintf (file, buf, va_arg (argptr, long long));
              }
            else
#endif
              {
                *q++ = *p++;
                *q = 0;
                fprintf (file, buf, va_arg (argptr, long));
              }

            break;

          case 's':
            *q++ = c;
            *q = 0;
            fprintf (file, buf, va_arg (argptr, char *));
            break;

          case 'O':
#ifdef ASM_OUTPUT_OPCODE
            ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif
            break;

          case 'R':
#ifdef REGISTER_PREFIX
            fprintf (file, "%s", REGISTER_PREFIX);
#endif
            break;

          case 'I':
#ifdef IMMEDIATE_PREFIX
            fprintf (file, "%s", IMMEDIATE_PREFIX);
#endif
            break;

          case 'L':
#ifdef LOCAL_LABEL_PREFIX
            fprintf (file, "%s", LOCAL_LABEL_PREFIX);
#endif
            break;

          case 'U':
            fputs (user_label_prefix, file);
            break;

#ifdef ASM_FPRINTF_EXTENSIONS
            /* Uppercase letters are reserved for general use by asm_fprintf
               and so are not available to target-specific code.  To prevent
               the ASM_FPRINTF_EXTENSIONS macro from using them, they are
               defined here.  As they get turned into real extensions to
               asm_fprintf they should be removed from this list.  */
          case 'A': case 'B': case 'C': case 'D': case 'E':
          case 'F': case 'G': case 'H': case 'J': case 'K':
          case 'M': case 'N': case 'P': case 'Q': case 'S':
          case 'T': case 'V': case 'W': case 'Y': case 'Z':
            break;

          ASM_FPRINTF_EXTENSIONS (file, argptr, p)
#endif
          default:
            gcc_unreachable ();
          }
        break;

      default:
        putc (c, file);
      }
  va_end (argptr);
}
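
/* Example (with hypothetical target values): on a target where
   REGISTER_PREFIX is "%" and IMMEDIATE_PREFIX is "$",

     asm_fprintf (file, "\tmovl\t%I%wd, %R%s\n", (HOST_WIDE_INT) 42, "eax");

   would write a tab-separated "movl $42, %eax" line to FILE; the %w prefix
   makes the matching argument a HOST_WIDE_INT rather than an int.  */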

/* Return true if this function has no function calls.  */

bool
leaf_function_p (void)
{
  rtx_insn *insn;

  /* Ensure we walk the entire function body.  */
  gcc_assert (!in_sequence_p ());

  /* Some back-ends (e.g. s390) want leaf functions to stay leaf
     functions even if they call mcount.  */
  if (crtl->profile && !targetm.keep_leaf_when_profiled ())
    return false;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (CALL_P (insn)
          && ! SIBLING_CALL_P (insn)
          && ! FAKE_CALL_P (insn))
        return false;
      if (NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE
          && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
          && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
        return false;
    }

  return true;
}

/* Return true if the branch is a forward branch.
   Uses the insn_shuid array, so it works only in the final pass.  May be
   used by output templates to add branch prediction hints where that is
   customary.  */
bool
final_forward_branch_p (rtx_insn *insn)
{
  int insn_id, label_id;

  gcc_assert (uid_shuid);
  insn_id = INSN_SHUID (insn);
  label_id = INSN_SHUID (JUMP_LABEL (insn));
  /* We've hit some insns that do not have id information available.  */
  gcc_assert (insn_id && label_id);
  return insn_id < label_id;
}
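
/* For illustration (hypothetical back-end code, made-up operands): an
   output template for a conditional branch might pick a static prediction
   suffix with something like

     return final_forward_branch_p (insn) ? "beq,pn %1, %0" : "beq,pt %1, %0";

   on an assembler that, like SPARC V9, spells "predicted taken" and
   "predicted not taken" as ,pt and ,pn.  */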

/* On some machines, a function with no call insns
   can run faster if it doesn't create its own register window.
   When output, the leaf function should use only the "output"
   registers.  Ordinarily, the function would be compiled to use
   the "input" registers to find its arguments; it is a candidate
   for leaf treatment if it uses only the "input" registers.
   Leaf function treatment means renumbering so the function
   uses the "output" registers instead.  */
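
/* (SPARC is the classic example: a leaf function that needs no register
   window can skip the save/restore pair, with LEAF_REG_REMAP mapping each
   incoming %i register onto the corresponding %o register.  This note is
   only illustrative; the target's LEAF_REGISTERS and LEAF_REG_REMAP
   definitions are authoritative.)  */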

#ifdef LEAF_REGISTERS

/* Return true if this function uses only the registers that can be
   safely renumbered.  */

bool
only_leaf_regs_used (void)
{
  int i;
  const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((df_regs_ever_live_p (i) || global_regs[i])
        && ! permitted_reg_in_leaf_functions[i])
      return false;

  if (crtl->uses_pic_offset_table
      && pic_offset_table_rtx != 0
      && REG_P (pic_offset_table_rtx)
      && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
    return false;

  return true;
}

/* Scan all instructions and renumber all registers into those
   available in leaf functions.  */

static void
leaf_renumber_regs (rtx_insn *first)
{
  rtx_insn *insn;

  /* Renumber only the actual patterns.
     The reg-notes can contain frame pointer refs,
     and renumbering them could crash, and should not be needed.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      leaf_renumber_regs_insn (PATTERN (insn));
}

/* Scan IN_RTX and its subexpressions, and renumber all regs into those
   available in leaf functions.  */

void
leaf_renumber_regs_insn (rtx in_rtx)
{
  int i, j;
  const char *format_ptr;

  if (in_rtx == 0)
    return;

  /* Renumber all input-registers into output-registers.  */

  if (REG_P (in_rtx))
    {
      int newreg;

      /* Don't renumber the same reg twice.  */
      if (in_rtx->used)
        return;

      newreg = REGNO (in_rtx);
      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
         to reach here as part of a REG_NOTE.  */
      if (newreg >= FIRST_PSEUDO_REGISTER)
        {
          in_rtx->used = 1;
          return;
        }
      newreg = LEAF_REG_REMAP (newreg);
      gcc_assert (newreg >= 0);
      df_set_regs_ever_live (REGNO (in_rtx), false);
      df_set_regs_ever_live (newreg, true);
      SET_REGNO (in_rtx, newreg);
      in_rtx->used = 1;
      return;
    }

  if (INSN_P (in_rtx))
    {
      /* Inside a SEQUENCE, we find insns.
         Renumber just the patterns of these insns,
         just as we do for the top-level insns.  */
      leaf_renumber_regs_insn (PATTERN (in_rtx));
      return;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        leaf_renumber_regs_insn (XEXP (in_rtx, i));
        break;

      case 'E':
        if (XVEC (in_rtx, i) != NULL)
          for (j = 0; j < XVECLEN (in_rtx, i); j++)
            leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
        break;

      case 'S':
      case 's':
      case '0':
      case 'i':
      case 'w':
      case 'p':
      case 'n':
      case 'u':
        break;

      default:
        gcc_unreachable ();
      }
}
#endif

/* Turn the RTL into assembly.  */
static unsigned int
rest_of_handle_final (void)
{
  const char *fnname = get_fnname_from_decl (current_function_decl);

  /* Turn debug markers into notes if the var-tracking pass has not
     been invoked.  */
  if (!flag_var_tracking && MAY_HAVE_DEBUG_MARKER_INSNS)
    delete_vta_debug_insns (false);

  assemble_start_function (current_function_decl, fnname);
  rtx_insn *first = get_insns ();
  int seen = 0;
  final_start_function_1 (&first, asm_out_file, &seen, optimize);
  final_1 (first, asm_out_file, seen, optimize);
  if (flag_ipa_ra
      && !lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl))
      /* Functions with the naked attribute are supported only with basic
         asm statements in the body, so for the supported use cases the
         information on clobbered registers is not available.  */
      && !lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)))
    collect_fn_hard_reg_usage ();
  final_end_function ();

  /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
     directive that closes the procedure descriptor; likewise for x64 SEH.
     Otherwise it's not strictly necessary, but it doesn't hurt either.  */
  output_function_exception_table (crtl->has_bb_partition ? 1 : 0);

  assemble_end_function (current_function_decl, fnname);

  /* Free up reg info memory.  */
  free_reg_info ();

  if (! quiet_flag)
    fflush (asm_out_file);

  /* Note that for those inline functions where we don't initially
     know for certain that we will be generating an out-of-line copy,
     the first invocation of this routine (rest_of_compilation) will
     skip over this code by doing a `goto exit_rest_of_compilation;'.
     Later on, wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those inline functions that need
     to have out-of-line copies generated.  During that call, we
     *will* be routed past here.  */

  timevar_push (TV_SYMOUT);
  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->function_decl (current_function_decl);
  timevar_pop (TV_SYMOUT);

  /* Release the blocks that are linked to DECL_INITIAL() to free the
     memory.  */
  DECL_INITIAL (current_function_decl) = error_mark_node;

  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
                                 decl_init_priority_lookup
                                   (current_function_decl));
  if (DECL_STATIC_DESTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
                                decl_fini_priority_lookup
                                  (current_function_decl));
  return 0;
}

namespace {

const pass_data pass_data_final =
{
  RTL_PASS, /* type */
  "final", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_FINAL, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_final : public rtl_opt_pass
{
public:
  pass_final (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_final, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return rest_of_handle_final ();
  }

}; // class pass_final

} // anon namespace

rtl_opt_pass *
make_pass_final (gcc::context *ctxt)
{
  return new pass_final (ctxt);
}

static unsigned int
rest_of_handle_shorten_branches (void)
{
  /* Shorten branches.  */
  shorten_branches (get_insns ());
  return 0;
}

namespace {

const pass_data pass_data_shorten_branches =
{
  RTL_PASS, /* type */
  "shorten", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_SHORTEN_BRANCH, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_shorten_branches : public rtl_opt_pass
{
public:
  pass_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return rest_of_handle_shorten_branches ();
  }

}; // class pass_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_shorten_branches (gcc::context *ctxt)
{
  return new pass_shorten_branches (ctxt);
}

static unsigned int
rest_of_clean_state (void)
{
  rtx_insn *insn, *next;
  FILE *final_output = NULL;
  int save_unnumbered = flag_dump_unnumbered;
  int save_noaddr = flag_dump_noaddr;

  if (flag_dump_final_insns)
    {
      final_output = fopen (flag_dump_final_insns, "a");
      if (!final_output)
        {
          error ("could not open final insn dump file %qs: %m",
                 flag_dump_final_insns);
          flag_dump_final_insns = NULL;
        }
      else
        {
          flag_dump_noaddr = flag_dump_unnumbered = 1;
          if (flag_compare_debug_opt || flag_compare_debug)
            dump_flags |= TDF_NOUID | TDF_COMPARE_DEBUG;
          dump_function_header (final_output, current_function_decl,
                                dump_flags);
          final_insns_dump_p = true;

          for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
            if (LABEL_P (insn))
              INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
            else
              {
                if (NOTE_P (insn))
                  set_block_for_insn (insn, NULL);
                INSN_UID (insn) = 0;
              }
        }
    }

  /* It is very important to decompose the RTL instruction chain here:
     debug information keeps pointing into CODE_LABEL insns inside the
     function body.  If these remain pointing to the other insns, we end up
     preserving the whole RTL chain and the attached detailed debug info in
     memory.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = NULL;
      SET_PREV_INSN (insn) = NULL;

      rtx_insn *call_insn = insn;
      if (NONJUMP_INSN_P (call_insn)
          && GET_CODE (PATTERN (call_insn)) == SEQUENCE)
        {
          rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (call_insn));
          call_insn = seq->insn (0);
        }
      if (CALL_P (call_insn))
        {
          rtx note
            = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
          if (note)
            remove_note (call_insn, note);
        }

      if (final_output
          && (!NOTE_P (insn)
              || (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
                  && NOTE_KIND (insn) != NOTE_INSN_BEGIN_STMT
                  && NOTE_KIND (insn) != NOTE_INSN_INLINE_ENTRY
                  && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
                  && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
                  && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
        print_rtl_single (final_output, insn);
    }

  if (final_output)
    {
      flag_dump_noaddr = save_noaddr;
      flag_dump_unnumbered = save_unnumbered;
      final_insns_dump_p = false;

      if (fclose (final_output))
        {
          error ("could not close final insn dump file %qs: %m",
                 flag_dump_final_insns);
          flag_dump_final_insns = NULL;
        }
    }

  flag_rerun_cse_after_global_opts = 0;
  reload_completed = 0;
  epilogue_completed = 0;
#ifdef STACK_REGS
  regstack_completed = 0;
#endif

  /* Clear out the insn_length contents now that they are no
     longer valid.  */
  init_insn_lengths ();

  /* Show no temporary slots allocated.  */
  init_temp_slots ();

  free_bb_for_insn ();

  if (cfun->gimple_df)
    delete_tree_ssa (cfun);

  /* We can reduce the stack alignment at the call site only when we are
     sure that the function body just produced will actually be used in
     the final executable.  */
  if (flag_ipa_stack_alignment
      && decl_binds_to_current_def_p (current_function_decl))
    {
      unsigned int pref = crtl->preferred_stack_boundary;
      if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
        pref = crtl->stack_alignment_needed;
      cgraph_node::rtl_info (current_function_decl)
        ->preferred_incoming_stack_boundary = pref;
    }

  /* Make sure volatile mem refs aren't considered valid operands for
     arithmetic insns.  We must call this here if this is a nested inline
     function, since the above code leaves us in the init_recog state,
     and the function context push/pop code does not save/restore
     volatile_ok.

     ??? Maybe it isn't necessary for expand_start_function to call this
     anymore if we do it here?  */

  init_recog_no_volatile ();

  /* We're done with this function.  Free up memory if we can.  */
  free_after_parsing (cfun);
  free_after_compilation (cfun);
  return 0;
}

namespace {

const pass_data pass_data_clean_state =
{
  RTL_PASS, /* type */
  "*clean_state", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_FINAL, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_rtl, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_clean_state : public rtl_opt_pass
{
public:
  pass_clean_state (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_clean_state, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return rest_of_clean_state ();
  }

}; // class pass_clean_state

} // anon namespace

rtl_opt_pass *
make_pass_clean_state (gcc::context *ctxt)
{
  return new pass_clean_state (ctxt);
}

/* Return true if INSN is a call to the current function.  */

static bool
self_recursive_call_p (rtx_insn *insn)
{
  tree fndecl = get_call_fndecl (insn);
  return (fndecl == current_function_decl
          && decl_binds_to_current_def_p (fndecl));
}

/* Collect hard register usage for the current function.  */

static void
collect_fn_hard_reg_usage (void)
{
  rtx_insn *insn;
#ifdef STACK_REGS
  int i;
#endif
  struct cgraph_rtl_info *node;
  HARD_REG_SET function_used_regs;

  /* ??? To be removed when all the ports have been fixed.  */
  if (!targetm.call_fusage_contains_non_callee_clobbers)
    return;

  /* Be conservative - mark fixed and global registers as used.  */
  function_used_regs = fixed_reg_set;

#ifdef STACK_REGS
  /* Handle STACK_REGS conservatively, since the df-framework does not
     provide accurate information for them.  */
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT (function_used_regs, i);
#endif

  for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
    {
      HARD_REG_SET insn_used_regs;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      if (CALL_P (insn)
          && !self_recursive_call_p (insn))
        function_used_regs
          |= insn_callee_abi (insn).full_and_partial_reg_clobbers ();

      find_all_hard_reg_sets (insn, &insn_used_regs, false);
      function_used_regs |= insn_used_regs;

      if (hard_reg_set_subset_p (crtl->abi->full_and_partial_reg_clobbers (),
                                 function_used_regs))
        return;
    }

  /* Mask out fully-saved registers, so that they don't affect equality
     comparisons between function_abis.  */
  function_used_regs &= crtl->abi->full_and_partial_reg_clobbers ();

  node = cgraph_node::rtl_info (current_function_decl);
  gcc_assert (node != NULL);

  node->function_used_regs = function_used_regs;
}
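
/* Roughly speaking, collect_fn_hard_reg_usage is the producer side of
   -fipa-ra: the register set stored in the cgraph_rtl_info above lets
   later-compiled callers treat any hard register outside
   function_used_regs as preserved across calls to this function.  */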
4619
