1 | /* Convert RTL to assembler code and output it, for GNU compiler. |
2 | Copyright (C) 1987-2025 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | /* This is the final pass of the compiler. |
21 | It looks at the rtl code for a function and outputs assembler code. |
22 | |
23 | Call `final_start_function' to output the assembler code for function entry, |
24 | `final' to output assembler code for some RTL code, |
25 | `final_end_function' to output assembler code for function exit. |
26 | If a function is compiled in several pieces, each piece is |
27 | output separately with `final'. |
28 | |
29 | Some optimizations are also done at this level. |
30 | Move instructions that were made unnecessary by good register allocation |
31 | are detected and omitted from the output. (Though most of these |
32 | are removed by the last jump pass.) |
33 | |
34 | Instructions to set the condition codes are omitted when it can be |
35 | seen that the condition codes already had the desired values. |
36 | |
37 | In some cases it is sufficient if the inherited condition codes |
38 | have related values, but this may require the following insn |
39 | (the one that tests the condition codes) to be modified. |
40 | |
41 | The code for the function prologue and epilogue is generated |
42 | directly in assembler by the target hooks function_prologue and |
43 | function_epilogue. Those instructions never exist as rtl. */ |
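
/* For illustration only: a rough sketch of how a caller is expected to use
   these entry points (the real driver sequence lives in the final pass later
   in this file and in varasm.cc; the exact arguments here are illustrative):

     assemble_start_function (decl, fnname);
     final_start_function (first, asm_out_file, optimize);
     final (first, asm_out_file, optimize);
     final_end_function ();
     assemble_end_function (decl, fnname);

   where FIRST is the first insn of the function's rtl.  */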
44 | |
45 | #include "config.h" |
46 | #define INCLUDE_ALGORITHM /* reverse */ |
47 | #include "system.h" |
48 | #include "coretypes.h" |
49 | #include "backend.h" |
50 | #include "target.h" |
51 | #include "rtl.h" |
52 | #include "tree.h" |
53 | #include "cfghooks.h" |
54 | #include "df.h" |
55 | #include "memmodel.h" |
56 | #include "tm_p.h" |
57 | #include "insn-config.h" |
58 | #include "regs.h" |
59 | #include "emit-rtl.h" |
60 | #include "recog.h" |
61 | #include "cgraph.h" |
62 | #include "tree-pretty-print.h" /* for dump_function_header */ |
63 | #include "varasm.h" |
64 | #include "insn-attr.h" |
65 | #include "conditions.h" |
66 | #include "flags.h" |
67 | #include "output.h" |
68 | #include "except.h" |
69 | #include "rtl-error.h" |
70 | #include "toplev.h" /* exact_log2, floor_log2 */ |
71 | #include "reload.h" |
72 | #include "intl.h" |
73 | #include "cfgrtl.h" |
74 | #include "debug.h" |
75 | #include "tree-pass.h" |
76 | #include "tree-ssa.h" |
77 | #include "cfgloop.h" |
78 | #include "stringpool.h" |
79 | #include "attribs.h" |
80 | #include "asan.h" |
81 | #include "rtl-iter.h" |
82 | #include "print-rtl.h" |
83 | #include "function-abi.h" |
84 | #include "common/common-target.h" |
85 | #include "diagnostic.h" |
86 | |
87 | #include "dwarf2out.h" |
88 | |
89 | /* Most ports don't need to define CC_STATUS_INIT. |
90 | So define a null default for it to save conditionalization later. */ |
91 | #ifndef CC_STATUS_INIT |
92 | #define CC_STATUS_INIT |
93 | #endif |
94 | |
95 | /* Is the given character a logical line separator for the assembler? */ |
96 | #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR |
97 | #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';') |
98 | #endif |
99 | |
100 | #ifndef JUMP_TABLES_IN_TEXT_SECTION |
101 | #define JUMP_TABLES_IN_TEXT_SECTION 0 |
102 | #endif |
103 | |
104 | /* Bitflags used by final_scan_insn. */ |
105 | #define SEEN_NOTE 1 |
106 | #define SEEN_EMITTED 2 |
107 | #define SEEN_NEXT_VIEW 4 |
108 | |
109 | /* Last insn processed by final_scan_insn. */ |
110 | static rtx_insn *debug_insn; |
111 | rtx_insn *current_output_insn; |
112 | |
113 | /* Line number of last NOTE. */ |
114 | static int last_linenum; |
115 | |
116 | /* Column number of last NOTE. */ |
117 | static int last_columnnum; |
118 | |
119 | /* Discriminator written to assembly. */ |
120 | static int last_discriminator; |
121 | |
122 | /* Compute discriminator to be written to assembly for current instruction. |
123 | Note: actual usage depends on loc_discriminator_kind setting. */ |
124 | static inline int compute_discriminator (location_t loc); |
125 | |
126 | /* Highest line number in current block. */ |
127 | static int high_block_linenum; |
128 | |
129 | /* Likewise for function. */ |
130 | static int high_function_linenum; |
131 | |
132 | /* Filename of last NOTE. */ |
133 | static const char *last_filename; |
134 | |
135 | /* Override filename, line and column number. */ |
136 | static const char *override_filename; |
137 | static int override_linenum; |
138 | static int override_columnnum; |
139 | static int override_discriminator; |
140 | |
141 | /* Whether to force emission of a line note before the next insn. */ |
142 | static bool force_source_line = false; |
143 | |
144 | extern const int length_unit_log; /* This is defined in insn-attrtab.cc. */ |
145 | |
146 | /* Nonzero while outputting an `asm' with operands. |
147 | This means that inconsistencies are the user's fault, so don't die. |
148 | The precise value is the insn being output, to pass to error_for_asm. */ |
149 | const rtx_insn *this_is_asm_operands; |
150 | |
151 | /* Number of operands of this insn, for an `asm' with operands. */ |
152 | unsigned int insn_noperands; |
153 | |
154 | /* Compare optimization flag. */ |
155 | |
156 | static rtx last_ignored_compare = 0; |
157 | |
158 | /* Assign a unique number to each insn that is output. |
159 | This can be used to generate unique local labels. */ |
160 | |
161 | static int insn_counter = 0; |
162 | |
163 | /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */ |
164 | |
165 | static int block_depth; |
166 | |
167 | /* True if we have enabled APP processing of our assembler output. */ |
168 | |
169 | static bool app_on; |
170 | |
171 | /* If we are outputting an insn sequence, this contains the sequence rtx. |
172 | Zero otherwise. */ |
173 | |
174 | rtx_sequence *final_sequence; |
175 | |
176 | #ifdef ASSEMBLER_DIALECT |
177 | |
178 | /* Number of the assembler dialect to use, starting at 0. */ |
179 | static int dialect_number; |
180 | #endif |
181 | |
182 | /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */ |
183 | rtx current_insn_predicate; |
184 | |
185 | /* True if printing into -fdump-final-insns= dump. */ |
186 | bool final_insns_dump_p; |
187 | |
188 | /* True if profile_function should be called, but hasn't been called yet. */ |
189 | static bool need_profile_function; |
190 | |
191 | static int asm_insn_count (rtx); |
192 | static void profile_function (FILE *); |
193 | static void profile_after_prologue (FILE *); |
194 | static bool notice_source_line (rtx_insn *, bool *); |
195 | static rtx walk_alter_subreg (rtx *, bool *); |
196 | static void output_asm_name (void); |
197 | static void output_alternate_entry_point (FILE *, rtx_insn *); |
198 | static tree get_mem_expr_from_op (rtx, int *); |
199 | static void output_asm_operand_names (rtx *, int *, int); |
200 | #ifdef LEAF_REGISTERS |
201 | static void leaf_renumber_regs (rtx_insn *); |
202 | #endif |
203 | static int align_fuzz (rtx, rtx, int, unsigned); |
204 | static void collect_fn_hard_reg_usage (void); |
205 | |
206 | /* Initialize data in final at the beginning of a compilation. */ |
207 | |
208 | void |
209 | init_final (const char *filename ATTRIBUTE_UNUSED) |
210 | { |
211 | app_on = 0; |
212 | final_sequence = 0; |
213 | |
214 | #ifdef ASSEMBLER_DIALECT |
215 | dialect_number = ASSEMBLER_DIALECT; |
216 | #endif |
217 | } |
218 | |
219 | /* Default target function prologue and epilogue assembler output. |
220 | |
221 | If not overridden for epilogue code, then the function body itself |
222 | contains return instructions wherever needed. */ |
223 | void |
224 | default_function_pro_epilogue (FILE *) |
225 | { |
226 | } |
227 | |
228 | void |
229 | default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED, |
230 | tree decl ATTRIBUTE_UNUSED, |
231 | bool new_is_cold ATTRIBUTE_UNUSED) |
232 | { |
233 | } |
234 | |
235 | /* Default target hook that outputs nothing to a stream. */ |
236 | void |
237 | no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED) |
238 | { |
239 | } |
240 | |
241 | /* Enable APP processing of subsequent output. |
242 | Used before the output from an `asm' statement. */ |
243 | |
244 | void |
245 | app_enable (void) |
246 | { |
247 | if (! app_on) |
248 | { |
249 | fputs (ASM_APP_ON, stream: asm_out_file); |
250 | app_on = 1; |
251 | } |
252 | } |
253 | |
254 | /* Disable APP processing of subsequent output. |
255 | Called from varasm.cc before most kinds of output. */ |
256 | |
257 | void |
258 | app_disable (void) |
259 | { |
260 | if (app_on) |
261 | { |
262 | fputs (ASM_APP_OFF, stream: asm_out_file); |
263 | app_on = 0; |
264 | } |
265 | } |
266 | |
267 | /* Return the number of slots filled in the current |
268 | delayed branch sequence (we don't count the insn needing the |
269 | delay slot). Zero if not in a delayed branch sequence. */ |
270 | |
271 | int |
272 | dbr_sequence_length (void) |
273 | { |
274 | if (final_sequence != 0) |
275 | return XVECLEN (final_sequence, 0) - 1; |
276 | else |
277 | return 0; |
278 | } |
279 | |
280 | /* The next two pages contain routines used to compute the length of an insn |
281 | and to shorten branches. */ |
282 | |
283 | /* Arrays for insn lengths, and addresses. The latter is referenced by |
284 | `insn_current_length'. */ |
285 | |
286 | static int *insn_lengths; |
287 | |
288 | vec<int> insn_addresses_; |
289 | |
290 | /* Max uid for which the above arrays are valid. */ |
291 | static int insn_lengths_max_uid; |
292 | |
293 | /* Address of insn being processed. Used by `insn_current_length'. */ |
294 | int insn_current_address; |
295 | |
296 | /* Address of insn being processed in previous iteration. */ |
297 | int insn_last_address; |
298 | |
299 | /* Known invariant alignment of the insn being processed. */ |
300 | int insn_current_align; |
301 | |
302 | /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)] |
303 | gives the next following alignment insn that increases the known |
304 | alignment, or NULL_RTX if there is no such insn. |
305 | For any alignment obtained this way, we can again index uid_align with |
306 | its uid to obtain the next following align that in turn increases the |
307 | alignment, till we reach NULL_RTX; the sequence obtained this way |
308 | for each insn we'll call the alignment chain of this insn in the following |
309 | comments. */ |
310 | |
311 | static rtx *uid_align; |
312 | static int *uid_shuid; |
313 | static vec<align_flags> label_align; |
314 | |
315 | /* Indicate that branch shortening hasn't yet been done. */ |
316 | |
317 | void |
318 | init_insn_lengths (void) |
319 | { |
320 | if (uid_shuid) |
321 | { |
322 | free (ptr: uid_shuid); |
323 | uid_shuid = 0; |
324 | } |
325 | if (insn_lengths) |
326 | { |
327 | free (ptr: insn_lengths); |
328 | insn_lengths = 0; |
329 | insn_lengths_max_uid = 0; |
330 | } |
331 | if (HAVE_ATTR_length) |
332 | INSN_ADDRESSES_FREE (); |
333 | if (uid_align) |
334 | { |
335 | free (ptr: uid_align); |
336 | uid_align = 0; |
337 | } |
338 | } |
339 | |
340 | /* Obtain the current length of an insn. If branch shortening has been done, |
341 | get its actual length. Otherwise, use FALLBACK_FN to calculate the |
342 | length. */ |
343 | static int |
344 | get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *)) |
345 | { |
346 | rtx body; |
347 | int i; |
348 | int length = 0; |
349 | |
350 | if (!HAVE_ATTR_length) |
351 | return 0; |
352 | |
353 | if (insn_lengths_max_uid > INSN_UID (insn)) |
354 | return insn_lengths[INSN_UID (insn)]; |
355 | else |
356 | switch (GET_CODE (insn)) |
357 | { |
358 | case NOTE: |
359 | case BARRIER: |
360 | case CODE_LABEL: |
361 | case DEBUG_INSN: |
362 | return 0; |
363 | |
364 | case CALL_INSN: |
365 | case JUMP_INSN: |
366 | body = PATTERN (insn); |
367 | if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0) |
368 | length = asm_insn_count (body) * fallback_fn (insn); |
369 | else |
370 | length = fallback_fn (insn); |
371 | break; |
372 | |
373 | case INSN: |
374 | body = PATTERN (insn); |
375 | if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER) |
376 | return 0; |
377 | |
378 | else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0) |
379 | length = asm_insn_count (body) * fallback_fn (insn); |
380 | else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (p: body)) |
381 | for (i = 0; i < seq->len (); i++) |
382 | length += get_attr_length_1 (insn: seq->insn (index: i), fallback_fn); |
383 | else |
384 | length = fallback_fn (insn); |
385 | break; |
386 | |
387 | default: |
388 | break; |
389 | } |
390 | |
391 | #ifdef ADJUST_INSN_LENGTH |
392 | ADJUST_INSN_LENGTH (insn, length); |
393 | #endif |
394 | return length; |
395 | } |
396 | |
397 | /* Obtain the current length of an insn. If branch shortening has been done, |
398 | get its actual length. Otherwise, get its maximum length. */ |
399 | int |
400 | get_attr_length (rtx_insn *insn) |
401 | { |
402 | return get_attr_length_1 (insn, fallback_fn: insn_default_length); |
403 | } |
404 | |
405 | /* Obtain the current length of an insn. If branch shortening has been done, |
406 | get its actual length. Otherwise, get its minimum length. */ |
407 | int |
408 | get_attr_min_length (rtx_insn *insn) |
409 | { |
410 | return get_attr_length_1 (insn, fallback_fn: insn_min_length); |
411 | } |
412 | |
413 | /* Code to handle alignment inside shorten_branches. */ |
414 | |
415 | /* Here is an explanation of how the algorithm in align_fuzz can give |
416 | proper results: |
417 | |
418 | Call a sequence of instructions beginning with alignment point X |
419 | and continuing until the next alignment point `block X'. When `X' |
420 | is used in an expression, it means the alignment value of the |
421 | alignment point. |
422 | |
423 | Call the distance between the start of the first insn of block X, and |
424 | the end of the last insn of block X `IX', for the `inner size of X'. |
425 | This is clearly the sum of the instruction lengths. |
426 | |
427 | Likewise with the next alignment-delimited block following X, which we |
428 | shall call block Y. |
429 | |
430 | Call the distance between the start of the first insn of block X, and |
431 | the start of the first insn of block Y `OX', for the `outer size of X'. |
432 | |
433 | The estimated padding is then OX - IX. |
434 | |
435 | OX can be safely estimated as |
436 | |
437 | if (X >= Y) |
438 | OX = round_up(IX, Y) |
439 | else |
440 | OX = round_up(IX, X) + Y - X |
441 | |
442 | Clearly est(IX) >= real(IX), because that only depends on the |
443 | instruction lengths, and those being overestimated is a given. |
444 | |
445 | Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so |
446 | we needn't worry about that when thinking about OX. |
447 | |
448 | When X >= Y, the alignment provided by Y adds no uncertainty factor |
449 | for branch ranges starting before X, so we can just round what we have. |
450 | But when X < Y, we don't know anything about the, so to speak, |
451 | `middle bits', so we have to assume the worst when aligning up from an |
452 | address mod X to one mod Y, which is Y - X. */ |
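
/* A small worked example of the estimate above, with illustrative numbers
   only: for IX = 10 bytes, X = 4 and Y = 8 we are in the X < Y case, so
   OX = round_up (10, 4) + 8 - 4 = 16, i.e. up to 6 bytes of estimated
   padding; with X = 8 and Y = 4 instead, OX = round_up (10, 4) = 12,
   i.e. at most 2 bytes.  */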
453 | |
454 | #ifndef LABEL_ALIGN |
455 | #define LABEL_ALIGN(LABEL) align_labels |
456 | #endif |
457 | |
458 | #ifndef LOOP_ALIGN |
459 | #define LOOP_ALIGN(LABEL) align_loops |
460 | #endif |
461 | |
462 | #ifndef LABEL_ALIGN_AFTER_BARRIER |
463 | #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0 |
464 | #endif |
465 | |
466 | #ifndef JUMP_ALIGN |
467 | #define JUMP_ALIGN(LABEL) align_jumps |
468 | #endif |
469 | |
470 | #ifndef ADDR_VEC_ALIGN |
471 | static int |
472 | final_addr_vec_align (rtx_jump_table_data *addr_vec) |
473 | { |
474 | int align = GET_MODE_SIZE (mode: addr_vec->get_data_mode ()); |
475 | |
476 | if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT) |
477 | align = BIGGEST_ALIGNMENT / BITS_PER_UNIT; |
478 | return exact_log2 (x: align); |
479 | |
480 | } |
481 | |
482 | #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC) |
483 | #endif |
484 | |
485 | #ifndef INSN_LENGTH_ALIGNMENT |
486 | #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log |
487 | #endif |
488 | |
489 | #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)]) |
490 | |
491 | static int min_labelno, max_labelno; |
492 | |
493 | #define LABEL_TO_ALIGNMENT(LABEL) \ |
494 | (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno]) |
495 | |
496 | /* For the benefit of port-specific code, do this also as a function. */ |
497 | |
498 | align_flags |
499 | label_to_alignment (rtx label) |
500 | { |
501 | if (CODE_LABEL_NUMBER (label) <= max_labelno) |
502 | return LABEL_TO_ALIGNMENT (label); |
503 | return align_flags (); |
504 | } |
505 | |
506 | /* The differences in addresses |
507 | between a branch and its target might grow or shrink depending on |
508 | the alignment the start insn of the range (the branch for a forward |
509 | branch or the label for a backward branch) starts out on; if these |
510 | differences are used naively, they can even oscillate infinitely. |
511 | We therefore want to compute a 'worst case' address difference that |
512 | is independent of the alignment the start insn of the range ends |
513 | up on, and that is at least as large as the actual difference. |
514 | The function align_fuzz calculates the amount we have to add to the |
515 | naively computed difference, by traversing the part of the alignment |
516 | chain of the start insn of the range that is in front of the end insn |
517 | of the range, and considering for each alignment the maximum amount |
518 | that it might contribute to a size increase. |
519 | |
520 | For casesi tables, we also want to know worst case minimum amounts of |
521 | address difference, in case a machine description wants to introduce |
522 | some common offset that is added to all offsets in a table. |
523 | For this purpose, align_fuzz with a growth argument of 0 computes the |
524 | appropriate adjustment. */ |
525 | |
526 | /* Compute the maximum delta by which the difference of the addresses of |
527 | START and END might grow / shrink due to a different address for start |
528 | which changes the size of alignment insns between START and END. |
529 | KNOWN_ALIGN_LOG is the alignment known for START. |
530 | GROWTH should be ~0 if the objective is to compute potential code size |
531 | increase, and 0 if the objective is to compute potential shrink. |
532 | The return value is undefined for any other value of GROWTH. */ |
533 | |
534 | static int |
535 | align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth) |
536 | { |
537 | int uid = INSN_UID (insn: start); |
538 | rtx align_label; |
539 | int known_align = 1 << known_align_log; |
540 | int end_shuid = INSN_SHUID (end); |
541 | int fuzz = 0; |
542 | |
543 | for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid]) |
544 | { |
545 | int align_addr, new_align; |
546 | |
547 | uid = INSN_UID (insn: align_label); |
548 | align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid]; |
549 | if (uid_shuid[uid] > end_shuid) |
550 | break; |
551 | align_flags alignment = LABEL_TO_ALIGNMENT (align_label); |
552 | new_align = 1 << alignment.levels[0].log; |
553 | if (new_align < known_align) |
554 | continue; |
555 | fuzz += (-align_addr ^ growth) & (new_align - known_align); |
556 | known_align = new_align; |
557 | } |
558 | return fuzz; |
559 | } |
560 | |
561 | /* Compute a worst-case reference address of a branch so that it |
562 | can be safely used in the presence of aligned labels. Since the |
563 | size of the branch itself is unknown, the size of the branch is |
564 | not included in the range. I.e. for a forward branch, the reference |
565 | address is the end address of the branch as known from the previous |
566 | branch shortening pass, minus a value to account for possible size |
567 | increase due to alignment. For a backward branch, it is the start |
568 | address of the branch as known from the current pass, plus a value |
569 | to account for possible size increase due to alignment. |
570 | NB.: Therefore, the maximum offset allowed for backward branches needs |
571 | to exclude the branch size. */ |
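
/* As a worked illustration (numbers are made up): if a forward branch ended
   at address 100 in the previous pass and align_fuzz says that alignment
   between the branch and its target can add at most 6 bytes, the reference
   address used for range checks is 100 - 6 = 94.  */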
572 | |
573 | int |
574 | insn_current_reference_address (rtx_insn *branch) |
575 | { |
576 | rtx dest; |
577 | int seq_uid; |
578 | |
579 | if (! INSN_ADDRESSES_SET_P ()) |
580 | return 0; |
581 | |
582 | rtx_insn *seq = NEXT_INSN (insn: PREV_INSN (insn: branch)); |
583 | seq_uid = INSN_UID (insn: seq); |
584 | if (!jump_to_label_p (branch)) |
585 | /* This can happen for example on the PA; the objective is to know the |
586 | offset to address something in front of the start of the function. |
587 | Thus, we can treat it like a backward branch. |
588 | We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than |
589 | any alignment we'd encounter, so we skip the call to align_fuzz. */ |
590 | return insn_current_address; |
591 | dest = JUMP_LABEL (branch); |
592 | |
593 | /* BRANCH has no proper alignment chain set, so use SEQ. |
594 | BRANCH also has no INSN_SHUID. */ |
595 | if (INSN_SHUID (seq) < INSN_SHUID (dest)) |
596 | { |
597 | /* Forward branch. */ |
598 | return (insn_last_address + insn_lengths[seq_uid] |
599 | - align_fuzz (start: seq, end: dest, known_align_log: length_unit_log, growth: ~0)); |
600 | } |
601 | else |
602 | { |
603 | /* Backward branch. */ |
604 | return (insn_current_address |
605 | + align_fuzz (start: dest, end: seq, known_align_log: length_unit_log, growth: ~0)); |
606 | } |
607 | } |
608 | |
609 | /* Compute branch alignments based on CFG profile. */ |
610 | |
611 | void |
612 | compute_alignments (void) |
613 | { |
614 | basic_block bb; |
615 | align_flags max_alignment; |
616 | |
617 | label_align.truncate (size: 0); |
618 | |
619 | max_labelno = max_label_num (); |
620 | min_labelno = get_first_label_num (); |
621 | label_align.safe_grow_cleared (len: max_labelno - min_labelno + 1, exact: true); |
622 | |
623 | /* If not optimizing or optimizing for size, don't assign any alignments. */ |
624 | if (! optimize || optimize_function_for_size_p (cfun)) |
625 | return; |
626 | |
627 | if (dump_file) |
628 | { |
629 | dump_reg_info (dump_file); |
630 | dump_flow_info (dump_file, TDF_DETAILS); |
631 | flow_loops_dump (dump_file, NULL, 1); |
632 | } |
633 | loop_optimizer_init (AVOID_CFG_MODIFICATIONS); |
634 | profile_count count_threshold = cfun->cfg->count_max / param_align_threshold; |
635 | |
636 | if (dump_file) |
637 | { |
638 | fprintf (stream: dump_file, format: "count_max: " ); |
639 | cfun->cfg->count_max.dump (f: dump_file); |
640 | fprintf (stream: dump_file, format: "\n" ); |
641 | } |
642 | FOR_EACH_BB_FN (bb, cfun) |
643 | { |
644 | rtx_insn *label = BB_HEAD (bb); |
645 | bool has_fallthru = 0; |
646 | edge e; |
647 | edge_iterator ei; |
648 | |
649 | if (!LABEL_P (label) |
650 | || optimize_bb_for_size_p (bb)) |
651 | { |
652 | if (dump_file) |
653 | fprintf (stream: dump_file, |
654 | format: "BB %4i loop %2i loop_depth %2i skipped.\n" , |
655 | bb->index, |
656 | bb->loop_father->num, |
657 | bb_loop_depth (bb)); |
658 | continue; |
659 | } |
660 | max_alignment = LABEL_ALIGN (label); |
661 | profile_count fallthru_count = profile_count::zero (); |
662 | profile_count branch_count = profile_count::zero (); |
663 | |
664 | FOR_EACH_EDGE (e, ei, bb->preds) |
665 | { |
666 | if (e->flags & EDGE_FALLTHRU) |
667 | has_fallthru = 1, fallthru_count += e->count (); |
668 | else |
669 | branch_count += e->count (); |
670 | } |
671 | if (dump_file) |
672 | { |
673 | fprintf (stream: dump_file, format: "BB %4i loop %2i loop_depth" |
674 | " %2i fall " , |
675 | bb->index, bb->loop_father->num, |
676 | bb_loop_depth (bb)); |
677 | fallthru_count.dump (f: dump_file); |
678 | fprintf (stream: dump_file, format: " branch " ); |
679 | branch_count.dump (f: dump_file); |
680 | if (!bb->loop_father->inner && bb->loop_father->num) |
681 | fprintf (stream: dump_file, format: " inner_loop" ); |
682 | if (bb->loop_father->header == bb) |
683 | fprintf (stream: dump_file, format: " loop_header" ); |
684 | fprintf (stream: dump_file, format: "\n" ); |
685 | } |
686 | if (!fallthru_count.initialized_p () || !branch_count.initialized_p ()) |
687 | continue; |
688 | |
689 | /* There are two purposes for aligning a block with no incoming fallthru edge: |
690 | 1) to avoid fetch stalls when the branch destination is near a cache boundary |
691 | 2) to improve cache efficiency in case the previous block is not executed |
692 | (so it does not need to be in the cache). |
693 | |
694 | To catch the first case, we align frequently executed blocks. |
695 | To catch the second, we align blocks that are executed more frequently |
696 | than their predecessor, when the predecessor is likely not to be executed |
697 | at all when the function is called. */ |
698 | |
699 | if (!has_fallthru |
700 | && (branch_count > count_threshold |
701 | || (bb->count > bb->prev_bb->count * 10 |
702 | && (bb->prev_bb->count |
703 | <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count / 2)))) |
704 | { |
705 | align_flags alignment = JUMP_ALIGN (label); |
706 | if (dump_file) |
707 | fprintf (stream: dump_file, format: " jump alignment added.\n" ); |
708 | max_alignment = align_flags::max (f0: max_alignment, f1: alignment); |
709 | } |
710 | /* In case the block is frequent and reached mostly by non-fallthru edges, |
711 | align it. It is most likely the first block of a loop. */ |
712 | if (has_fallthru |
713 | && !(single_succ_p (bb) |
714 | && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
715 | && optimize_bb_for_speed_p (bb) |
716 | && branch_count + fallthru_count > count_threshold |
717 | && (branch_count > fallthru_count * param_align_loop_iterations)) |
718 | { |
719 | align_flags alignment = LOOP_ALIGN (label); |
720 | if (dump_file) |
721 | fprintf (stream: dump_file, format: " internal loop alignment added.\n" ); |
722 | max_alignment = align_flags::max (f0: max_alignment, f1: alignment); |
723 | } |
724 | LABEL_TO_ALIGNMENT (label) = max_alignment; |
725 | } |
726 | |
727 | loop_optimizer_finalize (); |
728 | free_dominance_info (CDI_DOMINATORS); |
729 | } |
730 | |
731 | /* Grow the LABEL_ALIGN array after new labels are created. */ |
732 | |
733 | static void |
734 | grow_label_align (void) |
735 | { |
736 | int old = max_labelno; |
737 | int n_labels; |
738 | int n_old_labels; |
739 | |
740 | max_labelno = max_label_num (); |
741 | |
742 | n_labels = max_labelno - min_labelno + 1; |
743 | n_old_labels = old - min_labelno + 1; |
744 | |
745 | label_align.safe_grow_cleared (len: n_labels, exact: true); |
746 | |
747 | /* The range of labels grows monotonically in the function. Failing here |
748 | means that the initialization of the array got lost. */ |
749 | gcc_assert (n_old_labels <= n_labels); |
750 | } |
751 | |
752 | /* Update the already computed alignment information. LABEL_PAIRS is a vector |
753 | made up of pairs of labels for which the alignment information of the first |
754 | element will be copied from that of the second element. */ |
755 | |
756 | void |
757 | update_alignments (vec<rtx> &label_pairs) |
758 | { |
759 | unsigned int i = 0; |
760 | rtx iter, label = NULL_RTX; |
761 | |
762 | if (max_labelno != max_label_num ()) |
763 | grow_label_align (); |
764 | |
765 | FOR_EACH_VEC_ELT (label_pairs, i, iter) |
766 | if (i & 1) |
767 | LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter); |
768 | else |
769 | label = iter; |
770 | } |
771 | |
772 | namespace { |
773 | |
774 | const pass_data pass_data_compute_alignments = |
775 | { |
776 | .type: RTL_PASS, /* type */ |
777 | .name: "alignments" , /* name */ |
778 | .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */ |
779 | .tv_id: TV_NONE, /* tv_id */ |
780 | .properties_required: 0, /* properties_required */ |
781 | .properties_provided: 0, /* properties_provided */ |
782 | .properties_destroyed: 0, /* properties_destroyed */ |
783 | .todo_flags_start: 0, /* todo_flags_start */ |
784 | .todo_flags_finish: 0, /* todo_flags_finish */ |
785 | }; |
786 | |
787 | class pass_compute_alignments : public rtl_opt_pass |
788 | { |
789 | public: |
790 | pass_compute_alignments (gcc::context *ctxt) |
791 | : rtl_opt_pass (pass_data_compute_alignments, ctxt) |
792 | {} |
793 | |
794 | /* opt_pass methods: */ |
795 | unsigned int execute (function *) final override |
796 | { |
797 | compute_alignments (); |
798 | return 0; |
799 | } |
800 | |
801 | }; // class pass_compute_alignments |
802 | |
803 | } // anon namespace |
804 | |
805 | rtl_opt_pass * |
806 | make_pass_compute_alignments (gcc::context *ctxt) |
807 | { |
808 | return new pass_compute_alignments (ctxt); |
809 | } |
810 | |
811 | |
812 | /* Make a pass over all insns and compute their actual lengths by shortening |
813 | any branches of variable length if possible. */ |
814 | |
815 | /* shorten_branches might be called multiple times: for example, the SH |
816 | port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG. |
817 | In order to do this, it needs proper length information, which it obtains |
818 | by calling shorten_branches. This cannot be collapsed with |
819 | shorten_branches itself into a single pass unless we also want to integrate |
820 | reorg.cc, since the branch splitting exposes new instructions with delay |
821 | slots. */ |
822 | |
823 | void |
824 | shorten_branches (rtx_insn *first) |
825 | { |
826 | rtx_insn *insn; |
827 | int max_uid; |
828 | int i; |
829 | rtx_insn *seq; |
830 | bool something_changed = true; |
831 | char *varying_length; |
832 | rtx body; |
833 | int uid; |
834 | rtx align_tab[MAX_CODE_ALIGN + 1]; |
835 | |
836 | /* Compute maximum UID and allocate label_align / uid_shuid. */ |
837 | max_uid = get_max_uid (); |
838 | |
839 | /* Free uid_shuid before reallocating it. */ |
840 | free (ptr: uid_shuid); |
841 | |
842 | uid_shuid = XNEWVEC (int, max_uid); |
843 | |
844 | if (max_labelno != max_label_num ()) |
845 | grow_label_align (); |
846 | |
847 | /* Initialize label_align and set up uid_shuid to be strictly |
848 | monotonically rising with insn order. */ |
849 | /* We use max_alignment here to keep track of the maximum alignment we want |
850 | to impose on the next CODE_LABEL (or the current one if we are processing |
851 | the CODE_LABEL itself). */ |
852 | |
853 | align_flags max_alignment; |
854 | |
855 | for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn)) |
856 | { |
857 | INSN_SHUID (insn) = i++; |
858 | if (INSN_P (insn)) |
859 | continue; |
860 | |
861 | if (rtx_code_label *label = dyn_cast <rtx_code_label *> (p: insn)) |
862 | { |
863 | /* Merge in alignments computed by compute_alignments. */ |
864 | align_flags alignment = LABEL_TO_ALIGNMENT (label); |
865 | max_alignment = align_flags::max (f0: max_alignment, f1: alignment); |
866 | |
867 | rtx_jump_table_data *table = jump_table_for_label (label); |
868 | if (!table) |
869 | { |
870 | align_flags alignment = LABEL_ALIGN (label); |
871 | max_alignment = align_flags::max (f0: max_alignment, f1: alignment); |
872 | } |
873 | /* ADDR_VECs only take room if read-only data goes into the text |
874 | section. */ |
875 | if ((JUMP_TABLES_IN_TEXT_SECTION |
876 | || readonly_data_section == text_section) |
877 | && table) |
878 | { |
879 | align_flags alignment = align_flags (ADDR_VEC_ALIGN (table)); |
880 | max_alignment = align_flags::max (f0: max_alignment, f1: alignment); |
881 | } |
882 | LABEL_TO_ALIGNMENT (label) = max_alignment; |
883 | max_alignment = align_flags (); |
884 | } |
885 | else if (BARRIER_P (insn)) |
886 | { |
887 | rtx_insn *label; |
888 | |
889 | for (label = insn; label && ! INSN_P (label); |
890 | label = NEXT_INSN (insn: label)) |
891 | if (LABEL_P (label)) |
892 | { |
893 | align_flags alignment |
894 | = align_flags (LABEL_ALIGN_AFTER_BARRIER (insn)); |
895 | max_alignment = align_flags::max (f0: max_alignment, f1: alignment); |
896 | break; |
897 | } |
898 | } |
899 | } |
900 | if (!HAVE_ATTR_length) |
901 | return; |
902 | |
903 | /* Allocate the rest of the arrays. */ |
904 | insn_lengths = XNEWVEC (int, max_uid); |
905 | insn_lengths_max_uid = max_uid; |
906 | /* Syntax errors can lead to labels being outside of the main insn stream. |
907 | Initialize insn_addresses, so that we get reproducible results. */ |
908 | INSN_ADDRESSES_ALLOC (max_uid); |
909 | |
910 | varying_length = XCNEWVEC (char, max_uid); |
911 | |
912 | /* Initialize uid_align. We scan instructions |
913 | from end to start, and keep in align_tab[n] the last seen insn |
914 | that does an alignment of at least n+1, i.e. the successor |
915 | in the alignment chain for an insn that does / has a known |
916 | alignment of n. */ |
917 | uid_align = XCNEWVEC (rtx, max_uid); |
918 | |
919 | for (i = MAX_CODE_ALIGN + 1; --i >= 0;) |
920 | align_tab[i] = NULL_RTX; |
921 | seq = get_last_insn (); |
922 | for (; seq; seq = PREV_INSN (insn: seq)) |
923 | { |
924 | int uid = INSN_UID (insn: seq); |
925 | int log; |
926 | log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq).levels[0].log : 0); |
927 | uid_align[uid] = align_tab[0]; |
928 | if (log) |
929 | { |
930 | /* Found an alignment label. */ |
931 | gcc_checking_assert (log < MAX_CODE_ALIGN + 1); |
932 | uid_align[uid] = align_tab[log]; |
933 | for (i = log - 1; i >= 0; i--) |
934 | align_tab[i] = seq; |
935 | } |
936 | } |
937 | |
938 | /* When optimizing, we start assuming minimum length, and keep increasing |
939 | lengths as we find the need for this, till nothing changes. |
940 | When not optimizing, we start assuming maximum lengths, and |
941 | do a single pass to update the lengths. */ |
942 | bool increasing = optimize != 0; |
943 | |
944 | #ifdef CASE_VECTOR_SHORTEN_MODE |
945 | if (optimize) |
946 | { |
947 | /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum |
948 | label fields. */ |
949 | |
950 | int min_shuid = INSN_SHUID (get_insns ()) - 1; |
951 | int max_shuid = INSN_SHUID (get_last_insn ()) + 1; |
952 | int rel; |
953 | |
954 | for (insn = first; insn != 0; insn = NEXT_INSN (insn)) |
955 | { |
956 | rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat; |
957 | int len, i, min, max, insn_shuid; |
958 | int min_align; |
959 | addr_diff_vec_flags flags; |
960 | |
961 | if (! JUMP_TABLE_DATA_P (insn) |
962 | || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC) |
963 | continue; |
964 | pat = PATTERN (insn); |
965 | len = XVECLEN (pat, 1); |
966 | gcc_assert (len > 0); |
967 | min_align = MAX_CODE_ALIGN; |
968 | for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--) |
969 | { |
970 | rtx lab = XEXP (XVECEXP (pat, 1, i), 0); |
971 | int shuid = INSN_SHUID (lab); |
972 | if (shuid < min) |
973 | { |
974 | min = shuid; |
975 | min_lab = lab; |
976 | } |
977 | if (shuid > max) |
978 | { |
979 | max = shuid; |
980 | max_lab = lab; |
981 | } |
982 | |
983 | int label_alignment = LABEL_TO_ALIGNMENT (lab).levels[0].log; |
984 | if (min_align > label_alignment) |
985 | min_align = label_alignment; |
986 | } |
987 | XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab); |
988 | XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab); |
989 | insn_shuid = INSN_SHUID (insn); |
990 | rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0)); |
991 | memset (&flags, 0, sizeof (flags)); |
992 | flags.min_align = min_align; |
993 | flags.base_after_vec = rel > insn_shuid; |
994 | flags.min_after_vec = min > insn_shuid; |
995 | flags.max_after_vec = max > insn_shuid; |
996 | flags.min_after_base = min > rel; |
997 | flags.max_after_base = max > rel; |
998 | ADDR_DIFF_VEC_FLAGS (pat) = flags; |
999 | |
1000 | if (increasing) |
1001 | PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat)); |
1002 | } |
1003 | } |
1004 | #endif /* CASE_VECTOR_SHORTEN_MODE */ |
1005 | |
1006 | /* Compute initial lengths, addresses, and varying flags for each insn. */ |
1007 | int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length; |
1008 | |
1009 | for (insn_current_address = 0, insn = first; |
1010 | insn != 0; |
1011 | insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn)) |
1012 | { |
1013 | uid = INSN_UID (insn); |
1014 | |
1015 | insn_lengths[uid] = 0; |
1016 | |
1017 | if (LABEL_P (insn)) |
1018 | { |
1019 | int log = LABEL_TO_ALIGNMENT (insn).levels[0].log; |
1020 | if (log) |
1021 | { |
1022 | int align = 1 << log; |
1023 | int new_address = (insn_current_address + align - 1) & -align; |
1024 | insn_lengths[uid] = new_address - insn_current_address; |
1025 | } |
1026 | } |
1027 | |
1028 | INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid]; |
1029 | |
1030 | if (NOTE_P (insn) || BARRIER_P (insn) |
1031 | || LABEL_P (insn) || DEBUG_INSN_P (insn)) |
1032 | continue; |
1033 | if (insn->deleted ()) |
1034 | continue; |
1035 | |
1036 | body = PATTERN (insn); |
1037 | if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (p: insn)) |
1038 | { |
1039 | /* This only takes room if read-only data goes into the text |
1040 | section. */ |
1041 | if (JUMP_TABLES_IN_TEXT_SECTION |
1042 | || readonly_data_section == text_section) |
1043 | insn_lengths[uid] = (XVECLEN (body, |
1044 | GET_CODE (body) == ADDR_DIFF_VEC) |
1045 | * GET_MODE_SIZE (mode: table->get_data_mode ())); |
1046 | /* Alignment is handled by ADDR_VEC_ALIGN. */ |
1047 | } |
1048 | else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0) |
1049 | insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn); |
1050 | else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (p: body)) |
1051 | { |
1052 | int i; |
1053 | int const_delay_slots; |
1054 | if (DELAY_SLOTS) |
1055 | const_delay_slots = const_num_delay_slots (body_seq->insn (index: 0)); |
1056 | else |
1057 | const_delay_slots = 0; |
1058 | |
1059 | int (*inner_length_fun) (rtx_insn *) |
1060 | = const_delay_slots ? length_fun : insn_default_length; |
1061 | /* Inside a delay slot sequence, we do not do any branch shortening |
1062 | if the shortening could change the number of delay slots |
1063 | of the branch. */ |
1064 | for (i = 0; i < body_seq->len (); i++) |
1065 | { |
1066 | rtx_insn *inner_insn = body_seq->insn (index: i); |
1067 | int inner_uid = INSN_UID (insn: inner_insn); |
1068 | int inner_length; |
1069 | |
1070 | if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT |
1071 | || asm_noperands (PATTERN (insn: inner_insn)) >= 0) |
1072 | inner_length = (asm_insn_count (PATTERN (insn: inner_insn)) |
1073 | * insn_default_length (inner_insn)); |
1074 | else |
1075 | inner_length = inner_length_fun (inner_insn); |
1076 | |
1077 | insn_lengths[inner_uid] = inner_length; |
1078 | if (const_delay_slots) |
1079 | { |
1080 | if ((varying_length[inner_uid] |
1081 | = insn_variable_length_p (inner_insn)) != 0) |
1082 | varying_length[uid] = 1; |
1083 | INSN_ADDRESSES (inner_uid) = (insn_current_address |
1084 | + insn_lengths[uid]); |
1085 | } |
1086 | else |
1087 | varying_length[inner_uid] = 0; |
1088 | insn_lengths[uid] += inner_length; |
1089 | } |
1090 | } |
1091 | else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER) |
1092 | { |
1093 | insn_lengths[uid] = length_fun (insn); |
1094 | varying_length[uid] = insn_variable_length_p (insn); |
1095 | } |
1096 | |
1097 | /* If needed, do any adjustment. */ |
1098 | #ifdef ADJUST_INSN_LENGTH |
1099 | ADJUST_INSN_LENGTH (insn, insn_lengths[uid]); |
1100 | if (insn_lengths[uid] < 0) |
1101 | fatal_insn ("negative insn length" , insn); |
1102 | #endif |
1103 | } |
1104 | |
1105 | /* Now loop over all the insns finding varying length insns. For each, |
1106 | get the current insn length. If it has changed, reflect the change. |
1107 | When nothing changes for a full pass, we are done. */ |
1108 | |
1109 | while (something_changed) |
1110 | { |
1111 | something_changed = false; |
1112 | insn_current_align = MAX_CODE_ALIGN - 1; |
1113 | for (insn_current_address = 0, insn = first; |
1114 | insn != 0; |
1115 | insn = NEXT_INSN (insn)) |
1116 | { |
1117 | int new_length; |
1118 | #ifdef ADJUST_INSN_LENGTH |
1119 | int tmp_length; |
1120 | #endif |
1121 | int length_align; |
1122 | |
1123 | uid = INSN_UID (insn); |
1124 | |
1125 | if (rtx_code_label *label = dyn_cast <rtx_code_label *> (p: insn)) |
1126 | { |
1127 | int log = LABEL_TO_ALIGNMENT (label).levels[0].log; |
1128 | |
1129 | #ifdef CASE_VECTOR_SHORTEN_MODE |
1130 | /* If the mode of a following jump table was changed, we |
1131 | may need to update the alignment of this label. */ |
1132 | |
1133 | if (JUMP_TABLES_IN_TEXT_SECTION |
1134 | || readonly_data_section == text_section) |
1135 | { |
1136 | rtx_jump_table_data *table = jump_table_for_label (label); |
1137 | if (table) |
1138 | { |
1139 | int newlog = ADDR_VEC_ALIGN (table); |
1140 | if (newlog != log) |
1141 | { |
1142 | log = newlog; |
1143 | LABEL_TO_ALIGNMENT (insn) = log; |
1144 | something_changed = true; |
1145 | } |
1146 | } |
1147 | } |
1148 | #endif |
1149 | |
1150 | if (log > insn_current_align) |
1151 | { |
1152 | int align = 1 << log; |
1153 | int new_address= (insn_current_address + align - 1) & -align; |
1154 | insn_lengths[uid] = new_address - insn_current_address; |
1155 | insn_current_align = log; |
1156 | insn_current_address = new_address; |
1157 | } |
1158 | else |
1159 | insn_lengths[uid] = 0; |
1160 | INSN_ADDRESSES (uid) = insn_current_address; |
1161 | continue; |
1162 | } |
1163 | |
1164 | length_align = INSN_LENGTH_ALIGNMENT (insn); |
1165 | if (length_align < insn_current_align) |
1166 | insn_current_align = length_align; |
1167 | |
1168 | insn_last_address = INSN_ADDRESSES (uid); |
1169 | INSN_ADDRESSES (uid) = insn_current_address; |
1170 | |
1171 | #ifdef CASE_VECTOR_SHORTEN_MODE |
1172 | if (optimize |
1173 | && JUMP_TABLE_DATA_P (insn) |
1174 | && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC) |
1175 | { |
1176 | rtx_jump_table_data *table = as_a <rtx_jump_table_data *> (insn); |
1177 | rtx body = PATTERN (insn); |
1178 | int old_length = insn_lengths[uid]; |
1179 | rtx_insn *rel_lab = |
1180 | safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0)); |
1181 | rtx min_lab = XEXP (XEXP (body, 2), 0); |
1182 | rtx max_lab = XEXP (XEXP (body, 3), 0); |
1183 | int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab)); |
1184 | int min_addr = INSN_ADDRESSES (INSN_UID (min_lab)); |
1185 | int max_addr = INSN_ADDRESSES (INSN_UID (max_lab)); |
1186 | rtx_insn *prev; |
1187 | int rel_align = 0; |
1188 | addr_diff_vec_flags flags; |
1189 | scalar_int_mode vec_mode; |
1190 | |
1191 | /* Avoid automatic aggregate initialization. */ |
1192 | flags = ADDR_DIFF_VEC_FLAGS (body); |
1193 | |
1194 | /* Try to find a known alignment for rel_lab. */ |
1195 | for (prev = rel_lab; |
1196 | prev |
1197 | && ! insn_lengths[INSN_UID (prev)] |
1198 | && ! (varying_length[INSN_UID (prev)] & 1); |
1199 | prev = PREV_INSN (prev)) |
1200 | if (varying_length[INSN_UID (prev)] & 2) |
1201 | { |
1202 | rel_align = LABEL_TO_ALIGNMENT (prev).levels[0].log; |
1203 | break; |
1204 | } |
1205 | |
1206 | /* See the comment on addr_diff_vec_flags in rtl.h for the |
1207 | meaning of the flags values. base: REL_LAB vec: INSN */ |
1208 | /* Anything after INSN still has addresses from the last |
1209 | pass; adjust these so that they reflect our current |
1210 | estimate for this pass. */ |
1211 | if (flags.base_after_vec) |
1212 | rel_addr += insn_current_address - insn_last_address; |
1213 | if (flags.min_after_vec) |
1214 | min_addr += insn_current_address - insn_last_address; |
1215 | if (flags.max_after_vec) |
1216 | max_addr += insn_current_address - insn_last_address; |
1217 | /* We want to know the worst case, i.e. lowest possible value |
1218 | for the offset of MIN_LAB. If MIN_LAB is after REL_LAB, |
1219 | its offset is positive, and we have to be wary of code shrink; |
1220 | otherwise, it is negative, and we have to be wary of code |
1221 | size increase. */ |
1222 | if (flags.min_after_base) |
1223 | { |
1224 | /* If INSN is between REL_LAB and MIN_LAB, the size |
1225 | changes we are about to make can change the alignment |
1226 | within the observed offset, therefore we have to break |
1227 | it up into two parts that are independent. */ |
1228 | if (! flags.base_after_vec && flags.min_after_vec) |
1229 | { |
1230 | min_addr -= align_fuzz (rel_lab, insn, rel_align, 0); |
1231 | min_addr -= align_fuzz (insn, min_lab, 0, 0); |
1232 | } |
1233 | else |
1234 | min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0); |
1235 | } |
1236 | else |
1237 | { |
1238 | if (flags.base_after_vec && ! flags.min_after_vec) |
1239 | { |
1240 | min_addr -= align_fuzz (min_lab, insn, 0, ~0); |
1241 | min_addr -= align_fuzz (insn, rel_lab, 0, ~0); |
1242 | } |
1243 | else |
1244 | min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0); |
1245 | } |
1246 | /* Likewise, determine the worst case, i.e. highest possible |
1247 | value for the offset of MAX_LAB. */ |
1248 | if (flags.max_after_base) |
1249 | { |
1250 | if (! flags.base_after_vec && flags.max_after_vec) |
1251 | { |
1252 | max_addr += align_fuzz (rel_lab, insn, rel_align, ~0); |
1253 | max_addr += align_fuzz (insn, max_lab, 0, ~0); |
1254 | } |
1255 | else |
1256 | max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0); |
1257 | } |
1258 | else |
1259 | { |
1260 | if (flags.base_after_vec && ! flags.max_after_vec) |
1261 | { |
1262 | max_addr += align_fuzz (max_lab, insn, 0, 0); |
1263 | max_addr += align_fuzz (insn, rel_lab, 0, 0); |
1264 | } |
1265 | else |
1266 | max_addr += align_fuzz (max_lab, rel_lab, 0, 0); |
1267 | } |
1268 | vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr, |
1269 | max_addr - rel_addr, body); |
1270 | if (!increasing |
1271 | || (GET_MODE_SIZE (vec_mode) |
1272 | >= GET_MODE_SIZE (table->get_data_mode ()))) |
1273 | PUT_MODE (body, vec_mode); |
1274 | if (JUMP_TABLES_IN_TEXT_SECTION |
1275 | || readonly_data_section == text_section) |
1276 | { |
1277 | insn_lengths[uid] |
1278 | = (XVECLEN (body, 1) |
1279 | * GET_MODE_SIZE (table->get_data_mode ())); |
1280 | insn_current_address += insn_lengths[uid]; |
1281 | if (insn_lengths[uid] != old_length) |
1282 | something_changed = true; |
1283 | } |
1284 | |
1285 | continue; |
1286 | } |
1287 | #endif /* CASE_VECTOR_SHORTEN_MODE */ |
1288 | |
1289 | if (! (varying_length[uid])) |
1290 | { |
1291 | if (NONJUMP_INSN_P (insn) |
1292 | && GET_CODE (PATTERN (insn)) == SEQUENCE) |
1293 | { |
1294 | int i; |
1295 | |
1296 | body = PATTERN (insn); |
1297 | for (i = 0; i < XVECLEN (body, 0); i++) |
1298 | { |
1299 | rtx inner_insn = XVECEXP (body, 0, i); |
1300 | int inner_uid = INSN_UID (insn: inner_insn); |
1301 | |
1302 | INSN_ADDRESSES (inner_uid) = insn_current_address; |
1303 | |
1304 | insn_current_address += insn_lengths[inner_uid]; |
1305 | } |
1306 | } |
1307 | else |
1308 | insn_current_address += insn_lengths[uid]; |
1309 | |
1310 | continue; |
1311 | } |
1312 | |
1313 | if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
1314 | { |
1315 | rtx_sequence *seqn = as_a <rtx_sequence *> (p: PATTERN (insn)); |
1316 | int i; |
1317 | |
1318 | body = PATTERN (insn); |
1319 | new_length = 0; |
1320 | for (i = 0; i < seqn->len (); i++) |
1321 | { |
1322 | rtx_insn *inner_insn = seqn->insn (index: i); |
1323 | int inner_uid = INSN_UID (insn: inner_insn); |
1324 | int inner_length; |
1325 | |
1326 | INSN_ADDRESSES (inner_uid) = insn_current_address; |
1327 | |
1328 | /* insn_current_length returns 0 for insns with a |
1329 | non-varying length. */ |
1330 | if (! varying_length[inner_uid]) |
1331 | inner_length = insn_lengths[inner_uid]; |
1332 | else |
1333 | inner_length = insn_current_length (inner_insn); |
1334 | |
1335 | if (inner_length != insn_lengths[inner_uid]) |
1336 | { |
1337 | if (!increasing || inner_length > insn_lengths[inner_uid]) |
1338 | { |
1339 | insn_lengths[inner_uid] = inner_length; |
1340 | something_changed = true; |
1341 | } |
1342 | else |
1343 | inner_length = insn_lengths[inner_uid]; |
1344 | } |
1345 | insn_current_address += inner_length; |
1346 | new_length += inner_length; |
1347 | } |
1348 | } |
1349 | else |
1350 | { |
1351 | new_length = insn_current_length (insn); |
1352 | insn_current_address += new_length; |
1353 | } |
1354 | |
1355 | #ifdef ADJUST_INSN_LENGTH |
1356 | /* If needed, do any adjustment. */ |
1357 | tmp_length = new_length; |
1358 | ADJUST_INSN_LENGTH (insn, new_length); |
1359 | insn_current_address += (new_length - tmp_length); |
1360 | #endif |
1361 | |
1362 | if (new_length != insn_lengths[uid] |
1363 | && (!increasing || new_length > insn_lengths[uid])) |
1364 | { |
1365 | insn_lengths[uid] = new_length; |
1366 | something_changed = true; |
1367 | } |
1368 | else |
1369 | insn_current_address += insn_lengths[uid] - new_length; |
1370 | } |
1371 | /* For a non-optimizing compile, do only a single pass. */ |
1372 | if (!increasing) |
1373 | break; |
1374 | } |
1375 | crtl->max_insn_address = insn_current_address; |
1376 | free (ptr: varying_length); |
1377 | } |
1378 | |
1379 | /* Given the body of an INSN known to be generated by an ASM statement, return |
1380 | the number of machine instructions likely to be generated for this insn. |
1381 | This is used to compute its length. */ |
1382 | |
1383 | static int |
1384 | asm_insn_count (rtx body) |
1385 | { |
1386 | const char *templ; |
1387 | |
1388 | if (GET_CODE (body) == ASM_INPUT) |
1389 | templ = XSTR (body, 0); |
1390 | else |
1391 | templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL); |
1392 | |
1393 | return asm_str_count (templ); |
1394 | } |
1395 | |
1396 | /* Return the number of machine instructions likely to be generated for the |
1397 | inline-asm template. */ |
1398 | int |
1399 | asm_str_count (const char *templ) |
1400 | { |
1401 | int count = 1; |
1402 | |
1403 | if (!*templ) |
1404 | return 0; |
1405 | |
1406 | for (; *templ; templ++) |
1407 | if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ) |
1408 | || *templ == '\n') |
1409 | count++; |
1410 | |
1411 | return count; |
1412 | } |
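
/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR (';'):
   asm_str_count ("") == 0, asm_str_count ("nop") == 1, and
   asm_str_count ("insn1; insn2\n insn3") == 3 (one ';' plus one '\n'
   separator on top of the initial count of 1).  */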
1413 | |
1414 | /* Return true if DWARF2 debug info can be emitted for DECL. */ |
1415 | |
1416 | static bool |
1417 | dwarf2_debug_info_emitted_p (tree decl) |
1418 | { |
1419 | /* When DWARF2 debug info is not generated internally. */ |
1420 | if (!dwarf_debuginfo_p () && !dwarf_based_debuginfo_p ()) |
1421 | return false; |
1422 | |
1423 | if (DECL_IGNORED_P (decl)) |
1424 | return false; |
1425 | |
1426 | return true; |
1427 | } |
1428 | |
1429 | /* Return scope resulting from combination of S1 and S2. */ |
1430 | static tree |
1431 | choose_inner_scope (tree s1, tree s2) |
1432 | { |
1433 | if (!s1) |
1434 | return s2; |
1435 | if (!s2) |
1436 | return s1; |
1437 | if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2)) |
1438 | return s1; |
1439 | return s2; |
1440 | } |
1441 | |
1442 | /* Emit lexical block notes needed to change scope from S1 to S2. */ |
1443 | |
1444 | static void |
1445 | change_scope (rtx_insn *orig_insn, tree s1, tree s2) |
1446 | { |
1447 | rtx_insn *insn = orig_insn; |
1448 | tree com = NULL_TREE; |
1449 | tree ts1 = s1, ts2 = s2; |
1450 | tree s; |
1451 | |
1452 | while (ts1 != ts2) |
1453 | { |
1454 | gcc_assert (ts1 && ts2); |
1455 | if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2)) |
1456 | ts1 = BLOCK_SUPERCONTEXT (ts1); |
1457 | else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2)) |
1458 | ts2 = BLOCK_SUPERCONTEXT (ts2); |
1459 | else |
1460 | { |
1461 | ts1 = BLOCK_SUPERCONTEXT (ts1); |
1462 | ts2 = BLOCK_SUPERCONTEXT (ts2); |
1463 | } |
1464 | } |
1465 | com = ts1; |
1466 | |
1467 | /* Close scopes. */ |
1468 | s = s1; |
1469 | while (s != com) |
1470 | { |
1471 | rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn); |
1472 | NOTE_BLOCK (note) = s; |
1473 | s = BLOCK_SUPERCONTEXT (s); |
1474 | } |
1475 | |
1476 | /* Open scopes. */ |
1477 | s = s2; |
1478 | while (s != com) |
1479 | { |
1480 | insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn); |
1481 | NOTE_BLOCK (insn) = s; |
1482 | s = BLOCK_SUPERCONTEXT (s); |
1483 | } |
1484 | } |
1485 | |
1486 | /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based |
1487 | on the scope tree and the newly reordered instructions. */ |
1488 | |
1489 | static void |
1490 | reemit_insn_block_notes (void) |
1491 | { |
1492 | tree cur_block = DECL_INITIAL (cfun->decl); |
1493 | rtx_insn *insn; |
1494 | |
1495 | insn = get_insns (); |
1496 | for (; insn; insn = NEXT_INSN (insn)) |
1497 | { |
1498 | tree this_block; |
1499 | |
1500 | /* Prevent lexical blocks from straddling section boundaries. */ |
1501 | if (NOTE_P (insn)) |
1502 | switch (NOTE_KIND (insn)) |
1503 | { |
1504 | case NOTE_INSN_SWITCH_TEXT_SECTIONS: |
1505 | { |
1506 | for (tree s = cur_block; s != DECL_INITIAL (cfun->decl); |
1507 | s = BLOCK_SUPERCONTEXT (s)) |
1508 | { |
1509 | rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn); |
1510 | NOTE_BLOCK (note) = s; |
1511 | note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn); |
1512 | NOTE_BLOCK (note) = s; |
1513 | } |
1514 | } |
1515 | break; |
1516 | |
1517 | case NOTE_INSN_BEGIN_STMT: |
1518 | case NOTE_INSN_INLINE_ENTRY: |
1519 | this_block = LOCATION_BLOCK (NOTE_MARKER_LOCATION (insn)); |
1520 | if (!this_block) |
1521 | continue; |
1522 | goto set_cur_block_to_this_block; |
1523 | |
1524 | default: |
1525 | continue; |
1526 | } |
1527 | |
1528 | if (!active_insn_p (insn)) |
1529 | continue; |
1530 | |
1531 | /* Avoid putting scope notes between jump table and its label. */ |
1532 | if (JUMP_TABLE_DATA_P (insn)) |
1533 | continue; |
1534 | |
1535 | this_block = insn_scope (insn); |
1536 | /* For sequences compute scope resulting from merging all scopes |
1537 | of instructions nested inside. */ |
1538 | if (rtx_sequence *body = dyn_cast <rtx_sequence *> (p: PATTERN (insn))) |
1539 | { |
1540 | int i; |
1541 | |
1542 | this_block = NULL; |
1543 | for (i = 0; i < body->len (); i++) |
1544 | this_block = choose_inner_scope (s1: this_block, |
1545 | s2: insn_scope (body->insn (index: i))); |
1546 | } |
1547 | if (! this_block) |
1548 | { |
1549 | if (INSN_LOCATION (insn) == UNKNOWN_LOCATION) |
1550 | continue; |
1551 | else |
1552 | this_block = DECL_INITIAL (cfun->decl); |
1553 | } |
1554 | |
1555 | set_cur_block_to_this_block: |
1556 | if (this_block != cur_block) |
1557 | { |
1558 | change_scope (orig_insn: insn, s1: cur_block, s2: this_block); |
1559 | cur_block = this_block; |
1560 | } |
1561 | } |
1562 | |
1563 | /* change_scope emits before the insn, not after. */ |
1564 | rtx_note *note = emit_note (NOTE_INSN_DELETED); |
1565 | change_scope (orig_insn: note, s1: cur_block, DECL_INITIAL (cfun->decl)); |
1566 | delete_insn (note); |
1567 | |
1568 | reorder_blocks (); |
1569 | } |
1570 | |
1571 | static const char *some_local_dynamic_name; |
1572 | |
1573 | /* Locate some local-dynamic symbol still in use by this function |
1574 | so that we can print its name in local-dynamic base patterns. |
1575 | Return null if there are no local-dynamic references. */ |
1576 | |
1577 | const char * |
1578 | get_some_local_dynamic_name () |
1579 | { |
1580 | subrtx_iterator::array_type array; |
1581 | rtx_insn *insn; |
1582 | |
1583 | if (some_local_dynamic_name) |
1584 | return some_local_dynamic_name; |
1585 | |
1586 | for (insn = get_insns (); insn ; insn = NEXT_INSN (insn)) |
1587 | if (NONDEBUG_INSN_P (insn)) |
1588 | FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL) |
1589 | { |
1590 | const_rtx x = *iter; |
1591 | if (GET_CODE (x) == SYMBOL_REF) |
1592 | { |
1593 | if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC) |
1594 | return some_local_dynamic_name = XSTR (x, 0); |
1595 | if (CONSTANT_POOL_ADDRESS_P (x)) |
1596 | iter.substitute (x: get_pool_constant (x)); |
1597 | } |
1598 | } |
1599 | |
1600 | return 0; |
1601 | } |
1602 | |
1603 | /* Arrange for us to emit a source location note before any further |
1604 | real insns or section changes, by setting the SEEN_NEXT_VIEW bit in |
1605 | *SEEN, as long as we are keeping track of location views. The bit |
1606 | indicates we have referenced the next view at the current PC, so we |
1607 | have to emit it. This should be called next to the var_location |
1608 | debug hook. */ |
1609 | |
1610 | static inline void |
1611 | set_next_view_needed (int *seen) |
1612 | { |
1613 | if (debug_variable_location_views) |
1614 | *seen |= SEEN_NEXT_VIEW; |
1615 | } |
1616 | |
1617 | /* Clear the flag in *SEEN indicating we need to emit the next view. |
1618 | This should be called next to the source_line debug hook. */ |
1619 | |
1620 | static inline void |
1621 | clear_next_view_needed (int *seen) |
1622 | { |
1623 | *seen &= ~SEEN_NEXT_VIEW; |
1624 | } |
1625 | |
1626 | /* Test whether we have a pending request to emit the next view in |
1627 | *SEEN, and emit it if needed, clearing the request bit. */ |
1628 | |
1629 | static inline void |
1630 | maybe_output_next_view (int *seen) |
1631 | { |
1632 | if ((*seen & SEEN_NEXT_VIEW) != 0) |
1633 | { |
1634 | clear_next_view_needed (seen); |
1635 | (*debug_hooks->source_line) (last_linenum, last_columnnum, |
1636 | last_filename, last_discriminator, |
1637 | false); |
1638 | } |
1639 | } |
1640 | |
1641 | /* We want to emit param bindings (before the first begin_stmt) in the |
1642 | initial view, if we are emitting views. To that end, we may |
1643 | consume initial notes in the function, processing them in |
1644 | final_start_function, before signaling the beginning of the |
1645 | prologue, rather than in final. |
1646 | |
1647 | We don't test whether the DECLs are PARM_DECLs: the assumption is |
1648 | that there will be a NOTE_INSN_BEGIN_STMT marker before any |
   non-parameter NOTE_INSN_VAR_LOCATION.  It's ok if the marker is not
   there: we'll just have more variable locations bound in the initial
   view, which is consistent with their being bound without any code
   that would give them a value.  */
1653 | |
1654 | static inline bool |
1655 | in_initial_view_p (rtx_insn *insn) |
1656 | { |
1657 | return (!DECL_IGNORED_P (current_function_decl) |
1658 | && debug_variable_location_views |
1659 | && insn && GET_CODE (insn) == NOTE |
1660 | && (NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION |
1661 | || NOTE_KIND (insn) == NOTE_INSN_DELETED)); |
1662 | } |
1663 | |
1664 | /* Output assembler code for the start of a function, |
1665 | and initialize some of the variables in this file |
1666 | for the new function. The label for the function and associated |
1667 | assembler pseudo-ops have already been output in `assemble_start_function'. |
1668 | |
1669 | FIRST is the first insn of the rtl for the function being compiled. |
1670 | FILE is the file to write assembler code to. |
1671 | SEEN should be initially set to zero, and it may be updated to |
1672 | indicate we have references to the next location view, that would |
1673 | require us to emit it at the current PC. |
1674 | OPTIMIZE_P is nonzero if we should eliminate redundant |
1675 | test and compare insns. */ |
1676 | |
1677 | static void |
1678 | final_start_function_1 (rtx_insn **firstp, FILE *file, int *seen, |
1679 | int optimize_p ATTRIBUTE_UNUSED) |
1680 | { |
1681 | block_depth = 0; |
1682 | |
1683 | this_is_asm_operands = 0; |
1684 | |
1685 | need_profile_function = false; |
1686 | |
1687 | last_filename = LOCATION_FILE (prologue_location); |
1688 | last_linenum = LOCATION_LINE (prologue_location); |
1689 | last_columnnum = LOCATION_COLUMN (prologue_location); |
1690 | last_discriminator = 0; |
1691 | force_source_line = false; |
1692 | |
1693 | high_block_linenum = high_function_linenum = last_linenum; |
1694 | |
1695 | rtx_insn *first = *firstp; |
  if (in_initial_view_p (first))
1697 | { |
1698 | do |
1699 | { |
1700 | final_scan_insn (first, file, 0, 0, seen); |
	  first = NEXT_INSN (first);
	}
      while (in_initial_view_p (first));
1704 | *firstp = first; |
1705 | } |
1706 | |
1707 | if (!DECL_IGNORED_P (current_function_decl)) |
1708 | debug_hooks->begin_prologue (last_linenum, last_columnnum, |
1709 | last_filename); |
1710 | |
  if (!dwarf2_debug_info_emitted_p (current_function_decl))
1712 | dwarf2out_begin_prologue (0, 0, NULL); |
1713 | |
1714 | if (DECL_IGNORED_P (current_function_decl) && last_linenum && last_filename) |
1715 | debug_hooks->set_ignored_loc (last_linenum, last_columnnum, last_filename); |
1716 | |
1717 | #ifdef LEAF_REG_REMAP |
1718 | if (crtl->uses_only_leaf_regs) |
1719 | leaf_renumber_regs (first); |
1720 | #endif |
1721 | |
1722 | /* The Sun386i and perhaps other machines don't work right |
1723 | if the profiling code comes after the prologue. */ |
1724 | if (targetm.profile_before_prologue () && crtl->profile) |
1725 | { |
1726 | if (targetm.asm_out.function_prologue == default_function_pro_epilogue |
1727 | && targetm.have_prologue ()) |
1728 | { |
1729 | rtx_insn *insn; |
1730 | for (insn = first; insn; insn = NEXT_INSN (insn)) |
1731 | if (!NOTE_P (insn)) |
1732 | { |
1733 | insn = NULL; |
1734 | break; |
1735 | } |
1736 | else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK |
1737 | || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) |
1738 | break; |
1739 | else if (NOTE_KIND (insn) == NOTE_INSN_DELETED |
1740 | || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION) |
1741 | continue; |
1742 | else |
1743 | { |
1744 | insn = NULL; |
1745 | break; |
1746 | } |
1747 | |
1748 | if (insn) |
1749 | need_profile_function = true; |
1750 | else |
1751 | profile_function (file); |
1752 | } |
1753 | else |
1754 | profile_function (file); |
1755 | } |
1756 | |
1757 | /* If debugging, assign block numbers to all of the blocks in this |
1758 | function. */ |
1759 | if (write_symbols) |
1760 | { |
1761 | reemit_insn_block_notes (); |
1762 | number_blocks (current_function_decl); |
1763 | /* We never actually put out begin/end notes for the top-level |
1764 | block in the function. But, conceptually, that block is |
1765 | always needed. */ |
1766 | TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1; |
1767 | } |
1768 | |
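  /* get_frame_size may return a poly_int on targets with
     variable-length modes; warn based on its guaranteed minimum.  */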
1769 | unsigned HOST_WIDE_INT min_frame_size |
1770 | = constant_lower_bound (a: get_frame_size ()); |
1771 | if (min_frame_size > (unsigned HOST_WIDE_INT) warn_frame_larger_than_size) |
1772 | { |
      /* Issue a warning.  */
      warning (OPT_Wframe_larger_than_,
	       "the frame size of %wu bytes is larger than %wu bytes",
	       min_frame_size, warn_frame_larger_than_size);
1777 | } |
1778 | |
1779 | /* First output the function prologue: code to set up the stack frame. */ |
1780 | targetm.asm_out.function_prologue (file); |
1781 | |
1782 | /* If the machine represents the prologue as RTL, the profiling code must |
1783 | be emitted when NOTE_INSN_PROLOGUE_END is scanned. */ |
1784 | if (! targetm.have_prologue ()) |
1785 | profile_after_prologue (file); |
1786 | } |
1787 | |
1788 | /* This is an exported final_start_function_1, callable without SEEN. */ |
1789 | |
1790 | void |
1791 | final_start_function (rtx_insn *first, FILE *file, |
1792 | int optimize_p ATTRIBUTE_UNUSED) |
1793 | { |
1794 | int seen = 0; |
  final_start_function_1 (&first, file, &seen, optimize_p);
1796 | gcc_assert (seen == 0); |
1797 | } |
1798 | |
1799 | static void |
1800 | profile_after_prologue (FILE *file ATTRIBUTE_UNUSED) |
1801 | { |
1802 | if (!targetm.profile_before_prologue () && crtl->profile) |
1803 | profile_function (file); |
1804 | } |
1805 | |
1806 | static void |
1807 | profile_function (FILE *file ATTRIBUTE_UNUSED) |
1808 | { |
1809 | #ifndef NO_PROFILE_COUNTERS |
1810 | # define NO_PROFILE_COUNTERS 0 |
1811 | #endif |
1812 | #ifdef ASM_OUTPUT_REG_PUSH |
1813 | rtx sval = NULL, chain = NULL; |
1814 | |
1815 | if (cfun->returns_struct) |
1816 | sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), |
1817 | true); |
1818 | if (cfun->static_chain_decl) |
1819 | chain = targetm.calls.static_chain (current_function_decl, true); |
1820 | #endif /* ASM_OUTPUT_REG_PUSH */ |
1821 | |
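  /* Unless the target suppresses profile counters, emit an aligned,
     zero-initialized counter word (labelled LPn) in the data section
     for the target's profiling runtime (e.g. mcount) to use.  */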
1822 | if (! NO_PROFILE_COUNTERS) |
1823 | { |
1824 | int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE); |
1825 | switch_to_section (data_section); |
1826 | ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT)); |
      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1828 | assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1); |
1829 | } |
1830 | |
1831 | switch_to_section (current_function_section ()); |
1832 | |
1833 | #ifdef ASM_OUTPUT_REG_PUSH |
1834 | if (sval && REG_P (sval)) |
1835 | ASM_OUTPUT_REG_PUSH (file, REGNO (sval)); |
1836 | if (chain && REG_P (chain)) |
1837 | ASM_OUTPUT_REG_PUSH (file, REGNO (chain)); |
1838 | #endif |
1839 | |
1840 | FUNCTION_PROFILER (file, current_function_funcdef_no); |
1841 | |
1842 | #ifdef ASM_OUTPUT_REG_PUSH |
1843 | if (chain && REG_P (chain)) |
1844 | ASM_OUTPUT_REG_POP (file, REGNO (chain)); |
1845 | if (sval && REG_P (sval)) |
1846 | ASM_OUTPUT_REG_POP (file, REGNO (sval)); |
1847 | #endif |
1848 | } |
1849 | |
/* Output assembler code for the end of a function.  This takes no
   arguments: everything it needs (the epilogue target hook, the debug
   hooks and the recorded line information) is reached through global
   state set up by `final_start_function' and `final'.  */
1853 | |
1854 | void |
1855 | final_end_function (void) |
1856 | { |
1857 | app_disable (); |
1858 | |
1859 | if (!DECL_IGNORED_P (current_function_decl)) |
1860 | debug_hooks->end_function (high_function_linenum); |
1861 | |
1862 | /* Finally, output the function epilogue: |
1863 | code to restore the stack frame and return to the caller. */ |
1864 | targetm.asm_out.function_epilogue (asm_out_file); |
1865 | |
1866 | /* And debug output. */ |
1867 | if (!DECL_IGNORED_P (current_function_decl)) |
1868 | debug_hooks->end_epilogue (last_linenum, last_filename); |
1869 | |
  if (!dwarf2_debug_info_emitted_p (current_function_decl)
1871 | && dwarf2out_do_frame ()) |
1872 | dwarf2out_end_epilogue (last_linenum, last_filename); |
1873 | |
1874 | some_local_dynamic_name = 0; |
1875 | } |
1876 | |
1877 | |
/* Dumper helper for basic block information.  FILE is the assembly
   output file, and INSN is the instruction being emitted.  START_TO_BB
   and END_TO_BB map the UIDs of block head and end insns to their
   basic blocks, BB_MAP_SIZE is the size of those maps, and BB_SEQN
   counts the blocks dumped so far.  */
1880 | |
1881 | static void |
1882 | dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb, |
1883 | basic_block *end_to_bb, int bb_map_size, int *bb_seqn) |
1884 | { |
1885 | basic_block bb; |
1886 | |
1887 | if (!flag_debug_asm) |
1888 | return; |
1889 | |
1890 | if (INSN_UID (insn) < bb_map_size |
1891 | && (bb = start_to_bb[INSN_UID (insn)]) != NULL) |
1892 | { |
1893 | edge e; |
1894 | edge_iterator ei; |
1895 | |
      fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
      if (bb->count.initialized_p ())
	{
	  fprintf (file, ", count:");
	  bb->count.dump (file);
	}
      fprintf (file, " seq:%d", (*bb_seqn)++);
      fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  dump_edge_info (file, e, TDF_DETAILS, 0);
	}
      fprintf (file, "\n");
1909 | } |
1910 | if (INSN_UID (insn) < bb_map_size |
1911 | && (bb = end_to_bb[INSN_UID (insn)]) != NULL) |
1912 | { |
1913 | edge e; |
1914 | edge_iterator ei; |
1915 | |
      fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
	}
      fprintf (file, "\n");
1922 | } |
1923 | } |
1924 | |
1925 | /* Output assembler code for some insns: all or part of a function. |
1926 | For description of args, see `final_start_function', above. */ |
1927 | |
1928 | static void |
1929 | final_1 (rtx_insn *first, FILE *file, int seen, int optimize_p) |
1930 | { |
1931 | rtx_insn *insn, *next; |
1932 | |
1933 | /* Used for -dA dump. */ |
1934 | basic_block *start_to_bb = NULL; |
1935 | basic_block *end_to_bb = NULL; |
1936 | int bb_map_size = 0; |
1937 | int bb_seqn = 0; |
1938 | |
1939 | last_ignored_compare = 0; |
1940 | |
1941 | init_recog (); |
1942 | |
1943 | CC_STATUS_INIT; |
1944 | |
1945 | if (flag_debug_asm) |
1946 | { |
1947 | basic_block bb; |
1948 | |
1949 | bb_map_size = get_max_uid () + 1; |
1950 | start_to_bb = XCNEWVEC (basic_block, bb_map_size); |
1951 | end_to_bb = XCNEWVEC (basic_block, bb_map_size); |
1952 | |
1953 | /* There is no cfg for a thunk. */ |
1954 | if (!cfun->is_thunk) |
1955 | FOR_EACH_BB_REVERSE_FN (bb, cfun) |
1956 | { |
1957 | start_to_bb[INSN_UID (BB_HEAD (bb))] = bb; |
1958 | end_to_bb[INSN_UID (BB_END (bb))] = bb; |
1959 | } |
1960 | } |
1961 | |
1962 | /* Output the insns. */ |
1963 | for (insn = first; insn;) |
1964 | { |
1965 | if (HAVE_ATTR_length) |
1966 | { |
1967 | if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ()) |
1968 | { |
1969 | /* This can be triggered by bugs elsewhere in the compiler if |
1970 | new insns are created after init_insn_lengths is called. */ |
1971 | gcc_assert (NOTE_P (insn)); |
1972 | insn_current_address = -1; |
1973 | } |
1974 | else |
1975 | insn_current_address = INSN_ADDRESSES (INSN_UID (insn)); |
1976 | /* final can be seen as an iteration of shorten_branches that |
1977 | does nothing (since a fixed point has already been reached). */ |
1978 | insn_last_address = insn_current_address; |
1979 | } |
1980 | |
1981 | dump_basic_block_info (file, insn, start_to_bb, end_to_bb, |
			     bb_map_size, &bb_seqn);
1983 | insn = final_scan_insn (insn, file, optimize_p, 0, &seen); |
1984 | } |
1985 | |
  maybe_output_next_view (&seen);
1987 | |
1988 | if (flag_debug_asm) |
1989 | { |
      free (start_to_bb);
      free (end_to_bb);
1992 | } |
1993 | |
1994 | /* Remove CFI notes, to avoid compare-debug failures. */ |
1995 | for (insn = first; insn; insn = next) |
1996 | { |
1997 | next = NEXT_INSN (insn); |
1998 | if (NOTE_P (insn) |
1999 | && (NOTE_KIND (insn) == NOTE_INSN_CFI |
2000 | || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL)) |
2001 | delete_insn (insn); |
2002 | } |
2003 | } |
2004 | |
2005 | /* This is an exported final_1, callable without SEEN. */ |
2006 | |
2007 | void |
2008 | final (rtx_insn *first, FILE *file, int optimize_p) |
2009 | { |
2010 | /* Those that use the internal final_start_function_1/final_1 API |
2011 | skip initial debug bind notes in final_start_function_1, and pass |
2012 | the modified FIRST to final_1. But those that use the public |
2013 | final_start_function/final APIs, final_start_function can't move |
2014 | FIRST because it's not passed by reference, so if they were |
2015 | skipped there, skip them again here. */ |
  while (in_initial_view_p (first))
    first = NEXT_INSN (first);

  final_1 (first, file, 0, optimize_p);
2020 | } |
2021 | |
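/* Return the assembler template for the insn whose code is CODE.
   INSN is the insn itself and is only required (and must be nonnull)
   when the template is computed by a C function.  */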
2022 | const char * |
2023 | get_insn_template (int code, rtx_insn *insn) |
2024 | { |
2025 | switch (insn_data[code].output_format) |
2026 | { |
2027 | case INSN_OUTPUT_FORMAT_SINGLE: |
2028 | return insn_data[code].output.single; |
2029 | case INSN_OUTPUT_FORMAT_MULTI: |
2030 | return insn_data[code].output.multi[which_alternative]; |
2031 | case INSN_OUTPUT_FORMAT_FUNCTION: |
2032 | gcc_assert (insn); |
2033 | return (*insn_data[code].output.function) (recog_data.operand, insn); |
2034 | |
2035 | default: |
2036 | gcc_unreachable (); |
2037 | } |
2038 | } |
2039 | |
2040 | /* Emit the appropriate declaration for an alternate-entry-point |
2041 | symbol represented by INSN, to FILE. INSN is a CODE_LABEL with |
2042 | LABEL_KIND != LABEL_NORMAL. |
2043 | |
2044 | The case fall-through in this function is intentional. */ |
2045 | static void |
2046 | output_alternate_entry_point (FILE *file, rtx_insn *insn) |
2047 | { |
2048 | const char *name = LABEL_NAME (insn); |
2049 | |
2050 | switch (LABEL_KIND (insn)) |
2051 | { |
2052 | case LABEL_WEAK_ENTRY: |
2053 | #ifdef ASM_WEAKEN_LABEL |
2054 | ASM_WEAKEN_LABEL (file, name); |
2055 | gcc_fallthrough (); |
2056 | #endif |
2057 | case LABEL_GLOBAL_ENTRY: |
2058 | targetm.asm_out.globalize_label (file, name); |
2059 | gcc_fallthrough (); |
2060 | case LABEL_STATIC_ENTRY: |
2061 | #ifdef ASM_OUTPUT_TYPE_DIRECTIVE |
      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2063 | #endif |
2064 | ASM_OUTPUT_LABEL (file, name); |
2065 | break; |
2066 | |
2067 | case LABEL_NORMAL: |
2068 | default: |
2069 | gcc_unreachable (); |
2070 | } |
2071 | } |
2072 | |
2073 | /* Given a CALL_INSN, find and return the nested CALL. */ |
2074 | static rtx |
2075 | call_from_call_insn (const rtx_call_insn *insn) |
2076 | { |
2077 | rtx x; |
2078 | gcc_assert (CALL_P (insn)); |
2079 | x = PATTERN (insn); |
2080 | |
2081 | while (GET_CODE (x) != CALL) |
2082 | { |
2083 | switch (GET_CODE (x)) |
2084 | { |
2085 | default: |
2086 | gcc_unreachable (); |
2087 | case COND_EXEC: |
2088 | x = COND_EXEC_CODE (x); |
2089 | break; |
2090 | case PARALLEL: |
2091 | x = XVECEXP (x, 0, 0); |
2092 | break; |
2093 | case SET: |
2094 | x = XEXP (x, 1); |
2095 | break; |
2096 | } |
2097 | } |
2098 | return x; |
2099 | } |
2100 | |
/* Return the CALL contained in INSN, which must be a CALL_INSN.  */
2102 | |
2103 | rtx |
2104 | get_call_rtx_from (const rtx_insn *insn) |
2105 | { |
  const rtx_call_insn *call_insn = as_a <const rtx_call_insn *> (insn);
  return call_from_call_insn (call_insn);
2108 | } |
2109 | |
2110 | /* Print a comment into the asm showing FILENAME, LINENUM, and the |
2111 | corresponding source line, if available. */ |
2112 | |
2113 | static void |
2114 | asm_show_source (const char *filename, int linenum) |
2115 | { |
2116 | if (!filename) |
2117 | return; |
2118 | |
2119 | char_span line |
    = global_dc->get_file_cache ().get_source_line (filename, linenum);
2121 | if (!line) |
2122 | return; |
2123 | |
  fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
  /* "line" is not 0-terminated, so we must use its length.  */
  fwrite (line.get_buffer (), 1, line.length (), asm_out_file);
  fputc ('\n', asm_out_file);
2128 | } |
2129 | |
/* Return true if an absolute jump table is relocatable.  */
2131 | |
2132 | bool |
2133 | jumptable_relocatable (void) |
2134 | { |
2135 | bool relocatable = false; |
2136 | |
2137 | if (!CASE_VECTOR_PC_RELATIVE |
2138 | && !targetm.asm_out.generate_pic_addr_diff_vec () |
2139 | && targetm_common.have_named_sections) |
2140 | relocatable = targetm.asm_out.reloc_rw_mask (); |
2141 | |
2142 | return relocatable; |
2143 | } |
2144 | |
2145 | /* The final scan for one insn, INSN. |
2146 | Args are same as in `final', except that INSN |
2147 | is the insn being scanned. |
2148 | Value returned is the next insn to be scanned. |
2149 | |
   NOPEEPHOLES is the flag to disallow peephole processing (currently
   used when outputting insns within a delayed branch sequence).
2152 | |
2153 | SEEN is used to track the end of the prologue, for emitting |
2154 | debug information. We force the emission of a line note after |
2155 | both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */ |
2156 | |
2157 | static rtx_insn * |
2158 | final_scan_insn_1 (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED, |
2159 | int nopeepholes ATTRIBUTE_UNUSED, int *seen) |
2160 | { |
2161 | rtx_insn *next; |
2162 | rtx_jump_table_data *table; |
2163 | |
2164 | insn_counter++; |
2165 | |
2166 | /* Ignore deleted insns. These can occur when we split insns (due to a |
2167 | template of "#") while not optimizing. */ |
2168 | if (insn->deleted ()) |
2169 | return NEXT_INSN (insn); |
2170 | |
2171 | switch (GET_CODE (insn)) |
2172 | { |
2173 | case NOTE: |
2174 | switch (NOTE_KIND (insn)) |
2175 | { |
2176 | case NOTE_INSN_DELETED: |
2177 | case NOTE_INSN_UPDATE_SJLJ_CONTEXT: |
2178 | break; |
2179 | |
2180 | case NOTE_INSN_SWITCH_TEXT_SECTIONS: |
2181 | maybe_output_next_view (seen); |
2182 | |
2183 | output_function_exception_table (0); |
2184 | |
2185 | if (targetm.asm_out.unwind_emit) |
2186 | targetm.asm_out.unwind_emit (asm_out_file, insn); |
2187 | |
2188 | in_cold_section_p = !in_cold_section_p; |
2189 | |
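	  /* A function is split into at most a hot and a cold part, so
	     this note marks the single transition into the cold
	     section.  */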
2190 | gcc_checking_assert (in_cold_section_p); |
2191 | if (in_cold_section_p) |
	    cold_function_name
	      = clone_function_name (current_function_decl, "cold");
2194 | |
2195 | if (dwarf2out_do_frame ()) |
2196 | { |
2197 | dwarf2out_switch_text_section (); |
	      if (!dwarf2_debug_info_emitted_p (current_function_decl)
2199 | && !DECL_IGNORED_P (current_function_decl)) |
2200 | debug_hooks->switch_text_section (); |
2201 | } |
2202 | else if (!DECL_IGNORED_P (current_function_decl)) |
2203 | debug_hooks->switch_text_section (); |
2204 | if (DECL_IGNORED_P (current_function_decl) && last_linenum |
2205 | && last_filename) |
2206 | debug_hooks->set_ignored_loc (last_linenum, last_columnnum, |
2207 | last_filename); |
2208 | |
2209 | switch_to_section (current_function_section ()); |
2210 | targetm.asm_out.function_switched_text_sections (asm_out_file, |
2211 | current_function_decl, |
2212 | in_cold_section_p); |
2213 | /* Emit a label for the split cold section. Form label name by |
2214 | suffixing "cold" to the original function's name. */ |
2215 | if (in_cold_section_p) |
2216 | { |
2217 | #ifdef ASM_DECLARE_COLD_FUNCTION_NAME |
2218 | ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file, |
2219 | IDENTIFIER_POINTER |
2220 | (cold_function_name), |
2221 | current_function_decl); |
2222 | #else |
2223 | ASM_OUTPUT_LABEL (asm_out_file, |
2224 | IDENTIFIER_POINTER (cold_function_name)); |
2225 | #endif |
2226 | if (dwarf2out_do_frame () |
2227 | && cfun->fde->dw_fde_second_begin != NULL) |
2228 | ASM_OUTPUT_LABEL (asm_out_file, cfun->fde->dw_fde_second_begin); |
2229 | } |
2230 | break; |
2231 | |
2232 | case NOTE_INSN_BASIC_BLOCK: |
2233 | if (need_profile_function) |
2234 | { |
	      profile_function (asm_out_file);
2236 | need_profile_function = false; |
2237 | } |
2238 | |
2239 | if (targetm.asm_out.unwind_emit) |
2240 | targetm.asm_out.unwind_emit (asm_out_file, insn); |
2241 | |
2242 | break; |
2243 | |
2244 | case NOTE_INSN_EH_REGION_BEG: |
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2246 | NOTE_EH_HANDLER (insn)); |
2247 | break; |
2248 | |
2249 | case NOTE_INSN_EH_REGION_END: |
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2251 | NOTE_EH_HANDLER (insn)); |
2252 | break; |
2253 | |
2254 | case NOTE_INSN_PROLOGUE_END: |
2255 | targetm.asm_out.function_end_prologue (file); |
2256 | profile_after_prologue (file); |
2257 | |
2258 | if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE) |
2259 | { |
2260 | *seen |= SEEN_EMITTED; |
2261 | force_source_line = true; |
2262 | } |
2263 | else |
2264 | *seen |= SEEN_NOTE; |
2265 | |
2266 | break; |
2267 | |
2268 | case NOTE_INSN_EPILOGUE_BEG: |
2269 | if (!DECL_IGNORED_P (current_function_decl)) |
2270 | (*debug_hooks->begin_epilogue) (last_linenum, last_filename); |
2271 | targetm.asm_out.function_begin_epilogue (file); |
2272 | break; |
2273 | |
2274 | case NOTE_INSN_CFI: |
2275 | dwarf2out_emit_cfi (NOTE_CFI (insn)); |
2276 | break; |
2277 | |
2278 | case NOTE_INSN_CFI_LABEL: |
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2280 | NOTE_LABEL_NUMBER (insn)); |
2281 | break; |
2282 | |
2283 | case NOTE_INSN_FUNCTION_BEG: |
2284 | if (need_profile_function) |
2285 | { |
	      profile_function (asm_out_file);
2287 | need_profile_function = false; |
2288 | } |
2289 | |
2290 | app_disable (); |
2291 | if (!DECL_IGNORED_P (current_function_decl)) |
2292 | debug_hooks->end_prologue (last_linenum, last_filename); |
2293 | |
2294 | if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE) |
2295 | { |
2296 | *seen |= SEEN_EMITTED; |
2297 | force_source_line = true; |
2298 | } |
2299 | else |
2300 | *seen |= SEEN_NOTE; |
2301 | |
2302 | break; |
2303 | |
2304 | case NOTE_INSN_BLOCK_BEG: |
2305 | if (debug_info_level >= DINFO_LEVEL_NORMAL |
2306 | || dwarf_debuginfo_p () |
2307 | || write_symbols == VMS_DEBUG) |
2308 | { |
2309 | int n = BLOCK_NUMBER (NOTE_BLOCK (insn)); |
2310 | |
2311 | app_disable (); |
2312 | ++block_depth; |
2313 | high_block_linenum = last_linenum; |
2314 | |
2315 | /* Output debugging info about the symbol-block beginning. */ |
2316 | if (!DECL_IGNORED_P (current_function_decl)) |
2317 | debug_hooks->begin_block (last_linenum, n, NOTE_BLOCK (insn)); |
2318 | |
2319 | /* Mark this block as output. */ |
2320 | TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1; |
2321 | BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p; |
2322 | } |
2323 | break; |
2324 | |
2325 | case NOTE_INSN_BLOCK_END: |
2326 | maybe_output_next_view (seen); |
2327 | |
2328 | if (debug_info_level >= DINFO_LEVEL_NORMAL |
2329 | || dwarf_debuginfo_p () |
2330 | || write_symbols == VMS_DEBUG) |
2331 | { |
2332 | int n = BLOCK_NUMBER (NOTE_BLOCK (insn)); |
2333 | |
2334 | app_disable (); |
2335 | |
2336 | /* End of a symbol-block. */ |
2337 | --block_depth; |
2338 | gcc_assert (block_depth >= 0); |
2339 | |
2340 | if (!DECL_IGNORED_P (current_function_decl)) |
2341 | debug_hooks->end_block (high_block_linenum, n); |
2342 | gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) |
2343 | == in_cold_section_p); |
2344 | } |
2345 | break; |
2346 | |
2347 | case NOTE_INSN_DELETED_LABEL: |
	  /* Emit the label.  We may have deleted the CODE_LABEL because
	     the label could be proved to be unreachable, though still
	     referenced (in the form of having its address taken).  */
	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2352 | break; |
2353 | |
2354 | case NOTE_INSN_DELETED_DEBUG_LABEL: |
2355 | /* Similarly, but need to use different namespace for it. */ |
2356 | if (CODE_LABEL_NUMBER (insn) != -1) |
	    ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2358 | break; |
2359 | |
2360 | case NOTE_INSN_VAR_LOCATION: |
2361 | if (!DECL_IGNORED_P (current_function_decl)) |
2362 | { |
2363 | debug_hooks->var_location (insn); |
2364 | set_next_view_needed (seen); |
2365 | } |
2366 | break; |
2367 | |
2368 | case NOTE_INSN_BEGIN_STMT: |
2369 | gcc_checking_assert (cfun->debug_nonbind_markers); |
2370 | if (!DECL_IGNORED_P (current_function_decl) |
2371 | && notice_source_line (insn, NULL)) |
2372 | { |
2373 | output_source_line: |
2374 | (*debug_hooks->source_line) (last_linenum, last_columnnum, |
2375 | last_filename, last_discriminator, |
2376 | true); |
2377 | clear_next_view_needed (seen); |
2378 | } |
2379 | break; |
2380 | |
2381 | case NOTE_INSN_INLINE_ENTRY: |
2382 | gcc_checking_assert (cfun->debug_nonbind_markers); |
2383 | if (!DECL_IGNORED_P (current_function_decl) |
2384 | && notice_source_line (insn, NULL)) |
2385 | { |
2386 | (*debug_hooks->inline_entry) (LOCATION_BLOCK |
2387 | (NOTE_MARKER_LOCATION (insn))); |
2388 | goto output_source_line; |
2389 | } |
2390 | break; |
2391 | |
2392 | default: |
2393 | gcc_unreachable (); |
2394 | break; |
2395 | } |
2396 | break; |
2397 | |
2398 | case BARRIER: |
2399 | break; |
2400 | |
2401 | case CODE_LABEL: |
2402 | /* The target port might emit labels in the output function for |
2403 | some insn, e.g. sh.cc output_branchy_insn. */ |
2404 | if (CODE_LABEL_NUMBER (insn) <= max_labelno) |
2405 | { |
2406 | align_flags alignment = LABEL_TO_ALIGNMENT (insn); |
2407 | if (alignment.levels[0].log && NEXT_INSN (insn)) |
2408 | { |
2409 | #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN |
2410 | /* Output both primary and secondary alignment. */ |
2411 | ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[0].log, |
2412 | alignment.levels[0].maxskip); |
2413 | ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[1].log, |
2414 | alignment.levels[1].maxskip); |
2415 | #else |
2416 | #ifdef ASM_OUTPUT_ALIGN_WITH_NOP |
2417 | ASM_OUTPUT_ALIGN_WITH_NOP (file, alignment.levels[0].log); |
2418 | #else |
2419 | ASM_OUTPUT_ALIGN (file, alignment.levels[0].log); |
2420 | #endif |
2421 | #endif |
2422 | } |
2423 | } |
2424 | CC_STATUS_INIT; |
2425 | |
2426 | if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn)) |
	debug_hooks->label (as_a <rtx_code_label *> (insn));
2428 | |
2429 | app_disable (); |
2430 | |
2431 | /* If this label is followed by a jump-table, make sure we put |
2432 | the label in the read-only section. Also possibly write the |
2433 | label and jump table together. */ |
      table = jump_table_for_label (as_a <rtx_code_label *> (insn));
2435 | if (table) |
2436 | { |
2437 | #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC) |
2438 | /* In this case, the case vector is being moved by the |
2439 | target, so don't output the label at all. Leave that |
2440 | to the back end macros. */ |
2441 | #else |
2442 | if (! JUMP_TABLES_IN_TEXT_SECTION) |
2443 | { |
2444 | int log_align; |
2445 | |
2446 | switch_to_section (targetm.asm_out.function_rodata_section |
2447 | (current_function_decl, |
2448 | jumptable_relocatable ())); |
2449 | |
2450 | #ifdef ADDR_VEC_ALIGN |
2451 | log_align = ADDR_VEC_ALIGN (table); |
2452 | #else |
2453 | log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT); |
2454 | #endif |
2455 | ASM_OUTPUT_ALIGN (file, log_align); |
2456 | } |
2457 | else |
2458 | switch_to_section (current_function_section ()); |
2459 | |
2460 | #ifdef ASM_OUTPUT_CASE_LABEL |
	  ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn), table);
2462 | #else |
	  targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2464 | #endif |
2465 | #endif |
2466 | break; |
2467 | } |
2468 | if (LABEL_ALT_ENTRY_P (insn)) |
2469 | output_alternate_entry_point (file, insn); |
2470 | else |
	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2472 | break; |
2473 | |
2474 | default: |
2475 | { |
2476 | rtx body = PATTERN (insn); |
2477 | int insn_code_number; |
2478 | const char *templ; |
2479 | bool is_stmt, *is_stmt_p; |
2480 | |
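	/* When statement frontier markers are in use, only the marker
	   notes carry is_stmt information; line notes emitted for real
	   insns are then never statement boundaries.  */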
2481 | if (MAY_HAVE_DEBUG_MARKER_INSNS && cfun->debug_nonbind_markers) |
2482 | { |
2483 | is_stmt = false; |
2484 | is_stmt_p = NULL; |
2485 | } |
2486 | else |
2487 | is_stmt_p = &is_stmt; |
2488 | |
2489 | /* Reset this early so it is correct for ASM statements. */ |
2490 | current_insn_predicate = NULL_RTX; |
2491 | |
2492 | /* An INSN, JUMP_INSN or CALL_INSN. |
2493 | First check for special kinds that recog doesn't recognize. */ |
2494 | |
2495 | if (GET_CODE (body) == USE /* These are just declarations. */ |
2496 | || GET_CODE (body) == CLOBBER) |
2497 | break; |
2498 | |
2499 | /* Detect insns that are really jump-tables |
2500 | and output them as such. */ |
2501 | |
2502 | if (JUMP_TABLE_DATA_P (insn)) |
2503 | { |
2504 | #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)) |
2505 | int vlen, idx; |
2506 | #endif |
2507 | |
2508 | if (! JUMP_TABLES_IN_TEXT_SECTION) |
2509 | switch_to_section (targetm.asm_out.function_rodata_section |
2510 | (current_function_decl, |
2511 | jumptable_relocatable ())); |
2512 | else |
2513 | switch_to_section (current_function_section ()); |
2514 | |
2515 | app_disable (); |
2516 | |
2517 | #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC) |
2518 | if (GET_CODE (body) == ADDR_VEC) |
2519 | { |
2520 | #ifdef ASM_OUTPUT_ADDR_VEC |
2521 | ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body); |
2522 | #else |
2523 | gcc_unreachable (); |
2524 | #endif |
2525 | } |
2526 | else |
2527 | { |
2528 | #ifdef ASM_OUTPUT_ADDR_DIFF_VEC |
2529 | ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body); |
2530 | #else |
2531 | gcc_unreachable (); |
2532 | #endif |
2533 | } |
2534 | #else |
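	  /* An ADDR_DIFF_VEC keeps its label vector in operand 1
	     (operand 0 is the base label); an ADDR_VEC keeps it in
	     operand 0.  */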
2535 | vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC); |
2536 | for (idx = 0; idx < vlen; idx++) |
2537 | { |
2538 | if (GET_CODE (body) == ADDR_VEC) |
2539 | { |
2540 | #ifdef ASM_OUTPUT_ADDR_VEC_ELT |
2541 | ASM_OUTPUT_ADDR_VEC_ELT |
2542 | (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0))); |
2543 | #else |
2544 | gcc_unreachable (); |
2545 | #endif |
2546 | } |
2547 | else |
2548 | { |
2549 | #ifdef ASM_OUTPUT_ADDR_DIFF_ELT |
2550 | ASM_OUTPUT_ADDR_DIFF_ELT |
2551 | (file, |
2552 | body, |
2553 | CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)), |
2554 | CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0))); |
2555 | #else |
2556 | gcc_unreachable (); |
2557 | #endif |
2558 | } |
2559 | } |
2560 | #ifdef ASM_OUTPUT_CASE_END |
2561 | ASM_OUTPUT_CASE_END (file, |
2562 | CODE_LABEL_NUMBER (PREV_INSN (insn)), |
2563 | insn); |
2564 | #endif |
2565 | #endif |
2566 | |
2567 | switch_to_section (current_function_section ()); |
2568 | |
2569 | if (debug_variable_location_views |
2570 | && !DECL_IGNORED_P (current_function_decl)) |
2571 | debug_hooks->var_location (insn); |
2572 | |
2573 | break; |
2574 | } |
2575 | /* Output this line note if it is the first or the last line |
2576 | note in a row. */ |
2577 | if (!DECL_IGNORED_P (current_function_decl) |
2578 | && notice_source_line (insn, is_stmt_p)) |
2579 | { |
2580 | if (flag_verbose_asm) |
	      asm_show_source (last_filename, last_linenum);
2582 | (*debug_hooks->source_line) (last_linenum, last_columnnum, |
2583 | last_filename, last_discriminator, |
2584 | is_stmt); |
2585 | clear_next_view_needed (seen); |
2586 | } |
2587 | else |
2588 | maybe_output_next_view (seen); |
2589 | |
2590 | gcc_checking_assert (!DEBUG_INSN_P (insn)); |
2591 | |
2592 | if (GET_CODE (body) == PARALLEL |
2593 | && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT) |
2594 | body = XVECEXP (body, 0, 0); |
2595 | |
2596 | if (GET_CODE (body) == ASM_INPUT) |
2597 | { |
2598 | const char *string = XSTR (body, 0); |
2599 | |
2600 | /* There's no telling what that did to the condition codes. */ |
2601 | CC_STATUS_INIT; |
2602 | |
2603 | if (string[0]) |
2604 | { |
2605 | expanded_location loc; |
2606 | |
2607 | app_enable (); |
2608 | loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body)); |
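		/* Bracket the user's text with line markers (on
		   assemblers that understand '#'-style markers) so any
		   assembler diagnostics point back at the original
		   source; the "0" marker below returns to
		   compiler-generated code.  */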
2609 | if (*loc.file && loc.line) |
		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
			   ASM_COMMENT_START, loc.line, loc.file);
		fprintf (asm_out_file, "\t%s\n", string);
#if HAVE_AS_LINE_ZERO
		if (*loc.file && loc.line)
		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2616 | #endif |
2617 | } |
2618 | break; |
2619 | } |
2620 | |
2621 | /* Detect `asm' construct with operands. */ |
2622 | if (asm_noperands (body) >= 0) |
2623 | { |
2624 | unsigned int noperands = asm_noperands (body); |
2625 | rtx *ops = XALLOCAVEC (rtx, noperands); |
2626 | const char *string; |
2627 | location_t loc; |
2628 | expanded_location expanded; |
2629 | |
2630 | /* There's no telling what that did to the condition codes. */ |
2631 | CC_STATUS_INIT; |
2632 | |
2633 | /* Get out the operand values. */ |
2634 | string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc); |
2635 | /* Inhibit dying on what would otherwise be compiler bugs. */ |
2636 | insn_noperands = noperands; |
2637 | this_is_asm_operands = insn; |
2638 | expanded = expand_location (loc); |
2639 | |
2640 | #ifdef FINAL_PRESCAN_INSN |
2641 | FINAL_PRESCAN_INSN (insn, ops, insn_noperands); |
2642 | #endif |
2643 | |
2644 | /* Output the insn using them. */ |
2645 | if (string[0]) |
2646 | { |
2647 | app_enable (); |
2648 | if (expanded.file && expanded.line) |
		fprintf (asm_out_file, "%s %i \"%s\" 1\n",
			 ASM_COMMENT_START, expanded.line, expanded.file);
	      output_asm_insn (string, ops);
#if HAVE_AS_LINE_ZERO
	      if (expanded.file && expanded.line)
		fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2655 | #endif |
2656 | } |
2657 | |
2658 | if (targetm.asm_out.final_postscan_insn) |
2659 | targetm.asm_out.final_postscan_insn (file, insn, ops, |
2660 | insn_noperands); |
2661 | |
2662 | this_is_asm_operands = 0; |
2663 | break; |
2664 | } |
2665 | |
2666 | app_disable (); |
2667 | |
	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2669 | { |
2670 | /* A delayed-branch sequence */ |
2671 | int i; |
2672 | |
2673 | final_sequence = seq; |
2674 | |
2675 | /* The first insn in this SEQUENCE might be a JUMP_INSN that will |
2676 | force the restoration of a comparison that was previously |
2677 | thought unnecessary. If that happens, cancel this sequence |
2678 | and cause that insn to be restored. */ |
2679 | |
	    next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
	    if (next != seq->insn (1))
2682 | { |
2683 | final_sequence = 0; |
2684 | return next; |
2685 | } |
2686 | |
2687 | for (i = 1; i < seq->len (); i++) |
2688 | { |
		rtx_insn *insn = seq->insn (i);
2690 | rtx_insn *next = NEXT_INSN (insn); |
2691 | /* We loop in case any instruction in a delay slot gets |
2692 | split. */ |
2693 | do |
2694 | insn = final_scan_insn (insn, file, 0, 1, seen); |
2695 | while (insn != next); |
2696 | } |
2697 | #ifdef DBR_OUTPUT_SEQEND |
2698 | DBR_OUTPUT_SEQEND (file); |
2699 | #endif |
2700 | final_sequence = 0; |
2701 | |
2702 | /* If the insn requiring the delay slot was a CALL_INSN, the |
2703 | insns in the delay slot are actually executed before the |
2704 | called function. Hence we don't preserve any CC-setting |
2705 | actions in these insns and the CC must be marked as being |
2706 | clobbered by the function. */ |
2707 | if (CALL_P (seq->insn (0))) |
2708 | { |
2709 | CC_STATUS_INIT; |
2710 | } |
2711 | break; |
2712 | } |
2713 | |
2714 | /* We have a real machine instruction as rtl. */ |
2715 | |
2716 | body = PATTERN (insn); |
2717 | |
2718 | /* Do machine-specific peephole optimizations if desired. */ |
2719 | |
2720 | if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes) |
2721 | { |
2722 | rtx_insn *next = peephole (insn); |
2723 | /* When peepholing, if there were notes within the peephole, |
2724 | emit them before the peephole. */ |
2725 | if (next != 0 && next != NEXT_INSN (insn)) |
2726 | { |
2727 | rtx_insn *note, *prev = PREV_INSN (insn); |
2728 | |
2729 | for (note = NEXT_INSN (insn); note != next; |
		     note = NEXT_INSN (note))
2731 | final_scan_insn (note, file, optimize_p, nopeepholes, seen); |
2732 | |
2733 | /* Put the notes in the proper position for a later |
2734 | rescan. For example, the SH target can do this |
2735 | when generating a far jump in a delayed branch |
2736 | sequence. */ |
2737 | note = NEXT_INSN (insn); |
2738 | SET_PREV_INSN (note) = prev; |
2739 | SET_NEXT_INSN (prev) = note; |
		SET_NEXT_INSN (PREV_INSN (next)) = insn;
		SET_PREV_INSN (insn) = PREV_INSN (next);
2742 | SET_NEXT_INSN (insn) = next; |
2743 | SET_PREV_INSN (next) = insn; |
2744 | } |
2745 | |
2746 | /* PEEPHOLE might have changed this. */ |
2747 | body = PATTERN (insn); |
2748 | } |
2749 | |
2750 | /* Try to recognize the instruction. |
2751 | If successful, verify that the operands satisfy the |
2752 | constraints for the instruction. Crash if they don't, |
2753 | since `reload' should have changed them so that they do. */ |
2754 | |
2755 | insn_code_number = recog_memoized (insn); |
2756 | cleanup_subreg_operands (insn); |
2757 | |
2758 | /* Dump the insn in the assembly for debugging (-dAP). |
2759 | If the final dump is requested as slim RTL, dump slim |
2760 | RTL to the assembly file also. */ |
2761 | if (flag_dump_rtl_in_asm) |
2762 | { |
2763 | print_rtx_head = ASM_COMMENT_START; |
2764 | if (! (dump_flags & TDF_SLIM)) |
2765 | print_rtl_single (asm_out_file, insn); |
2766 | else |
2767 | dump_insn_slim (asm_out_file, insn); |
	    print_rtx_head = "";
2769 | } |
2770 | |
2771 | if (! constrain_operands_cached (insn, 1)) |
2772 | fatal_insn_not_found (insn); |
2773 | |
2774 | /* Some target machines need to prescan each insn before |
2775 | it is output. */ |
2776 | |
2777 | #ifdef FINAL_PRESCAN_INSN |
2778 | FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands); |
2779 | #endif |
2780 | |
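	/* Remember the predicate of a conditionally executed insn so
	   that the target's output code can emit it with the
	   instruction.  */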
2781 | if (targetm.have_conditional_execution () |
2782 | && GET_CODE (PATTERN (insn)) == COND_EXEC) |
2783 | current_insn_predicate = COND_EXEC_TEST (PATTERN (insn)); |
2784 | |
2785 | current_output_insn = debug_insn = insn; |
2786 | |
2787 | /* Find the proper template for this insn. */ |
	templ = get_insn_template (insn_code_number, insn);
2789 | |
2790 | /* If the C code returns 0, it means that it is a jump insn |
2791 | which follows a deleted test insn, and that test insn |
2792 | needs to be reinserted. */ |
2793 | if (templ == 0) |
2794 | { |
2795 | rtx_insn *prev; |
2796 | |
2797 | gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare); |
2798 | |
2799 | /* We have already processed the notes between the setter and |
2800 | the user. Make sure we don't process them again, this is |
2801 | particularly important if one of the notes is a block |
2802 | scope note or an EH note. */ |
2803 | for (prev = insn; |
2804 | prev != last_ignored_compare; |
		 prev = PREV_INSN (prev))
2806 | { |
2807 | if (NOTE_P (prev)) |
2808 | delete_insn (prev); /* Use delete_note. */ |
2809 | } |
2810 | |
2811 | return prev; |
2812 | } |
2813 | |
2814 | /* If the template is the string "#", it means that this insn must |
2815 | be split. */ |
2816 | if (templ[0] == '#' && templ[1] == '\0') |
2817 | { |
2818 | rtx_insn *new_rtx = try_split (body, insn, 0); |
2819 | |
2820 | /* If we didn't split the insn, go away. */ |
	    if (new_rtx == insn && PATTERN (new_rtx) == body)
	      fatal_insn ("could not split insn", insn);
2823 | |
2824 | /* If we have a length attribute, this instruction should have |
2825 | been split in shorten_branches, to ensure that we would have |
2826 | valid length info for the splitees. */ |
2827 | gcc_assert (!HAVE_ATTR_length); |
2828 | |
2829 | return new_rtx; |
2830 | } |
2831 | |
	/* ??? This will put the directives in the wrong place if
	   get_insn_template outputs assembly directly.  However calling it
	   before get_insn_template breaks if the insn is split.  */
2835 | if (targetm.asm_out.unwind_emit_before_insn |
2836 | && targetm.asm_out.unwind_emit) |
2837 | targetm.asm_out.unwind_emit (asm_out_file, insn); |
2838 | |
	rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
2840 | if (call_insn != NULL) |
2841 | { |
	    rtx x = call_from_call_insn (call_insn);
2843 | x = XEXP (x, 0); |
2844 | if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF) |
2845 | { |
2846 | tree t; |
2847 | x = XEXP (x, 0); |
2848 | t = SYMBOL_REF_DECL (x); |
2849 | if (t) |
2850 | assemble_external (t); |
2851 | } |
2852 | } |
2853 | |
2854 | /* Output assembler code from the template. */ |
2855 | output_asm_insn (templ, recog_data.operand); |
2856 | |
2857 | /* Some target machines need to postscan each insn after |
2858 | it is output. */ |
2859 | if (targetm.asm_out.final_postscan_insn) |
2860 | targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand, |
2861 | recog_data.n_operands); |
2862 | |
2863 | if (!targetm.asm_out.unwind_emit_before_insn |
2864 | && targetm.asm_out.unwind_emit) |
2865 | targetm.asm_out.unwind_emit (asm_out_file, insn); |
2866 | |
2867 | /* Let the debug info back-end know about this call. We do this only |
2868 | after the instruction has been emitted because labels that may be |
2869 | created to reference the call instruction must appear after it. */ |
2870 | if ((debug_variable_location_views || call_insn != NULL) |
2871 | && !DECL_IGNORED_P (current_function_decl)) |
2872 | debug_hooks->var_location (insn); |
2873 | |
2874 | current_output_insn = debug_insn = 0; |
2875 | } |
2876 | } |
2877 | return NEXT_INSN (insn); |
2878 | } |
2879 | |
2880 | /* This is a wrapper around final_scan_insn_1 that allows ports to |
2881 | call it recursively without a known value for SEEN. The value is |
2882 | saved at the outermost call, and recovered for recursive calls. |
2883 | Recursive calls MUST pass NULL, or the same pointer if they can |
2884 | otherwise get to it. */ |
2885 | |
2886 | rtx_insn * |
2887 | final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p, |
2888 | int nopeepholes, int *seen) |
2889 | { |
2890 | static int *enclosing_seen; |
2891 | static int recursion_counter; |
2892 | |
2893 | gcc_assert (seen || recursion_counter); |
2894 | gcc_assert (!recursion_counter || !seen || seen == enclosing_seen); |
2895 | |
2896 | if (!recursion_counter++) |
2897 | enclosing_seen = seen; |
2898 | else if (!seen) |
2899 | seen = enclosing_seen; |
2900 | |
2901 | rtx_insn *ret = final_scan_insn_1 (insn, file, optimize_p, nopeepholes, seen); |
2902 | |
2903 | if (!--recursion_counter) |
2904 | enclosing_seen = NULL; |
2905 | |
2906 | return ret; |
2907 | } |
2908 | |
2909 | |
2910 | |
2911 | /* Map DECLs to instance discriminators. This is allocated and |
   defined in ada/gcc-interface/trans.cc, when compiling with -gnateS.
2913 | Mappings from this table are saved and restored for LTO, so |
2914 | link-time compilation will have this map set, at least in |
2915 | partitions containing at least one DECL with an associated instance |
2916 | discriminator. */ |
2917 | |
2918 | decl_to_instance_map_t *decl_to_instance_map; |
2919 | |
2920 | /* Return the instance number assigned to DECL. */ |
2921 | |
2922 | static inline int |
2923 | map_decl_to_instance (const_tree decl) |
2924 | { |
2925 | int *inst; |
2926 | |
2927 | if (!decl_to_instance_map || !decl || !DECL_P (decl)) |
2928 | return 0; |
2929 | |
  inst = decl_to_instance_map->get (decl);
2931 | |
2932 | if (!inst) |
2933 | return 0; |
2934 | |
2935 | return *inst; |
2936 | } |
2937 | |
/* Return the discriminator to be used for LOC: the instance
   discriminator of the enclosing (possibly inlined) function when an
   instance map is in use, otherwise the discriminator recorded in LOC
   itself.  */
2939 | |
2940 | static inline int |
2941 | compute_discriminator (location_t loc) |
2942 | { |
2943 | int discriminator; |
2944 | |
2945 | if (!decl_to_instance_map) |
2946 | discriminator = get_discriminator_from_loc (loc); |
2947 | else |
2948 | { |
2949 | tree block = LOCATION_BLOCK (loc); |
2950 | |
2951 | while (block && TREE_CODE (block) == BLOCK |
2952 | && !inlined_function_outer_scope_p (block)) |
2953 | block = BLOCK_SUPERCONTEXT (block); |
2954 | |
2955 | tree decl; |
2956 | |
2957 | if (!block) |
2958 | decl = current_function_decl; |
2959 | else if (DECL_P (block)) |
2960 | decl = block; |
2961 | else |
2962 | decl = block_ultimate_origin (block); |
2963 | |
2964 | discriminator = map_decl_to_instance (decl); |
2965 | } |
2966 | |
2967 | return discriminator; |
2968 | } |
2969 | |
2970 | /* Return discriminator of the statement that produced this insn. */ |
2971 | int |
2972 | insn_discriminator (const rtx_insn *insn) |
2973 | { |
  return compute_discriminator (INSN_LOCATION (insn));
2975 | } |
2976 | |
2977 | /* Return whether a source line note needs to be emitted before INSN. |
2978 | Sets IS_STMT to TRUE if the line should be marked as a possible |
2979 | breakpoint location. */ |
2980 | |
2981 | static bool |
2982 | notice_source_line (rtx_insn *insn, bool *is_stmt) |
2983 | { |
2984 | const char *filename; |
2985 | int linenum, columnnum; |
2986 | int discriminator; |
2987 | |
2988 | if (NOTE_MARKER_P (insn)) |
2989 | { |
2990 | location_t loc = NOTE_MARKER_LOCATION (insn); |
2991 | expanded_location xloc = expand_location (loc); |
2992 | if (xloc.line == 0 |
2993 | && (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION |
2994 | || LOCATION_LOCUS (loc) == BUILTINS_LOCATION)) |
2995 | return false; |
2996 | |
2997 | filename = xloc.file; |
2998 | linenum = xloc.line; |
2999 | columnnum = xloc.column; |
3000 | discriminator = compute_discriminator (loc); |
3001 | force_source_line = true; |
3002 | } |
3003 | else if (override_filename) |
3004 | { |
3005 | filename = override_filename; |
3006 | linenum = override_linenum; |
3007 | columnnum = override_columnnum; |
3008 | discriminator = override_discriminator; |
3009 | } |
3010 | else if (INSN_HAS_LOCATION (insn)) |
3011 | { |
3012 | expanded_location xloc = insn_location (insn); |
3013 | filename = xloc.file; |
3014 | linenum = xloc.line; |
3015 | columnnum = xloc.column; |
3016 | discriminator = insn_discriminator (insn); |
3017 | } |
3018 | else |
3019 | { |
3020 | filename = NULL; |
3021 | linenum = 0; |
3022 | columnnum = 0; |
3023 | discriminator = 0; |
3024 | } |
3025 | |
3026 | if (filename == NULL) |
3027 | return false; |
3028 | |
3029 | if (force_source_line |
3030 | || filename != last_filename |
3031 | || last_linenum != linenum |
3032 | || (debug_column_info && last_columnnum != columnnum)) |
3033 | { |
3034 | force_source_line = false; |
3035 | last_filename = filename; |
3036 | last_linenum = linenum; |
3037 | last_columnnum = columnnum; |
3038 | last_discriminator = discriminator; |
3039 | if (is_stmt) |
3040 | *is_stmt = true; |
3041 | high_block_linenum = MAX (last_linenum, high_block_linenum); |
3042 | high_function_linenum = MAX (last_linenum, high_function_linenum); |
3043 | return true; |
3044 | } |
3045 | |
3046 | if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator) |
3047 | { |
3048 | /* If the discriminator changed, but the line number did not, |
3049 | output the line table entry with is_stmt false so the |
3050 | debugger does not treat this as a breakpoint location. */ |
3051 | last_discriminator = discriminator; |
3052 | if (is_stmt) |
3053 | *is_stmt = false; |
3054 | return true; |
3055 | } |
3056 | |
3057 | return false; |
3058 | } |
3059 | |
3060 | /* For each operand in INSN, simplify (subreg (reg)) so that it refers |
3061 | directly to the desired hard register. */ |
3062 | |
3063 | void |
3064 | cleanup_subreg_operands (rtx_insn *insn) |
3065 | { |
3066 | int i; |
3067 | bool changed = false; |
3068 | extract_insn_cached (insn); |
3069 | for (i = 0; i < recog_data.n_operands; i++) |
3070 | { |
3071 | /* The following test cannot use recog_data.operand when testing |
3072 | for a SUBREG: the underlying object might have been changed |
3073 | already if we are inside a match_operator expression that |
3074 | matches the else clause. Instead we test the underlying |
3075 | expression directly. */ |
3076 | if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG) |
3077 | { |
3078 | recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true); |
3079 | changed = true; |
3080 | } |
3081 | else if (GET_CODE (recog_data.operand[i]) == PLUS |
3082 | || GET_CODE (recog_data.operand[i]) == MULT |
3083 | || MEM_P (recog_data.operand[i])) |
3084 | recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed); |
3085 | } |
3086 | |
3087 | for (i = 0; i < recog_data.n_dups; i++) |
3088 | { |
3089 | if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG) |
3090 | { |
3091 | *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true); |
3092 | changed = true; |
3093 | } |
3094 | else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS |
3095 | || GET_CODE (*recog_data.dup_loc[i]) == MULT |
3096 | || MEM_P (*recog_data.dup_loc[i])) |
3097 | *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed); |
3098 | } |
3099 | if (changed) |
3100 | df_insn_rescan (insn); |
3101 | } |
3102 | |
3103 | /* If X is a SUBREG, try to replace it with a REG or a MEM, based on |
3104 | the thing it is a subreg of. Do it anyway if FINAL_P. */ |
3105 | |
3106 | rtx |
3107 | alter_subreg (rtx *xp, bool final_p) |
3108 | { |
3109 | rtx x = *xp; |
3110 | rtx y = SUBREG_REG (x); |
3111 | |
3112 | /* simplify_subreg does not remove subreg from volatile references. |
3113 | We are required to. */ |
3114 | if (MEM_P (y)) |
3115 | { |
3116 | poly_int64 offset = SUBREG_BYTE (x); |
3117 | |
3118 | /* For paradoxical subregs on big-endian machines, SUBREG_BYTE |
3119 | contains 0 instead of the proper offset. See simplify_subreg. */ |
3120 | if (paradoxical_subreg_p (x)) |
3121 | offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y)); |
3122 | |
3123 | if (final_p) |
3124 | *xp = adjust_address (y, GET_MODE (x), offset); |
3125 | else |
3126 | *xp = adjust_address_nv (y, GET_MODE (x), offset); |
3127 | } |
3128 | else if (REG_P (y) && HARD_REGISTER_P (y)) |
3129 | { |
      rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3131 | SUBREG_BYTE (x)); |
3132 | |
3133 | if (new_rtx != 0) |
3134 | *xp = new_rtx; |
3135 | else if (final_p && REG_P (y)) |
3136 | { |
3137 | /* Simplify_subreg can't handle some REG cases, but we have to. */ |
3138 | unsigned int regno; |
3139 | poly_int64 offset; |
3140 | |
3141 | regno = subreg_regno (x); |
3142 | if (subreg_lowpart_p (x)) |
3143 | offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y)); |
3144 | else |
3145 | offset = SUBREG_BYTE (x); |
3146 | *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset); |
3147 | } |
3148 | } |
3149 | |
3150 | return *xp; |
3151 | } |
3152 | |
3153 | /* Do alter_subreg on all the SUBREGs contained in X. */ |
3154 | |
3155 | static rtx |
3156 | walk_alter_subreg (rtx *xp, bool *changed) |
3157 | { |
3158 | rtx x = *xp; |
3159 | switch (GET_CODE (x)) |
3160 | { |
3161 | case PLUS: |
3162 | case MULT: |
3163 | case AND: |
3164 | case ASHIFT: |
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
      XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3167 | break; |
3168 | |
3169 | case MEM: |
3170 | case ZERO_EXTEND: |
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3172 | break; |
3173 | |
3174 | case SUBREG: |
3175 | *changed = true; |
      return alter_subreg (xp, true);
3177 | |
3178 | default: |
3179 | break; |
3180 | } |
3181 | |
3182 | return *xp; |
3183 | } |
3184 | |
3185 | /* Report inconsistency between the assembler template and the operands. |
3186 | In an `asm', it's the user's fault; otherwise, the compiler's fault. */ |
3187 | |
3188 | void |
3189 | output_operand_lossage (const char *cmsgid, ...) |
3190 | { |
3191 | char *fmt_string; |
3192 | char *new_message; |
3193 | const char *pfx_str; |
3194 | va_list ap; |
3195 | |
3196 | va_start (ap, cmsgid); |
3197 | |
  pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
  fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3200 | new_message = xvasprintf (fmt_string, ap); |
3201 | |
3202 | if (this_is_asm_operands) |
    error_for_asm (this_is_asm_operands, "%s", new_message);
3204 | else |
    internal_error ("%s", new_message);
3206 | |
  free (fmt_string);
  free (new_message);
3209 | va_end (ap); |
3210 | } |
3211 | |
3212 | /* Output of assembler code from a template, and its subroutines. */ |
3213 | |
3214 | /* Annotate the assembly with a comment describing the pattern and |
3215 | alternative used. */ |
3216 | |
3217 | static void |
3218 | output_asm_name (void) |
3219 | { |
3220 | if (debug_insn) |
3221 | { |
      fprintf (asm_out_file, "\t%s %d\t",
	       ASM_COMMENT_START, INSN_UID (debug_insn));

      fprintf (asm_out_file, "[c=%d",
	       insn_cost (debug_insn, optimize_insn_for_speed_p ()));
      if (HAVE_ATTR_length)
	fprintf (asm_out_file, " l=%d",
		 get_attr_length (debug_insn));
      fprintf (asm_out_file, "] ");

      int num = INSN_CODE (debug_insn);
      fprintf (asm_out_file, "%s", insn_data[num].name);
      if (insn_data[num].n_alternatives > 1)
	fprintf (asm_out_file, "/%d", which_alternative);
3236 | |
3237 | /* Clear this so only the first assembler insn |
3238 | of any rtl insn will get the special comment for -dp. */ |
3239 | debug_insn = 0; |
3240 | } |
3241 | } |
3242 | |
/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
   or its address, return that expr.  Set *PADDRESSP to 1 if the expr
   corresponds to the address of the object and 0 if to the object.  */
3246 | |
3247 | static tree |
3248 | get_mem_expr_from_op (rtx op, int *paddressp) |
3249 | { |
3250 | tree expr; |
3251 | int inner_addressp; |
3252 | |
3253 | *paddressp = 0; |
3254 | |
3255 | if (REG_P (op)) |
3256 | return REG_EXPR (op); |
3257 | else if (!MEM_P (op)) |
3258 | return 0; |
3259 | |
3260 | if (MEM_EXPR (op) != 0) |
3261 | return MEM_EXPR (op); |
3262 | |
3263 | /* Otherwise we have an address, so indicate it and look at the address. */ |
3264 | *paddressp = 1; |
3265 | op = XEXP (op, 0); |
3266 | |
3267 | /* First check if we have a decl for the address, then look at the right side |
3268 | if it is a PLUS. Otherwise, strip off arithmetic and keep looking. |
3269 | But don't allow the address to itself be indirect. */ |
  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3271 | return expr; |
3272 | else if (GET_CODE (op) == PLUS |
	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3274 | return expr; |
3275 | |
3276 | while (UNARY_P (op) |
3277 | || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH) |
3278 | op = XEXP (op, 0); |
3279 | |
3280 | expr = get_mem_expr_from_op (op, paddressp: &inner_addressp); |
3281 | return inner_addressp ? 0 : expr; |
3282 | } |
3283 | |
3284 | /* Output operand names for assembler instructions. OPERANDS is the |
3285 | operand vector, OPORDER is the order to write the operands, and NOPS |
3286 | is the number of operands to write. */ |
3287 | |
3288 | static void |
3289 | output_asm_operand_names (rtx *operands, int *oporder, int nops) |
3290 | { |
3291 | int wrote = 0; |
3292 | int i; |
3293 | |
3294 | for (i = 0; i < nops; i++) |
3295 | { |
3296 | int addressp; |
3297 | rtx op = operands[oporder[i]]; |
      tree expr = get_mem_expr_from_op (op, &addressp);

      fprintf (asm_out_file, "%c%s",
	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
      wrote = 1;
      if (expr)
	{
	  fprintf (asm_out_file, "%s",
		   addressp ? "*" : "");
	  print_mem_expr (asm_out_file, expr);
	  wrote = 1;
	}
      else if (REG_P (op) && ORIGINAL_REGNO (op)
	       && ORIGINAL_REGNO (op) != REGNO (op))
	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3313 | } |
3314 | } |
3315 | |
3316 | #ifdef ASSEMBLER_DIALECT |
3317 | /* Helper function to parse assembler dialects in the asm string. |
3318 | This is called from output_asm_insn and asm_fprintf. */ |
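/* For example, on a port with two assembler dialects a template fragment
   such as "{ldw|ld.w}" emits "ldw" when dialect_number is 0 and "ld.w"
   when it is 1; the braces and '|' are consumed here and never reach the
   assembler output.  (The mnemonics are purely illustrative.)  */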
3319 | static const char * |
3320 | do_assembler_dialects (const char *p, int *dialect) |
3321 | { |
3322 | char c = *(p - 1); |
3323 | |
3324 | switch (c) |
3325 | { |
3326 | case '{': |
3327 | { |
3328 | int i; |
3329 | |
3330 | if (*dialect) |
	  output_operand_lossage ("nested assembly dialect alternatives");
3332 | else |
3333 | *dialect = 1; |
3334 | |
3335 | /* If we want the first dialect, do nothing. Otherwise, skip |
3336 | DIALECT_NUMBER of strings ending with '|'. */ |
3337 | for (i = 0; i < dialect_number; i++) |
3338 | { |
3339 | while (*p && *p != '}') |
3340 | { |
3341 | if (*p == '|') |
3342 | { |
3343 | p++; |
3344 | break; |
3345 | } |
3346 | |
3347 | /* Skip over any character after a percent sign. */ |
3348 | if (*p == '%') |
3349 | p++; |
3350 | if (*p) |
3351 | p++; |
3352 | } |
3353 | |
3354 | if (*p == '}') |
3355 | break; |
3356 | } |
3357 | |
3358 | if (*p == '\0') |
	  output_operand_lossage ("unterminated assembly dialect alternative");
3360 | } |
3361 | break; |
3362 | |
3363 | case '|': |
3364 | if (*dialect) |
3365 | { |
3366 | /* Skip to close brace. */ |
3367 | do |
3368 | { |
3369 | if (*p == '\0') |
3370 | { |
		  output_operand_lossage ("unterminated assembly dialect alternative");
3372 | break; |
3373 | } |
3374 | |
3375 | /* Skip over any character after a percent sign. */ |
3376 | if (*p == '%' && p[1]) |
3377 | { |
3378 | p += 2; |
3379 | continue; |
3380 | } |
3381 | |
3382 | if (*p++ == '}') |
3383 | break; |
3384 | } |
3385 | while (1); |
3386 | |
3387 | *dialect = 0; |
3388 | } |
3389 | else |
	putc (c, asm_out_file);
      break;

    case '}':
      if (! *dialect)
	putc (c, asm_out_file);
3396 | *dialect = 0; |
3397 | break; |
3398 | default: |
3399 | gcc_unreachable (); |
3400 | } |
3401 | |
3402 | return p; |
3403 | } |
3404 | #endif |
3405 | |
3406 | /* Output text from TEMPLATE to the assembler output file, |
3407 | obeying %-directions to substitute operands taken from |
3408 | the vector OPERANDS. |
3409 | |
3410 | %N (for N a digit) means print operand N in usual manner. |
3411 | %lN means require operand N to be a CODE_LABEL or LABEL_REF |
3412 | and print the label name with no punctuation. |
3413 | %cN means require operand N to be a constant |
3414 | and print the constant expression with no punctuation. |
3415 | %aN means expect operand N to be a memory address |
3416 | (not a memory reference!) and print a reference |
3417 | to that address. |
3418 | %nN means expect operand N to be a constant |
3419 | and print a constant expression for minus the value |
3420 | of the operand, with no other punctuation. */ |
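/* A purely illustrative example: on an AT&T-syntax target, a call such as

     output_asm_insn ("addl\t%1, %0", operands);

   with operand 0 the register %eax and operand 1 the constant 4 would emit

	addl	$4, %eax

   the text of each operand being produced by the target's
   TARGET_PRINT_OPERAND hook.  */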
3421 | |
3422 | void |
3423 | output_asm_insn (const char *templ, rtx *operands) |
3424 | { |
3425 | const char *p; |
3426 | int c; |
3427 | #ifdef ASSEMBLER_DIALECT |
3428 | int dialect = 0; |
3429 | #endif |
3430 | int oporder[MAX_RECOG_OPERANDS]; |
3431 | char opoutput[MAX_RECOG_OPERANDS]; |
3432 | int ops = 0; |
3433 | |
3434 | /* An insn may return a null string template |
3435 | in a case where no assembler code is needed. */ |
3436 | if (*templ == 0) |
3437 | return; |
3438 | |
  memset (opoutput, 0, sizeof opoutput);
  p = templ;
  putc ('\t', asm_out_file);
3442 | |
3443 | #ifdef ASM_OUTPUT_OPCODE |
3444 | ASM_OUTPUT_OPCODE (asm_out_file, p); |
3445 | #endif |
3446 | |
3447 | while ((c = *p++)) |
3448 | switch (c) |
3449 | { |
3450 | case '\n': |
3451 | if (flag_verbose_asm) |
	  output_asm_operand_names (operands, oporder, ops);
	if (flag_print_asm_name)
	  output_asm_name ();

	ops = 0;
	memset (opoutput, 0, sizeof opoutput);

	putc (c, asm_out_file);
#ifdef ASM_OUTPUT_OPCODE
	while ((c = *p) == '\t')
	  {
	    putc (c, asm_out_file);
3464 | p++; |
3465 | } |
3466 | ASM_OUTPUT_OPCODE (asm_out_file, p); |
3467 | #endif |
3468 | break; |
3469 | |
3470 | #ifdef ASSEMBLER_DIALECT |
3471 | case '{': |
3472 | case '}': |
3473 | case '|': |
	p = do_assembler_dialects (p, &dialect);
3475 | break; |
3476 | #endif |
3477 | |
3478 | case '%': |
	/* %% outputs a single %.  %{, %} and %| print {, } and | respectively
	   if ASSEMBLER_DIALECT is defined and these characters have a special
	   meaning as dialect delimiters.  */
3482 | if (*p == '%' |
3483 | #ifdef ASSEMBLER_DIALECT |
3484 | || *p == '{' || *p == '}' || *p == '|' |
3485 | #endif |
3486 | ) |
3487 | { |
	    putc (*p, asm_out_file);
3489 | p++; |
3490 | } |
3491 | /* %= outputs a number which is unique to each insn in the entire |
3492 | compilation. This is useful for making local labels that are |
3493 | referred to more than once in a given insn. */ |
3494 | else if (*p == '=') |
3495 | { |
3496 | p++; |
	    fprintf (asm_out_file, "%d", insn_counter);
3498 | } |
3499 | /* % followed by a letter and some digits |
3500 | outputs an operand in a special way depending on the letter. |
3501 | Letters `acln' are implemented directly. |
3502 | Other letters are passed to `output_operand' so that |
3503 | the TARGET_PRINT_OPERAND hook can define them. */ |
3504 | else if (ISALPHA (*p)) |
3505 | { |
3506 | int letter = *p++; |
3507 | unsigned long opnum; |
3508 | char *endptr; |
3509 | int letter2 = 0; |
3510 | |
3511 | if (letter == 'c' && *p == 'c') |
3512 | letter2 = *p++; |
	    opnum = strtoul (p, &endptr, 10);

	    if (endptr == p)
	      output_operand_lossage ("operand number missing "
				      "after %%-letter");
	    else if (this_is_asm_operands && opnum >= insn_noperands)
	      output_operand_lossage ("operand number out of range");
3520 | else if (letter == 'l') |
3521 | output_asm_label (operands[opnum]); |
3522 | else if (letter == 'a') |
3523 | output_address (VOIDmode, operands[opnum]); |
3524 | else if (letter == 'c') |
3525 | { |
3526 | if (letter2 == 'c' || CONSTANT_ADDRESS_P (operands[opnum])) |
3527 | output_addr_const (asm_out_file, operands[opnum]); |
3528 | else |
3529 | output_operand (operands[opnum], 'c'); |
3530 | } |
3531 | else if (letter == 'n') |
3532 | { |
3533 | if (CONST_INT_P (operands[opnum])) |
		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
			   - INTVAL (operands[opnum]));
		else
		  {
		    putc ('-', asm_out_file);
3539 | output_addr_const (asm_out_file, operands[opnum]); |
3540 | } |
3541 | } |
3542 | else |
3543 | output_operand (operands[opnum], letter); |
3544 | |
3545 | if (!opoutput[opnum]) |
3546 | oporder[ops++] = opnum; |
3547 | opoutput[opnum] = 1; |
3548 | |
3549 | p = endptr; |
3550 | c = *p; |
3551 | } |
3552 | /* % followed by a digit outputs an operand the default way. */ |
3553 | else if (ISDIGIT (*p)) |
3554 | { |
3555 | unsigned long opnum; |
3556 | char *endptr; |
3557 | |
	    opnum = strtoul (p, &endptr, 10);
	    if (this_is_asm_operands && opnum >= insn_noperands)
	      output_operand_lossage ("operand number out of range");
3561 | else |
3562 | output_operand (operands[opnum], 0); |
3563 | |
3564 | if (!opoutput[opnum]) |
3565 | oporder[ops++] = opnum; |
3566 | opoutput[opnum] = 1; |
3567 | |
3568 | p = endptr; |
3569 | c = *p; |
3570 | } |
3571 | /* % followed by punctuation: output something for that |
3572 | punctuation character alone, with no operand. The |
3573 | TARGET_PRINT_OPERAND hook decides what is actually done. */ |
3574 | else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p)) |
3575 | output_operand (NULL_RTX, *p++); |
3576 | else |
	  output_operand_lossage ("invalid %%-code");
3578 | break; |
3579 | |
3580 | default: |
	putc (c, asm_out_file);
3582 | } |
3583 | |
3584 | /* Try to keep the asm a bit more readable. */ |
  if ((flag_verbose_asm || flag_print_asm_name) && strlen (templ) < 9)
    putc ('\t', asm_out_file);

  /* Write out the variable names for operands, if we know them.  */
  if (flag_verbose_asm)
    output_asm_operand_names (operands, oporder, ops);
  if (flag_print_asm_name)
    output_asm_name ();

  putc ('\n', asm_out_file);
3595 | } |
3596 | |
3597 | /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */ |
3598 | |
3599 | void |
3600 | output_asm_label (rtx x) |
3601 | { |
3602 | char buf[256]; |
3603 | |
3604 | if (GET_CODE (x) == LABEL_REF) |
    x = label_ref_label (x);
  if (LABEL_P (x)
      || (NOTE_P (x)
	  && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
  else
    output_operand_lossage ("'%%l' operand isn't a label");
3612 | |
3613 | assemble_name (asm_out_file, buf); |
3614 | } |
3615 | |
3616 | /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */ |
3617 | |
3618 | void |
3619 | mark_symbol_refs_as_used (rtx x) |
3620 | { |
3621 | subrtx_iterator::array_type array; |
3622 | FOR_EACH_SUBRTX (iter, array, x, ALL) |
3623 | { |
3624 | const_rtx x = *iter; |
3625 | if (GET_CODE (x) == SYMBOL_REF) |
3626 | if (tree t = SYMBOL_REF_DECL (x)) |
3627 | assemble_external (t); |
3628 | } |
3629 | } |
3630 | |
3631 | /* Print operand X using machine-dependent assembler syntax. |
3632 | CODE is a non-digit that preceded the operand-number in the % spec, |
3633 | such as 'z' if the spec was `%z3'. CODE is 0 if there was no char |
3634 | between the % and the digits. |
3635 | When CODE is a non-letter, X is 0. |
3636 | |
3637 | The meanings of the letters are machine-dependent and controlled |
3638 | by TARGET_PRINT_OPERAND. */ |
3639 | |
3640 | void |
3641 | output_operand (rtx x, int code ATTRIBUTE_UNUSED) |
3642 | { |
3643 | if (x && GET_CODE (x) == SUBREG) |
    x = alter_subreg (&x, true);
3645 | |
3646 | /* X must not be a pseudo reg. */ |
3647 | if (!targetm.no_register_allocation) |
3648 | gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER); |
3649 | |
3650 | targetm.asm_out.print_operand (asm_out_file, x, code); |
3651 | |
3652 | if (x == NULL_RTX) |
3653 | return; |
3654 | |
3655 | mark_symbol_refs_as_used (x); |
3656 | } |
3657 | |
3658 | /* Print a memory reference operand for address X using |
3659 | machine-dependent assembler syntax. */ |
3660 | |
3661 | void |
3662 | output_address (machine_mode mode, rtx x) |
3663 | { |
3664 | bool changed = false; |
  walk_alter_subreg (&x, &changed);
3666 | targetm.asm_out.print_operand_address (asm_out_file, mode, x); |
3667 | } |
3668 | |
3669 | /* Print an integer constant expression in assembler syntax. |
3670 | Addition and subtraction are the only arithmetic |
3671 | that may appear in these expressions. */ |
3672 | |
3673 | void |
3674 | output_addr_const (FILE *file, rtx x) |
3675 | { |
3676 | char buf[256]; |
3677 | |
3678 | restart: |
3679 | switch (GET_CODE (x)) |
3680 | { |
3681 | case PC: |
      putc ('.', file);
3683 | break; |
3684 | |
3685 | case SYMBOL_REF: |
3686 | if (SYMBOL_REF_DECL (x)) |
3687 | assemble_external (SYMBOL_REF_DECL (x)); |
3688 | #ifdef ASM_OUTPUT_SYMBOL_REF |
3689 | ASM_OUTPUT_SYMBOL_REF (file, x); |
3690 | #else |
3691 | assemble_name (file, XSTR (x, 0)); |
3692 | #endif |
3693 | break; |
3694 | |
3695 | case LABEL_REF: |
      x = label_ref_label (x);
      /* Fall through.  */
    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3700 | #ifdef ASM_OUTPUT_LABEL_REF |
3701 | ASM_OUTPUT_LABEL_REF (file, buf); |
3702 | #else |
3703 | assemble_name (file, buf); |
3704 | #endif |
3705 | break; |
3706 | |
3707 | case CONST_INT: |
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3709 | break; |
3710 | |
3711 | case CONST: |
3712 | /* This used to output parentheses around the expression, |
3713 | but that does not work on the 386 (either ATT or BSD assembler). */ |
3714 | output_addr_const (file, XEXP (x, 0)); |
3715 | break; |
3716 | |
3717 | case CONST_WIDE_INT: |
      /* We do not know the mode here so we have to use a roundabout
	 way to build a wide-int to get it printed properly.  */
      {
	wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
					   CONST_WIDE_INT_NUNITS (x),
					   CONST_WIDE_INT_NUNITS (x)
					   * HOST_BITS_PER_WIDE_INT,
					   false);
	print_decs (w, file);
3727 | } |
3728 | break; |
3729 | |
3730 | case CONST_DOUBLE: |
3731 | if (CONST_DOUBLE_AS_INT_P (x)) |
3732 | { |
3733 | /* We can use %d if the number is one word and positive. */ |
3734 | if (CONST_DOUBLE_HIGH (x)) |
	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
	  else if (CONST_DOUBLE_LOW (x) < 0)
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
	  else
	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
	}
      else
	/* We can't handle floating point constants;
	   PRINT_OPERAND must handle them.  */
	output_operand_lossage ("floating constant misused");
      break;

    case CONST_FIXED:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3752 | break; |
3753 | |
3754 | case PLUS: |
3755 | /* Some assemblers need integer constants to appear last (eg masm). */ |
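      /* E.g. (plus (const_int 4) (symbol_ref "foo")) is printed as
	 "foo+4" rather than "4+foo".  */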
3756 | if (CONST_INT_P (XEXP (x, 0))) |
3757 | { |
3758 | output_addr_const (file, XEXP (x, 1)); |
3759 | if (INTVAL (XEXP (x, 0)) >= 0) |
	    fprintf (file, "+");
	  output_addr_const (file, XEXP (x, 0));
	}
      else
	{
	  output_addr_const (file, XEXP (x, 0));
	  if (!CONST_INT_P (XEXP (x, 1))
	      || INTVAL (XEXP (x, 1)) >= 0)
	    fprintf (file, "+");
3769 | output_addr_const (file, XEXP (x, 1)); |
3770 | } |
3771 | break; |
3772 | |
3773 | case MINUS: |
3774 | /* Avoid outputting things like x-x or x+5-x, |
3775 | since some assemblers can't handle that. */ |
3776 | x = simplify_subtraction (x); |
3777 | if (GET_CODE (x) != MINUS) |
3778 | goto restart; |
3779 | |
3780 | output_addr_const (file, XEXP (x, 0)); |
      fprintf (file, "-");
      if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
	  || GET_CODE (XEXP (x, 1)) == PC
	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
	output_addr_const (file, XEXP (x, 1));
      else
	{
	  fputs (targetm.asm_out.open_paren, file);
	  output_addr_const (file, XEXP (x, 1));
	  fputs (targetm.asm_out.close_paren, file);
3791 | } |
3792 | break; |
3793 | |
3794 | case ZERO_EXTEND: |
3795 | case SIGN_EXTEND: |
3796 | case SUBREG: |
3797 | case TRUNCATE: |
3798 | output_addr_const (file, XEXP (x, 0)); |
3799 | break; |
3800 | |
3801 | default: |
3802 | if (targetm.asm_out.output_addr_const_extra (file, x)) |
3803 | break; |
3804 | |
      output_operand_lossage ("invalid expression as operand");
3806 | } |
3807 | } |
3808 | |
3809 | /* Output a quoted string. */ |
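/* For example, an input string consisting of  say "hi"  followed by a
   newline comes out as  "say \"hi\"\012"; printable characters are copied
   (with '"' and '\' escaped) and everything else becomes a three-digit
   octal escape.  */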
3810 | |
3811 | void |
3812 | output_quoted_string (FILE *asm_file, const char *string) |
3813 | { |
3814 | #ifdef OUTPUT_QUOTED_STRING |
3815 | OUTPUT_QUOTED_STRING (asm_file, string); |
3816 | #else |
3817 | char c; |
3818 | |
  putc ('\"', asm_file);
  while ((c = *string++) != 0)
    {
      if (ISPRINT (c))
	{
	  if (c == '\"' || c == '\\')
	    putc ('\\', asm_file);
	  putc (c, asm_file);
	}
      else
	fprintf (asm_file, "\\%03o", (unsigned char) c);
    }
  putc ('\"', asm_file);
3832 | #endif |
3833 | } |
3834 | |
3835 | /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */ |
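/* E.g. fprint_whex (f, 48879) writes "0xbeef", while a value of zero is
   written as just "0"; the digits are generated backwards into a small
   local buffer and flushed with a single fwrite.  */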
3836 | |
3837 | void |
3838 | fprint_whex (FILE *f, unsigned HOST_WIDE_INT value) |
3839 | { |
3840 | char buf[2 + CHAR_BIT * sizeof (value) / 4]; |
3841 | if (value == 0) |
    putc ('0', f);
  else
    {
      char *p = buf + sizeof (buf);
      do
	*--p = "0123456789abcdef"[value % 16];
      while ((value /= 16) != 0);
      *--p = 'x';
      *--p = '0';
      fwrite (p, 1, buf + sizeof (buf) - p, f);
3852 | } |
3853 | } |
3854 | |
3855 | /* Internal function that prints an unsigned long in decimal in reverse. |
3856 | The output string IS NOT null-terminated. */ |
3857 | |
3858 | static int |
3859 | sprint_ul_rev (char *s, unsigned long value) |
3860 | { |
3861 | int i = 0; |
3862 | do |
3863 | { |
3864 | s[i] = "0123456789" [value % 10]; |
3865 | value /= 10; |
3866 | i++; |
3867 | /* alternate version, without modulo */ |
3868 | /* oldval = value; */ |
3869 | /* value /= 10; */ |
3870 | /* s[i] = "0123456789" [oldval - 10*value]; */ |
3871 | /* i++ */ |
3872 | } |
3873 | while (value != 0); |
3874 | return i; |
3875 | } |
3876 | |
3877 | /* Write an unsigned long as decimal to a file, fast. */ |
3878 | |
3879 | void |
3880 | fprint_ul (FILE *f, unsigned long value) |
3881 | { |
3882 | /* python says: len(str(2**64)) == 20 */ |
3883 | char s[20]; |
3884 | int i; |
3885 | |
3886 | i = sprint_ul_rev (s, value); |
3887 | |
3888 | /* It's probably too small to bother with string reversal and fputs. */ |
3889 | do |
3890 | { |
3891 | i--; |
      putc (s[i], f);
3893 | } |
3894 | while (i != 0); |
3895 | } |
3896 | |
3897 | /* Write an unsigned long as decimal to a string, fast. |
3898 | s must be wide enough to not overflow, at least 21 chars. |
3899 | Returns the length of the string (without terminating '\0'). */ |
3900 | |
3901 | int |
3902 | sprint_ul (char *s, unsigned long value) |
3903 | { |
3904 | int len = sprint_ul_rev (s, value); |
3905 | s[len] = '\0'; |
3906 | |
  std::reverse (s, s + len);
3908 | return len; |
3909 | } |
3910 | |
3911 | /* A poor man's fprintf, with the added features of %I, %R, %L, and %U. |
3912 | %R prints the value of REGISTER_PREFIX. |
3913 | %L prints the value of LOCAL_LABEL_PREFIX. |
3914 | %U prints the value of USER_LABEL_PREFIX. |
3915 | %I prints the value of IMMEDIATE_PREFIX. |
3916 | %O runs ASM_OUTPUT_OPCODE to transform what follows in the string. |
3917 | Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%. |
3918 | |
3919 | We handle alternate assembler dialects here, just like output_asm_insn. */ |
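/* A purely illustrative use from a backend might be

     asm_fprintf (file, "\tpush\t%R%s\n", reg_names[regno]);

   (regno being some hard register number), which prints the target's
   REGISTER_PREFIX, possibly empty, followed by the register name.  */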
3920 | |
3921 | void |
3922 | asm_fprintf (FILE *file, const char *p, ...) |
3923 | { |
3924 | char buf[10]; |
3925 | char *q, c; |
3926 | #ifdef ASSEMBLER_DIALECT |
3927 | int dialect = 0; |
3928 | #endif |
3929 | va_list argptr; |
3930 | |
3931 | va_start (argptr, p); |
3932 | |
3933 | buf[0] = '%'; |
3934 | |
3935 | while ((c = *p++)) |
3936 | switch (c) |
3937 | { |
3938 | #ifdef ASSEMBLER_DIALECT |
3939 | case '{': |
3940 | case '}': |
3941 | case '|': |
3942 | p = do_assembler_dialects (p, dialect: &dialect); |
3943 | break; |
3944 | #endif |
3945 | |
3946 | case '%': |
3947 | c = *p++; |
3948 | q = &buf[1]; |
      while (strchr ("-+ #0", c))
3950 | { |
3951 | *q++ = c; |
3952 | c = *p++; |
3953 | } |
3954 | while (ISDIGIT (c) || c == '.') |
3955 | { |
3956 | *q++ = c; |
3957 | c = *p++; |
3958 | } |
3959 | switch (c) |
3960 | { |
3961 | case '%': |
	  putc ('%', file);
3963 | break; |
3964 | |
3965 | case 'd': case 'i': case 'u': |
3966 | case 'x': case 'X': case 'o': |
3967 | case 'c': |
3968 | *q++ = c; |
3969 | *q = 0; |
	  fprintf (file, buf, va_arg (argptr, int));
3971 | break; |
3972 | |
3973 | case 'w': |
3974 | /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and |
3975 | 'o' cases, but we do not check for those cases. It |
3976 | means that the value is a HOST_WIDE_INT, which may be |
3977 | either `long' or `long long'. */ |
	  memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
	  q += strlen (HOST_WIDE_INT_PRINT);
	  *q++ = *p++;
	  *q = 0;
	  fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3983 | break; |
3984 | |
3985 | case 'l': |
3986 | *q++ = c; |
3987 | #ifdef HAVE_LONG_LONG |
3988 | if (*p == 'l') |
3989 | { |
3990 | *q++ = *p++; |
3991 | *q++ = *p++; |
3992 | *q = 0; |
	      fprintf (file, buf, va_arg (argptr, long long));
	    }
	  else
#endif
	    {
	      *q++ = *p++;
	      *q = 0;
	      fprintf (file, buf, va_arg (argptr, long));
4001 | } |
4002 | |
4003 | break; |
4004 | |
4005 | case 's': |
4006 | *q++ = c; |
4007 | *q = 0; |
	  fprintf (file, buf, va_arg (argptr, char *));
4009 | break; |
4010 | |
4011 | case 'O': |
4012 | #ifdef ASM_OUTPUT_OPCODE |
4013 | ASM_OUTPUT_OPCODE (asm_out_file, p); |
4014 | #endif |
4015 | break; |
4016 | |
4017 | case 'R': |
4018 | #ifdef REGISTER_PREFIX |
	  fprintf (file, "%s", REGISTER_PREFIX);
#endif
	  break;

	case 'I':
#ifdef IMMEDIATE_PREFIX
	  fprintf (file, "%s", IMMEDIATE_PREFIX);
#endif
	  break;

	case 'L':
#ifdef LOCAL_LABEL_PREFIX
	  fprintf (file, "%s", LOCAL_LABEL_PREFIX);
#endif
	  break;

	case 'U':
	  fputs (user_label_prefix, file);
4037 | break; |
4038 | |
4039 | #ifdef ASM_FPRINTF_EXTENSIONS |
4040 | /* Uppercase letters are reserved for general use by asm_fprintf |
4041 | and so are not available to target specific code. In order to |
4042 | prevent the ASM_FPRINTF_EXTENSIONS macro from using them then, |
4043 | they are defined here. As they get turned into real extensions |
4044 | to asm_fprintf they should be removed from this list. */ |
4045 | case 'A': case 'B': case 'C': case 'D': case 'E': |
4046 | case 'F': case 'G': case 'H': case 'J': case 'K': |
4047 | case 'M': case 'N': case 'P': case 'Q': case 'S': |
4048 | case 'T': case 'V': case 'W': case 'Y': case 'Z': |
4049 | break; |
4050 | |
4051 | ASM_FPRINTF_EXTENSIONS (file, argptr, p) |
4052 | #endif |
4053 | default: |
4054 | gcc_unreachable (); |
4055 | } |
4056 | break; |
4057 | |
4058 | default: |
	putc (c, file);
4060 | } |
4061 | va_end (argptr); |
4062 | } |
4063 | |
4064 | /* Return true if this function has no function calls. */ |
4065 | |
4066 | bool |
4067 | leaf_function_p (void) |
4068 | { |
4069 | rtx_insn *insn; |
4070 | |
4071 | /* Ensure we walk the entire function body. */ |
4072 | gcc_assert (!in_sequence_p ()); |
4073 | |
4074 | /* Some back-ends (e.g. s390) want leaf functions to stay leaf |
4075 | functions even if they call mcount. */ |
4076 | if (crtl->profile && !targetm.keep_leaf_when_profiled ()) |
4077 | return false; |
4078 | |
4079 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
4080 | { |
4081 | if (CALL_P (insn) |
4082 | && ! SIBLING_CALL_P (insn) |
4083 | && ! FAKE_CALL_P (insn)) |
4084 | return false; |
4085 | if (NONJUMP_INSN_P (insn) |
4086 | && GET_CODE (PATTERN (insn)) == SEQUENCE |
4087 | && CALL_P (XVECEXP (PATTERN (insn), 0, 0)) |
4088 | && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0))) |
4089 | return false; |
4090 | } |
4091 | |
4092 | return true; |
4093 | } |
4094 | |
/* Return true if branch is a forward branch.
   Uses insn_shuid array, so it works only in the final pass.  May be used by
   output templates to add branch prediction hints, for example.  */
4099 | bool |
4100 | final_forward_branch_p (rtx_insn *insn) |
4101 | { |
4102 | int insn_id, label_id; |
4103 | |
4104 | gcc_assert (uid_shuid); |
4105 | insn_id = INSN_SHUID (insn); |
4106 | label_id = INSN_SHUID (JUMP_LABEL (insn)); |
  /* We've hit some insns that do not have id information available.  */
4108 | gcc_assert (insn_id && label_id); |
4109 | return insn_id < label_id; |
4110 | } |
4111 | |
4112 | /* On some machines, a function with no call insns |
4113 | can run faster if it doesn't create its own register window. |
4114 | When output, the leaf function should use only the "output" |
4115 | registers. Ordinarily, the function would be compiled to use |
4116 | the "input" registers to find its arguments; it is a candidate |
4117 | for leaf treatment if it uses only the "input" registers. |
4118 | Leaf function treatment means renumbering so the function |
4119 | uses the "output" registers instead. */ |
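/* On a register-window target such as SPARC, for instance, a leaf function
   can have its uses of the incoming %i registers remapped (through
   LEAF_REG_REMAP) onto the corresponding %o registers, so that no register
   window needs to be allocated for it.  */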
4120 | |
4121 | #ifdef LEAF_REGISTERS |
4122 | |
/* Return true if this function uses only the registers that can be
   safely renumbered.  */
4125 | |
4126 | bool |
4127 | only_leaf_regs_used (void) |
4128 | { |
4129 | int i; |
4130 | const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS; |
4131 | |
4132 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
4133 | if ((df_regs_ever_live_p (i) || global_regs[i]) |
4134 | && ! permitted_reg_in_leaf_functions[i]) |
4135 | return false; |
4136 | |
4137 | if (crtl->uses_pic_offset_table |
4138 | && pic_offset_table_rtx != 0 |
4139 | && REG_P (pic_offset_table_rtx) |
4140 | && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)]) |
4141 | return false; |
4142 | |
4143 | return true; |
4144 | } |
4145 | |
4146 | /* Scan all instructions and renumber all registers into those |
4147 | available in leaf functions. */ |
4148 | |
4149 | static void |
4150 | leaf_renumber_regs (rtx_insn *first) |
4151 | { |
4152 | rtx_insn *insn; |
4153 | |
4154 | /* Renumber only the actual patterns. |
4155 | The reg-notes can contain frame pointer refs, |
4156 | and renumbering them could crash, and should not be needed. */ |
4157 | for (insn = first; insn; insn = NEXT_INSN (insn)) |
4158 | if (INSN_P (insn)) |
4159 | leaf_renumber_regs_insn (PATTERN (insn)); |
4160 | } |
4161 | |
4162 | /* Scan IN_RTX and its subexpressions, and renumber all regs into those |
4163 | available in leaf functions. */ |
4164 | |
4165 | void |
4166 | leaf_renumber_regs_insn (rtx in_rtx) |
4167 | { |
4168 | int i, j; |
4169 | const char *format_ptr; |
4170 | |
4171 | if (in_rtx == 0) |
4172 | return; |
4173 | |
  /* Renumber all input-registers into output-registers.  */
4177 | |
4178 | if (REG_P (in_rtx)) |
4179 | { |
4180 | int newreg; |
4181 | |
4182 | /* Don't renumber the same reg twice. */ |
4183 | if (in_rtx->used) |
4184 | return; |
4185 | |
4186 | newreg = REGNO (in_rtx); |
4187 | /* Don't try to renumber pseudo regs. It is possible for a pseudo reg |
4188 | to reach here as part of a REG_NOTE. */ |
4189 | if (newreg >= FIRST_PSEUDO_REGISTER) |
4190 | { |
4191 | in_rtx->used = 1; |
4192 | return; |
4193 | } |
4194 | newreg = LEAF_REG_REMAP (newreg); |
4195 | gcc_assert (newreg >= 0); |
4196 | df_set_regs_ever_live (REGNO (in_rtx), false); |
4197 | df_set_regs_ever_live (newreg, true); |
4198 | SET_REGNO (in_rtx, newreg); |
4199 | in_rtx->used = 1; |
4200 | return; |
4201 | } |
4202 | |
4203 | if (INSN_P (in_rtx)) |
4204 | { |
4205 | /* Inside a SEQUENCE, we find insns. |
4206 | Renumber just the patterns of these insns, |
4207 | just as we do for the top-level insns. */ |
4208 | leaf_renumber_regs_insn (PATTERN (in_rtx)); |
4209 | return; |
4210 | } |
4211 | |
4212 | format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx)); |
4213 | |
4214 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++) |
4215 | switch (*format_ptr++) |
4216 | { |
4217 | case 'e': |
4218 | leaf_renumber_regs_insn (XEXP (in_rtx, i)); |
4219 | break; |
4220 | |
4221 | case 'E': |
4222 | if (XVEC (in_rtx, i) != NULL) |
4223 | for (j = 0; j < XVECLEN (in_rtx, i); j++) |
4224 | leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j)); |
4225 | break; |
4226 | |
4227 | case 'S': |
4228 | case 's': |
4229 | case '0': |
4230 | case 'i': |
4231 | case 'L': |
4232 | case 'w': |
4233 | case 'p': |
4234 | case 'n': |
4235 | case 'u': |
4236 | break; |
4237 | |
4238 | default: |
4239 | gcc_unreachable (); |
4240 | } |
4241 | } |
4242 | #endif |
4243 | |
4244 | /* Turn the RTL into assembly. */ |
4245 | static unsigned int |
4246 | rest_of_handle_final (void) |
4247 | { |
4248 | const char *fnname = get_fnname_from_decl (current_function_decl); |
4249 | |
4250 | /* Turn debug markers into notes if the var-tracking pass has not |
4251 | been invoked. */ |
4252 | if (!flag_var_tracking && MAY_HAVE_DEBUG_MARKER_INSNS) |
4253 | delete_vta_debug_insns (false); |
4254 | |
4255 | assemble_start_function (current_function_decl, fnname); |
4256 | rtx_insn *first = get_insns (); |
4257 | int seen = 0; |
  final_start_function_1 (&first, asm_out_file, &seen, optimize);
  final_1 (first, asm_out_file, seen, optimize);
  if (flag_ipa_ra
      && !lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl))
      /* Functions with naked attributes are supported only with basic asm
	 statements in the body, thus for supported use cases the information
	 on clobbered registers is not available.  */
      && !lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)))
4266 | collect_fn_hard_reg_usage (); |
4267 | final_end_function (); |
4268 | |
4269 | /* The IA-64 ".handlerdata" directive must be issued before the ".endp" |
4270 | directive that closes the procedure descriptor. Similarly, for x64 SEH. |
4271 | Otherwise it's not strictly necessary, but it doesn't hurt either. */ |
4272 | output_function_exception_table (crtl->has_bb_partition ? 1 : 0); |
4273 | |
4274 | assemble_end_function (current_function_decl, fnname); |
4275 | |
4276 | /* Free up reg info memory. */ |
4277 | free_reg_info (); |
4278 | |
4279 | if (! quiet_flag) |
    fflush (asm_out_file);
4281 | |
4282 | /* Note that for those inline functions where we don't initially |
4283 | know for certain that we will be generating an out-of-line copy, |
4284 | the first invocation of this routine (rest_of_compilation) will |
4285 | skip over this code by doing a `goto exit_rest_of_compilation;'. |
4286 | Later on, wrapup_global_declarations will (indirectly) call |
4287 | rest_of_compilation again for those inline functions that need |
4288 | to have out-of-line copies generated. During that call, we |
4289 | *will* be routed past here. */ |
4290 | |
  timevar_push (TV_SYMOUT);
  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->function_decl (current_function_decl);
  timevar_pop (TV_SYMOUT);
4295 | |
4296 | /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */ |
4297 | DECL_INITIAL (current_function_decl) = error_mark_node; |
4298 | |
4299 | if (DECL_STATIC_CONSTRUCTOR (current_function_decl) |
4300 | && targetm.have_ctors_dtors) |
4301 | targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0), |
4302 | decl_init_priority_lookup |
4303 | (current_function_decl)); |
4304 | if (DECL_STATIC_DESTRUCTOR (current_function_decl) |
4305 | && targetm.have_ctors_dtors) |
4306 | targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0), |
4307 | decl_fini_priority_lookup |
4308 | (current_function_decl)); |
4309 | return 0; |
4310 | } |
4311 | |
4312 | namespace { |
4313 | |
4314 | const pass_data pass_data_final = |
4315 | { |
  RTL_PASS, /* type */
  "final", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_FINAL, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
4325 | }; |
4326 | |
4327 | class pass_final : public rtl_opt_pass |
4328 | { |
4329 | public: |
4330 | pass_final (gcc::context *ctxt) |
4331 | : rtl_opt_pass (pass_data_final, ctxt) |
4332 | {} |
4333 | |
4334 | /* opt_pass methods: */ |
4335 | unsigned int execute (function *) final override |
4336 | { |
4337 | return rest_of_handle_final (); |
4338 | } |
4339 | |
4340 | }; // class pass_final |
4341 | |
4342 | } // anon namespace |
4343 | |
4344 | rtl_opt_pass * |
4345 | make_pass_final (gcc::context *ctxt) |
4346 | { |
4347 | return new pass_final (ctxt); |
4348 | } |
4349 | |
4350 | |
4351 | static unsigned int |
4352 | rest_of_handle_shorten_branches (void) |
4353 | { |
4354 | /* Shorten branches. */ |
  shorten_branches (get_insns ());
4356 | return 0; |
4357 | } |
4358 | |
4359 | namespace { |
4360 | |
4361 | const pass_data pass_data_shorten_branches = |
4362 | { |
  RTL_PASS, /* type */
  "shorten", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_SHORTEN_BRANCH, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
4372 | }; |
4373 | |
4374 | class pass_shorten_branches : public rtl_opt_pass |
4375 | { |
4376 | public: |
4377 | pass_shorten_branches (gcc::context *ctxt) |
4378 | : rtl_opt_pass (pass_data_shorten_branches, ctxt) |
4379 | {} |
4380 | |
4381 | /* opt_pass methods: */ |
4382 | unsigned int execute (function *) final override |
4383 | { |
4384 | return rest_of_handle_shorten_branches (); |
4385 | } |
4386 | |
4387 | }; // class pass_shorten_branches |
4388 | |
4389 | } // anon namespace |
4390 | |
4391 | rtl_opt_pass * |
4392 | make_pass_shorten_branches (gcc::context *ctxt) |
4393 | { |
4394 | return new pass_shorten_branches (ctxt); |
4395 | } |
4396 | |
4397 | |
4398 | static unsigned int |
4399 | rest_of_clean_state (void) |
4400 | { |
4401 | rtx_insn *insn, *next; |
4402 | FILE *final_output = NULL; |
4403 | int save_unnumbered = flag_dump_unnumbered; |
4404 | int save_noaddr = flag_dump_noaddr; |
4405 | |
4406 | if (flag_dump_final_insns) |
4407 | { |
      final_output = fopen (flag_dump_final_insns, "a");
4409 | if (!final_output) |
4410 | { |
	  error ("could not open final insn dump file %qs: %m",
4412 | flag_dump_final_insns); |
4413 | flag_dump_final_insns = NULL; |
4414 | } |
4415 | else |
4416 | { |
4417 | flag_dump_noaddr = flag_dump_unnumbered = 1; |
4418 | if (flag_compare_debug_opt || flag_compare_debug) |
4419 | dump_flags |= TDF_NOUID | TDF_COMPARE_DEBUG; |
4420 | dump_function_header (final_output, current_function_decl, |
4421 | dump_flags); |
4422 | final_insns_dump_p = true; |
4423 | |
4424 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
4425 | if (LABEL_P (insn)) |
4426 | INSN_UID (insn) = CODE_LABEL_NUMBER (insn); |
4427 | else |
4428 | { |
4429 | if (NOTE_P (insn)) |
4430 | set_block_for_insn (insn, NULL); |
4431 | INSN_UID (insn) = 0; |
4432 | } |
4433 | } |
4434 | } |
4435 | |
4436 | /* It is very important to decompose the RTL instruction chain here: |
4437 | debug information keeps pointing into CODE_LABEL insns inside the function |
4438 | body. If these remain pointing to the other insns, we end up preserving |
4439 | whole RTL chain and attached detailed debug info in memory. */ |
4440 | for (insn = get_insns (); insn; insn = next) |
4441 | { |
4442 | next = NEXT_INSN (insn); |
4443 | SET_NEXT_INSN (insn) = NULL; |
4444 | SET_PREV_INSN (insn) = NULL; |
4445 | |
4446 | rtx_insn *call_insn = insn; |
4447 | if (NONJUMP_INSN_P (call_insn) |
4448 | && GET_CODE (PATTERN (call_insn)) == SEQUENCE) |
4449 | { |
	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (call_insn));
	  call_insn = seq->insn (0);
4452 | } |
4453 | if (CALL_P (call_insn)) |
4454 | { |
4455 | rtx note |
4456 | = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX); |
4457 | if (note) |
4458 | remove_note (call_insn, note); |
4459 | } |
4460 | |
4461 | if (final_output |
4462 | && (!NOTE_P (insn) |
4463 | || (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION |
4464 | && NOTE_KIND (insn) != NOTE_INSN_BEGIN_STMT |
4465 | && NOTE_KIND (insn) != NOTE_INSN_INLINE_ENTRY |
4466 | && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG |
4467 | && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END |
4468 | && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL))) |
4469 | print_rtl_single (final_output, insn); |
4470 | } |
4471 | |
4472 | if (final_output) |
4473 | { |
4474 | flag_dump_noaddr = save_noaddr; |
4475 | flag_dump_unnumbered = save_unnumbered; |
4476 | final_insns_dump_p = false; |
4477 | |
      if (fclose (final_output))
4479 | { |
	  error ("could not close final insn dump file %qs: %m",
4481 | flag_dump_final_insns); |
4482 | flag_dump_final_insns = NULL; |
4483 | } |
4484 | } |
4485 | |
4486 | flag_rerun_cse_after_global_opts = 0; |
4487 | reload_completed = 0; |
4488 | epilogue_completed = 0; |
4489 | #ifdef STACK_REGS |
4490 | regstack_completed = 0; |
4491 | #endif |
4492 | |
4493 | /* Clear out the insn_length contents now that they are no |
4494 | longer valid. */ |
4495 | init_insn_lengths (); |
4496 | |
4497 | /* Show no temporary slots allocated. */ |
4498 | init_temp_slots (); |
4499 | |
4500 | free_bb_for_insn (); |
4501 | |
4502 | if (cfun->gimple_df) |
4503 | delete_tree_ssa (cfun); |
4504 | |
4505 | /* We can reduce stack alignment on call site only when we are sure that |
4506 | the function body just produced will be actually used in the final |
4507 | executable. */ |
4508 | if (flag_ipa_stack_alignment |
4509 | && decl_binds_to_current_def_p (current_function_decl)) |
4510 | { |
4511 | unsigned int pref = crtl->preferred_stack_boundary; |
4512 | if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary) |
4513 | pref = crtl->stack_alignment_needed; |
4514 | cgraph_node::rtl_info (current_function_decl) |
4515 | ->preferred_incoming_stack_boundary = pref; |
4516 | } |
4517 | |
4518 | /* Make sure volatile mem refs aren't considered valid operands for |
4519 | arithmetic insns. We must call this here if this is a nested inline |
4520 | function, since the above code leaves us in the init_recog state, |
4521 | and the function context push/pop code does not save/restore volatile_ok. |
4522 | |
4523 | ??? Maybe it isn't necessary for expand_start_function to call this |
4524 | anymore if we do it here? */ |
4525 | |
4526 | init_recog_no_volatile (); |
4527 | |
4528 | /* We're done with this function. Free up memory if we can. */ |
4529 | free_after_parsing (cfun); |
4530 | free_after_compilation (cfun); |
4531 | return 0; |
4532 | } |
4533 | |
4534 | namespace { |
4535 | |
4536 | const pass_data pass_data_clean_state = |
4537 | { |
  RTL_PASS, /* type */
  "*clean_state", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_FINAL, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_rtl, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
4547 | }; |
4548 | |
4549 | class pass_clean_state : public rtl_opt_pass |
4550 | { |
4551 | public: |
4552 | pass_clean_state (gcc::context *ctxt) |
4553 | : rtl_opt_pass (pass_data_clean_state, ctxt) |
4554 | {} |
4555 | |
4556 | /* opt_pass methods: */ |
4557 | unsigned int execute (function *) final override |
4558 | { |
4559 | return rest_of_clean_state (); |
4560 | } |
4561 | |
4562 | }; // class pass_clean_state |
4563 | |
4564 | } // anon namespace |
4565 | |
4566 | rtl_opt_pass * |
4567 | make_pass_clean_state (gcc::context *ctxt) |
4568 | { |
4569 | return new pass_clean_state (ctxt); |
4570 | } |
4571 | |
4572 | /* Return true if INSN is a call to the current function. */ |
4573 | |
4574 | static bool |
4575 | self_recursive_call_p (rtx_insn *insn) |
4576 | { |
4577 | tree fndecl = get_call_fndecl (insn); |
4578 | return (fndecl == current_function_decl |
4579 | && decl_binds_to_current_def_p (fndecl)); |
4580 | } |
4581 | |
4582 | /* Collect hard register usage for the current function. */ |
4583 | |
4584 | static void |
4585 | collect_fn_hard_reg_usage (void) |
4586 | { |
4587 | rtx_insn *insn; |
4588 | #ifdef STACK_REGS |
4589 | int i; |
4590 | #endif |
4591 | struct cgraph_rtl_info *node; |
4592 | HARD_REG_SET function_used_regs; |
4593 | |
4594 | /* ??? To be removed when all the ports have been fixed. */ |
4595 | if (!targetm.call_fusage_contains_non_callee_clobbers) |
4596 | return; |
4597 | |
4598 | /* Be conservative - mark fixed and global registers as used. */ |
4599 | function_used_regs = fixed_reg_set; |
4600 | |
4601 | #ifdef STACK_REGS |
4602 | /* Handle STACK_REGS conservatively, since the df-framework does not |
4603 | provide accurate information for them. */ |
4604 | |
4605 | for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++) |
    SET_HARD_REG_BIT (function_used_regs, i);
4607 | #endif |
4608 | |
4609 | for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn)) |
4610 | { |
4611 | HARD_REG_SET insn_used_regs; |
4612 | |
4613 | if (!NONDEBUG_INSN_P (insn)) |
4614 | continue; |
4615 | |
4616 | if (CALL_P (insn) |
4617 | && !self_recursive_call_p (insn)) |
4618 | function_used_regs |
4619 | |= insn_callee_abi (insn).full_and_partial_reg_clobbers (); |
4620 | |
4621 | find_all_hard_reg_sets (insn, &insn_used_regs, false); |
4622 | function_used_regs |= insn_used_regs; |
4623 | |
4624 | if (hard_reg_set_subset_p (crtl->abi->full_and_partial_reg_clobbers (), |
				 function_used_regs))
4626 | return; |
4627 | } |
4628 | |
4629 | /* Mask out fully-saved registers, so that they don't affect equality |
4630 | comparisons between function_abis. */ |
4631 | function_used_regs &= crtl->abi->full_and_partial_reg_clobbers (); |
4632 | |
4633 | node = cgraph_node::rtl_info (current_function_decl); |
4634 | gcc_assert (node != NULL); |
4635 | |
4636 | node->function_used_regs = function_used_regs; |
4637 | } |
4638 | |