1 | /* Exported functions from emit-rtl.cc |
2 | Copyright (C) 2004-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #ifndef GCC_EMIT_RTL_H |
21 | #define GCC_EMIT_RTL_H |
22 | |
23 | class temp_slot; |
24 | typedef class temp_slot *temp_slot_p; |
25 | class predefined_function_abi; |
26 | namespace rtl_ssa { class function_info; } |
27 | |
/* Information maintained about RTL representation of incoming arguments.  */
struct GTY(()) incoming_args {
  /* Number of bytes of args popped by function being compiled on its return.
     Zero if no bytes are to be popped.
     May affect compilation of return insn or of function epilogue.  */
  poly_int64 pops_args;

  /* If function's args have a fixed size, this is that size, in bytes.
     Otherwise, it is -1.
     May affect compilation of return insn or of function epilogue.  */
  poly_int64 size;

  /* # bytes the prologue should push and pretend that the caller pushed them.
     The prologue must do this, but only if parms can be passed in
     registers.  */
  int pretend_args_size;

  /* This is the offset from the arg pointer to the place where the first
     anonymous arg can be found, if there is one.  */
  rtx arg_offset_rtx;

  /* Quantities of various kinds of registers
     used for the current function's args.  */
  CUMULATIVE_ARGS info;

  /* The arg pointer hard register, or the pseudo into which it was copied.  */
  rtx internal_arg_pointer;
};
56 | |
57 | |
/* Datastructures maintained for currently processed function in RTL form.  */
struct GTY(()) rtl_data {
  void init_stack_alignment ();

  struct expr_status expr;
  struct emit_status emit;
  struct varasm_status varasm;
  struct incoming_args args;
  struct function_subsections subsections;
  struct rtl_eh eh;

  /* The ABI of the function, i.e. the interface it presents to its callers.
     This is the ABI that should be queried to see which registers the
     function needs to save before it uses them.

     Other functions (including those called by this function) might use
     different ABIs.  */
  const predefined_function_abi *GTY((skip)) abi;

  /* The function's RTL SSA form, if any (see rtl-ssa).  */
  rtl_ssa::function_info *GTY((skip)) ssa;

  /* For function.cc  */

  /* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
     defined, the needed space is pushed by the prologue.  */
  poly_int64 outgoing_args_size;

  /* If nonzero, an RTL expression for the location at which the current
     function returns its result.  If the current function returns its
     result in a register, current_function_return_rtx will always be
     the hard register containing the result.  */
  rtx return_rtx;

  /* Vector of initial-value pairs.  Each pair consists of a pseudo
     register of appropriate mode that stores the initial value of a hard
     register REGNO, and that hard register itself.  */
  /* ??? This could be a VEC but there is currently no way to define an
     opaque VEC type.  */
  struct initial_value_struct *hard_reg_initial_vals;

  /* A variable living at the top of the frame that holds a known value.
     Used for detecting stack clobbers.  */
  tree stack_protect_guard;

  /* The __stack_chk_guard variable or expression holding the stack
     protector canary value.  */
  tree stack_protect_guard_decl;

  /* List (chain of INSN_LIST) of labels heading the current handlers for
     nonlocal gotos.  */
  rtx_insn_list *x_nonlocal_goto_handler_labels;

  /* Label that will go on function epilogue.
     Jumping to this label serves as a "return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx_code_label *x_return_label;

  /* Label that will go on the end of function epilogue.
     Jumping to this label serves as a "naked return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx_code_label *x_naked_return_label;

  /* Vec of all stack slots in this function.
     Made for the sake of unshare_all_rtl.  */
  vec<rtx, va_gc> *x_stack_slot_list;

  /* List of empty areas in the stack frame.  */
  class frame_space *frame_space_list;

  /* Place after which to insert the tail_recursion_label if we need one.
     NOTE(review): comment looks stale — the field holds the stack-check
     probe note; confirm against function.cc.  */
  rtx_note *x_stack_check_probe_note;

  /* Location at which to save the argument pointer if it will need to be
     referenced.  There are two cases where this is done: if nonlocal gotos
     exist, or if vars stored at an offset from the argument pointer will be
     needed by inner routines.  */
  rtx x_arg_pointer_save_area;

  /* Dynamic Realign Argument Pointer used for realigning stack.  */
  rtx drap_reg;

  /* Offset to end of allocated area of stack frame.
     If stack grows down, this is the address of the last stack slot allocated.
     If stack grows up, this is the address for the next slot.  */
  poly_int64 x_frame_offset;

  /* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
  rtx_insn *x_parm_birth_insn;

  /* List of all used temporaries allocated, by level.  */
  vec<temp_slot_p, va_gc> *x_used_temp_slots;

  /* List of available temp slots.  */
  class temp_slot *x_avail_temp_slots;

  /* Current nesting level for temporaries.  */
  int x_temp_slot_level;

  /* The largest alignment needed on the stack, including requirement
     for outgoing stack alignment.  */
  unsigned int stack_alignment_needed;

  /* Preferred alignment of the end of stack frame, which is preferred
     to call other functions.  */
  unsigned int preferred_stack_boundary;

  /* The minimum alignment of parameter stack.  */
  unsigned int parm_stack_boundary;

  /* The largest alignment of slot allocated on the stack.  */
  unsigned int max_used_stack_slot_alignment;

  /* The stack alignment estimated before reload, with consideration of
     following factors:
     1. Alignment of local stack variables (max_used_stack_slot_alignment)
     2. Alignment requirement to call other functions
        (preferred_stack_boundary)
     3. Alignment of non-local stack variables but might be spilled in
        local stack.  */
  unsigned int stack_alignment_estimated;

  /* How many NOP insns to place at each function entry by default.  */
  unsigned short patch_area_size;

  /* How far the real asm entry point is into this area.  */
  unsigned short patch_area_entry;

  /* For reorg.  */

  /* Nonzero if function being compiled called builtin_return_addr or
     builtin_frame_address with nonzero count.  */
  bool accesses_prior_frames;

  /* Nonzero if the function calls __builtin_eh_return.  */
  bool calls_eh_return;

  /* Nonzero if function saves all registers, e.g. if it has a nonlocal
     label that can reach the exit block via non-exceptional paths.  */
  bool saves_all_registers;

  /* Nonzero if function being compiled has nonlocal gotos to parent
     function.  */
  bool has_nonlocal_goto;

  /* Nonzero if function being compiled has an asm statement.  */
  bool has_asm_statement;

  /* This bit is used by the exception handling logic.  It is set if all
     calls (if any) are sibling calls.  Such functions do not have to
     have EH tables generated, as they cannot throw.  A call to such a
     function, however, should be treated as throwing if any of its callees
     can throw.  */
  bool all_throwers_are_sibcalls;

  /* Nonzero if stack limit checking should be enabled in the current
     function.  */
  bool limit_stack;

  /* Nonzero if profiling code should be generated.  */
  bool profile;

  /* Nonzero if the current function uses the constant pool.  */
  bool uses_const_pool;

  /* Nonzero if the current function uses pic_offset_table_rtx.  */
  bool uses_pic_offset_table;

  /* Nonzero if the current function needs an lsda for exception handling.  */
  bool uses_eh_lsda;

  /* Set when the tail call has been produced.  */
  bool tail_call_emit;

  /* Nonzero if code to initialize arg_pointer_save_area has been emitted.  */
  bool arg_pointer_save_area_init;

  /* Nonzero if current function must be given a frame pointer.
     Set in reload1.cc or lra-eliminations.cc if anything is allocated
     on the stack there.  */
  bool frame_pointer_needed;

  /* When set, expand should optimize for speed.  */
  bool maybe_hot_insn_p;

  /* Nonzero if function stack realignment is needed.  This flag may be
     set twice: before and after reload.  It is set before reload wrt
     stack alignment estimation before reload.  It will be changed after
     reload if by then criteria of stack realignment is different.
     The value set after reload is the accurate one and is finalized.  */
  bool stack_realign_needed;

  /* Nonzero if function stack realignment is tried.  This flag is set
     only once before reload.  It affects register elimination.  This
     is used to generate DWARF debug info for stack variables.  */
  bool stack_realign_tried;

  /* Nonzero if function being compiled needs dynamic realigned
     argument pointer (drap) if stack needs realigning.  */
  bool need_drap;

  /* Nonzero if function stack realignment estimation is done, namely
     stack_realign_needed flag has been set before reload wrt estimated
     stack alignment info.  */
  bool stack_realign_processed;

  /* Nonzero if function stack realignment has been finalized, namely
     stack_realign_needed flag has been set and finalized after reload.  */
  bool stack_realign_finalized;

  /* True if dbr_schedule has already been called for this function.  */
  bool dbr_scheduled_p;

  /* True if current function cannot throw.  Unlike
     TREE_NOTHROW (current_function_decl) it is set even for overwritable
     function where currently compiled version of it is nothrow.  */
  bool nothrow;

  /* True if we performed shrink-wrapping for the current function.  */
  bool shrink_wrapped;

  /* True if we performed shrink-wrapping for separate components for
     the current function.  */
  bool shrink_wrapped_separate;

  /* Nonzero if function being compiled doesn't modify the stack pointer
     (ignoring the prologue and epilogue).  This is only valid after
     pass_stack_ptr_mod has run.  */
  bool sp_is_unchanging;

  /* True if the stack pointer is clobbered by asm statement.  */
  bool sp_is_clobbered_by_asm;

  /* Nonzero if function being compiled doesn't contain any calls
     (ignoring the prologue and epilogue).  This is set prior to
     register allocation in IRA and is valid for the remaining
     compiler passes.  */
  bool is_leaf;

  /* Nonzero if the function being compiled is a leaf function which only
     uses leaf registers.  This is valid after reload (specifically after
     sched2) and is useful only if the port defines LEAF_REGISTERS.  */
  bool uses_only_leaf_regs;

  /* Nonzero if the function being compiled has undergone hot/cold partitioning
     (under flag_reorder_blocks_and_partition) and has at least one cold
     block.  */
  bool has_bb_partition;

  /* Nonzero if the function being compiled has completed the bb reordering
     pass.  */
  bool bb_reorder_complete;

  /* Like regs_ever_live, but 1 if a reg is set or clobbered from an
     asm.  Unlike regs_ever_live, elements of this array corresponding
     to eliminable regs (like the frame pointer) are set if an asm
     sets them.  */
  HARD_REG_SET asm_clobbers;

  /* All hard registers that need to be zeroed at the return of the routine.  */
  HARD_REG_SET must_be_zero_on_return;

  /* The highest address seen during shorten_branches.  */
  int max_insn_address;
};
322 | |
/* Short-hand accessors for the fields of the current function's RTL data
   (crtl, a macro over the global x_rtl defined later in this file).  */
#define return_label (crtl->x_return_label)
#define naked_return_label (crtl->x_naked_return_label)
#define stack_slot_list (crtl->x_stack_slot_list)
#define parm_birth_insn (crtl->x_parm_birth_insn)
#define frame_offset (crtl->x_frame_offset)
#define stack_check_probe_note (crtl->x_stack_check_probe_note)
#define arg_pointer_save_area (crtl->x_arg_pointer_save_area)
#define used_temp_slots (crtl->x_used_temp_slots)
#define avail_temp_slots (crtl->x_avail_temp_slots)
#define temp_slot_level (crtl->x_temp_slot_level)
#define nonlocal_goto_handler_labels (crtl->x_nonlocal_goto_handler_labels)
#define frame_pointer_needed (crtl->frame_pointer_needed)
/* True when stack realignment is needed and will be done via the frame
   pointer (no DRAP) or via a dynamic realign argument pointer (DRAP).  */
#define stack_realign_fp (crtl->stack_realign_needed && !crtl->need_drap)
#define stack_realign_drap (crtl->stack_realign_needed && crtl->need_drap)
337 | |
/* The single, statically allocated instance of the per-function RTL data.
   Access it through the crtl macro below rather than directly.  */
extern GTY(()) struct rtl_data x_rtl;

/* Accessor to RTL datastructures.  We keep them statically allocated now since
   we never keep multiple functions.  For threaded compiler we might however
   want to do differently.  */
#define crtl (&x_rtl)
344 | |
/* Return whether two MEM_ATTRs are equal.  The declaration was missing its
   function name; this matches the definition in emit-rtl.cc.  */
extern bool mem_attrs_eq_p (const class mem_attrs *, const class mem_attrs *);
347 | |
/* Set the alias set of MEM to SET.  */
extern void set_mem_alias_set (rtx, alias_set_type);

/* Set the alignment of MEM to ALIGN bits.  */
extern void set_mem_align (rtx, unsigned int);

/* Set the address space of MEM to ADDRSPACE.  */
extern void set_mem_addr_space (rtx, addr_space_t);

/* Set the expr for MEM to EXPR.  */
extern void set_mem_expr (rtx, tree);

/* Set the offset for MEM to OFFSET.  */
extern void set_mem_offset (rtx, poly_int64);

/* Clear the offset recorded for MEM.  */
extern void clear_mem_offset (rtx);

/* Set the size for MEM to SIZE.  */
extern void set_mem_size (rtx, poly_int64);

/* Clear the size recorded for MEM.  */
extern void clear_mem_size (rtx);

/* Set the attributes for MEM appropriate for a spill slot.  */
extern void set_mem_attrs_for_spill (rtx);
extern tree get_spill_slot_decl (bool);

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */
extern rtx replace_equiv_address (rtx, rtx, bool = false);

/* Likewise, but the reference is not required to be valid.  */
extern rtx replace_equiv_address_nv (rtx, rtx, bool = false);

/* Generate a blockage insn (an optimization barrier).  */
extern rtx gen_blockage (void);
/* Build an rtvec from the N rtx arguments that follow.  */
extern rtvec gen_rtvec (int, ...);
/* Copy an insn pattern; copy_insn is the entry point, copy_insn_1 the
   recursive worker.  */
extern rtx copy_insn_1 (rtx);
extern rtx copy_insn (rtx);
/* Copy an insn residing in a delay slot.  */
extern rtx_insn *copy_delay_slot_insn (rtx_insn *);
/* Return the given value as a CONST_INT/const wide int of MODE.  */
extern rtx gen_int_mode (poly_int64, machine_mode);
/* Emit a copy of an insn after a given insn.  */
extern rtx_insn *emit_copy_of_insn_after (rtx_insn *, rtx_insn *);
/* Set REG_ATTRS of a register from a value, parm decl, or decl rtl.  */
extern void set_reg_attrs_from_value (rtx, rtx);
extern void set_reg_attrs_for_parm (rtx, rtx);
extern void set_reg_attrs_for_decl_rtl (tree t, rtx x);
/* Change the mode of a register in place.  */
extern void adjust_reg_mode (rtx, machine_mode);
/* Return true if two MEM_EXPR trees are considered equal.  */
extern bool mem_expr_equal_p (const_tree, const_tree);
/* Return VALUE as an rtx suitable for a shift amount in MODE.  */
extern rtx gen_int_shift_amount (machine_mode, poly_int64);

/* Return true if a memory barrier is needed for the given memory model
   and pre/post position.  NOTE(review): semantics of the bool argument
   inferred from callers; confirm against emit-rtl.cc.  */
extern bool need_atomic_barrier_p (enum memmodel, bool);
400 | |
401 | /* Return the current sequence. */ |
402 | |
403 | inline struct sequence_stack * |
404 | get_current_sequence (void) |
405 | { |
406 | return &crtl->emit.seq; |
407 | } |
408 | |
409 | /* Return the outermost sequence. */ |
410 | |
411 | inline struct sequence_stack * |
412 | get_topmost_sequence (void) |
413 | { |
414 | struct sequence_stack *seq, *top; |
415 | |
416 | seq = get_current_sequence (); |
417 | do |
418 | { |
419 | top = seq; |
420 | seq = seq->next; |
421 | } while (seq); |
422 | return top; |
423 | } |
424 | |
425 | /* Return the first insn of the current sequence or current function. */ |
426 | |
427 | inline rtx_insn * |
428 | get_insns (void) |
429 | { |
430 | return get_current_sequence ()->first; |
431 | } |
432 | |
433 | /* Specify a new insn as the first in the chain. */ |
434 | |
435 | inline void |
436 | set_first_insn (rtx_insn *insn) |
437 | { |
438 | gcc_checking_assert (!insn || !PREV_INSN (insn)); |
439 | get_current_sequence ()->first = insn; |
440 | } |
441 | |
442 | /* Return the last insn emitted in current sequence or current function. */ |
443 | |
444 | inline rtx_insn * |
445 | get_last_insn (void) |
446 | { |
447 | return get_current_sequence ()->last; |
448 | } |
449 | |
450 | /* Specify a new insn as the last in the chain. */ |
451 | |
452 | inline void |
453 | set_last_insn (rtx_insn *insn) |
454 | { |
455 | gcc_checking_assert (!insn || !NEXT_INSN (insn)); |
456 | get_current_sequence ()->last = insn; |
457 | } |
458 | |
459 | /* Return a number larger than any instruction's uid in this function. */ |
460 | |
461 | inline int |
462 | get_max_uid (void) |
463 | { |
464 | return crtl->emit.x_cur_insn_uid; |
465 | } |
466 | |
/* Return true if the given rtx is a valid element for a constant vector
   of MODE.  */
extern bool valid_for_const_vector_p (machine_mode, rtx);
/* Return a vector of MODE with every element equal to the given rtx —
   constant form and general form respectively.  */
extern rtx gen_const_vec_duplicate (machine_mode, rtx);
extern rtx gen_vec_duplicate (machine_mode, rtx);

/* Return a vector series (BASE, BASE + STEP, BASE + 2 * STEP, ...) —
   constant form and general form.  NOTE(review): contract inferred from
   the names; see emit-rtl.cc for the exact semantics.  */
extern rtx gen_const_vec_series (machine_mode, rtx, rtx);
extern rtx gen_vec_series (machine_mode, rtx, rtx);

/* Record the incoming rtl for parameter decl T.  */
extern void set_decl_incoming_rtl (tree, rtx, bool);
475 | |
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR.
   (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  */
extern rtx change_address (rtx, machine_mode, rtx);

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  */
#define adjust_address(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 0, 0)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_address_nv(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 0, 0)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  Assume that it's
   for a bitfield and conservatively drop the underlying object if we
   cannot be sure to stay within its bounds.  */
#define adjust_bitfield_address(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 1, 0)

/* As for adjust_bitfield_address, but specify that the width of
   BLKmode accesses is SIZE bytes.  */
#define adjust_bitfield_address_size(MEMREF, MODE, OFFSET, SIZE) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 1, SIZE)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_bitfield_address_nv(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 1, 0)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   increased by OFFSET bytes from MEMREF.  */
#define adjust_automodify_address(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 1)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0)

/* Underlying workers for the adjust_address* macros above.  Judging from
   the expansions, the int arguments distinguish the validated (_nv)
   and bitfield variants, and the trailing poly_int64 is the known access
   size (0 when unspecified) — NOTE(review): confirm the exact flag
   meanings against the definitions in emit-rtl.cc.  */
extern rtx adjust_address_1 (rtx, machine_mode, poly_int64, int, int,
			     int, poly_int64);
extern rtx adjust_automodify_address_1 (rtx, machine_mode, rtx,
					poly_int64, int);

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */
extern rtx offset_address (rtx, rtx, unsigned HOST_WIDE_INT);

/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */
extern void set_mem_attributes (rtx, tree, int);

/* Similar, except that BITPOS has not yet been applied to REF, so if
   we alter MEM_OFFSET according to T then we should subtract BITPOS
   expecting that it'll be added back in later.  */
extern void set_mem_attributes_minus_bitpos (rtx, tree, int, poly_int64);

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */
extern int get_mem_align_offset (rtx, unsigned int);

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and adjusted by OFFSET.  */
extern rtx widen_memory_access (rtx, machine_mode, poly_int64);

/* Bump the recorded maximum label number to cover X if needed.
   NOTE(review): behavior inferred from the name; confirm in emit-rtl.cc.  */
extern void maybe_set_max_label_num (rtx_code_label *x);
547 | |
548 | #endif /* GCC_EMIT_RTL_H */ |
549 | |