1 | /* Output variables, constants and external declarations, for GNU compiler. |
2 | Copyright (C) 1987-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | |
21 | /* This file handles generation of all the assembler code |
22 | *except* the instructions of a function. |
23 | This includes declarations of variables and their initial values. |
24 | |
25 | We also output the assembler code for constants stored in memory |
26 | and are responsible for combining constants with the same value. */ |
27 | |
28 | #include "config.h" |
29 | #include "system.h" |
30 | #include "coretypes.h" |
31 | #include "backend.h" |
32 | #include "target.h" |
33 | #include "rtl.h" |
34 | #include "tree.h" |
35 | #include "predict.h" |
36 | #include "memmodel.h" |
37 | #include "tm_p.h" |
38 | #include "stringpool.h" |
39 | #include "regs.h" |
40 | #include "emit-rtl.h" |
41 | #include "cgraph.h" |
42 | #include "diagnostic-core.h" |
43 | #include "fold-const.h" |
44 | #include "stor-layout.h" |
45 | #include "varasm.h" |
46 | #include "version.h" |
47 | #include "flags.h" |
48 | #include "stmt.h" |
49 | #include "expr.h" |
50 | #include "expmed.h" |
51 | #include "optabs.h" |
52 | #include "output.h" |
53 | #include "langhooks.h" |
54 | #include "debug.h" |
55 | #include "common/common-target.h" |
56 | #include "stringpool.h" |
57 | #include "attribs.h" |
58 | #include "asan.h" |
59 | #include "rtl-iter.h" |
60 | #include "file-prefix-map.h" /* remap_debug_filename() */ |
61 | #include "alloc-pool.h" |
62 | #include "toplev.h" |
63 | #include "opts.h" |
64 | |
/* The (assembler) name of the first globally-visible object output.  */
extern GTY(()) const char *first_global_object_name;
/* Likewise, but for the first weak global object, used as a fallback
   when no strong global was seen.  */
extern GTY(()) const char *weak_global_object_name;

const char *first_global_object_name;
const char *weak_global_object_name;
71 | |
/* Opaque types used only through pointers here; defined later in this
   file or elsewhere.  */
class addr_const;
class constant_descriptor_rtx;
struct rtx_constant_pool;

/* Count of constants deferred for the current function; lives in crtl
   so it is saved and restored across function contexts.  */
#define n_deferred_constants (crtl->varasm.deferred_constants)

/* Number for making the label on the next
   constant that is stored in memory.  */

static GTY(()) int const_labelno;

/* Carry information from ASM_DECLARE_OBJECT_NAME
   to ASM_FINISH_DECLARE_OBJECT.  */

int size_directive_output;

/* The last decl for which assemble_variable was called,
   if it did ASM_DECLARE_OBJECT_NAME.
   If the last call to assemble_variable didn't do that,
   this holds 0.  */

tree last_assemble_variable_decl;

/* The following global variable indicates if the first basic block
   in a function belongs to the cold partition or not.  */

bool first_function_block_is_cold;

/* Whether we saw any functions with no_split_stack.  */

static bool saw_no_split_stack;

/* Forward declarations of static helpers defined later in this file.  */
static const char *strip_reg_name (const char *);
static bool contains_pointers_p (tree);
#ifdef ASM_OUTPUT_EXTERNAL
static bool incorporeal_function_p (tree);
#endif
static void decode_addr_const (tree, class addr_const *);
static hashval_t const_hash_1 (const tree);
static bool compare_constant (const tree, const tree);
static void output_constant_def_contents (rtx);
static void output_addressed_constants (tree, int);
static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
					       unsigned int, bool, bool);
static void globalize_decl (tree);
static bool decl_readonly_section_1 (enum section_category);
#ifdef BSS_SECTION_ASM_OP
#ifdef ASM_OUTPUT_ALIGNED_BSS
static void asm_output_aligned_bss (FILE *, tree, const char *,
				    unsigned HOST_WIDE_INT, int)
     ATTRIBUTE_UNUSED;
#endif
#endif /* BSS_SECTION_ASM_OP */
static void mark_weak (tree);
static void output_constant_pool (const char *, tree);
static void handle_vtv_comdat_section (section *, const_tree);
128 | |
/* Well-known sections, each one associated with some sort of *_ASM_OP.  */
section *text_section;
section *data_section;
section *readonly_data_section;
section *sdata_section;
section *ctors_section;
section *dtors_section;
section *bss_section;
section *sbss_section;

/* Various forms of common section.  All are guaranteed to be nonnull.  */
section *tls_comm_section;
section *comm_section;
section *lcomm_section;

/* A SECTION_NOSWITCH section used for declaring global BSS variables.
   May be null.  */
section *bss_noswitch_section;

/* The section that holds the main exception table, when known.  The section
   is set either by the target's init_sections hook or by the first call to
   switch_to_exception_section.  */
section *exception_section;

/* The section that holds the DWARF2 frame unwind information, when known.
   The section is set either by the target's init_sections hook or by the
   first call to switch_to_eh_frame_section.  */
section *eh_frame_section;

/* asm_out_file's current section.  This is NULL if no section has yet
   been selected or if we lose track of what the current section is.  */
section *in_section;

/* True if code for the current function is currently being directed
   at the cold section.  */
bool in_cold_section_p;

/* The following global holds the "function name" for the code in the
   cold section of a function, if hot/cold function splitting is enabled
   and there was actually code that went into the cold section.  A
   pseudo function name is needed for the cold section of code for some
   debugging tools that perform symbolization.  */
tree cold_function_name = NULL_TREE;

/* A linked list of all the unnamed sections.  */
static GTY(()) section *unnamed_sections;

/* Return a nonzero value if DECL has a section attribute.  */
#define IN_NAMED_SECTION(DECL) \
  (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)

/* Hash traits for named sections: keyed by section name string.  */
struct section_hasher : ggc_ptr_hash<section>
{
  typedef const char *compare_type;

  static hashval_t hash (section *);
  static bool equal (section *, const char *);
};

/* Hash table of named sections.  */
static GTY(()) hash_table<section_hasher> *section_htab;

/* Hash traits for object blocks: keyed by the section they belong to.  */
struct object_block_hasher : ggc_ptr_hash<object_block>
{
  typedef const section *compare_type;

  static hashval_t hash (object_block *);
  static bool equal (object_block *, const section *);
};

/* A table of object_blocks, indexed by section.  */
static GTY(()) hash_table<object_block_hasher> *object_block_htab;

/* The next number to use for internal anchor labels.  */
static GTY(()) int anchor_labelno;

/* A pool of constants that can be shared between functions.  */
static GTY(()) struct rtx_constant_pool *shared_constant_pool;
207 | |
208 | /* Helper routines for maintaining section_htab. */ |
209 | |
210 | bool |
211 | section_hasher::equal (section *old, const char *new_name) |
212 | { |
213 | return strcmp (s1: old->named.name, s2: new_name) == 0; |
214 | } |
215 | |
216 | hashval_t |
217 | section_hasher::hash (section *old) |
218 | { |
219 | return htab_hash_string (old->named.name); |
220 | } |
221 | |
222 | /* Return a hash value for section SECT. */ |
223 | |
224 | static hashval_t |
225 | hash_section (section *sect) |
226 | { |
227 | if (sect->common.flags & SECTION_NAMED) |
228 | return htab_hash_string (sect->named.name); |
229 | return sect->common.flags & ~SECTION_DECLARED; |
230 | } |
231 | |
232 | /* Helper routines for maintaining object_block_htab. */ |
233 | |
234 | inline bool |
235 | object_block_hasher::equal (object_block *old, const section *new_section) |
236 | { |
237 | return old->sect == new_section; |
238 | } |
239 | |
240 | hashval_t |
241 | object_block_hasher::hash (object_block *old) |
242 | { |
243 | return hash_section (sect: old->sect); |
244 | } |
245 | |
246 | /* Return a new unnamed section with the given fields. */ |
247 | |
248 | section * |
249 | get_unnamed_section (unsigned int flags, void (*callback) (const char *), |
250 | const char *data) |
251 | { |
252 | section *sect; |
253 | |
254 | sect = ggc_alloc<section> (); |
255 | sect->unnamed.common.flags = flags | SECTION_UNNAMED; |
256 | sect->unnamed.callback = callback; |
257 | sect->unnamed.data = data; |
258 | sect->unnamed.next = unnamed_sections; |
259 | |
260 | unnamed_sections = sect; |
261 | return sect; |
262 | } |
263 | |
264 | /* Return a SECTION_NOSWITCH section with the given fields. */ |
265 | |
266 | static section * |
267 | get_noswitch_section (unsigned int flags, noswitch_section_callback callback) |
268 | { |
269 | section *sect; |
270 | |
271 | sect = ggc_alloc<section> (); |
272 | sect->noswitch.common.flags = flags | SECTION_NOSWITCH; |
273 | sect->noswitch.callback = callback; |
274 | |
275 | return sect; |
276 | } |
277 | |
278 | /* Return the named section structure associated with NAME. Create |
279 | a new section with the given fields if no such structure exists. |
280 | When NOT_EXISTING, then fail if the section already exists. Return |
281 | the existing section if the SECTION_RETAIN bit doesn't match. Set |
282 | the SECTION_WRITE | SECTION_RELRO bits on the existing section |
283 | if one of the section flags is SECTION_WRITE | SECTION_RELRO and the |
284 | other has none of these flags in named sections and either the section |
285 | hasn't been declared yet or has been declared as writable. */ |
286 | |
section *
get_section (const char *name, unsigned int flags, tree decl,
	     bool not_existing)
{
  section *sect, **slot;

  /* Look up NAME in the table of named sections, creating a slot for
     it if this is the first time it is seen.  */
  slot = section_htab->find_slot_with_hash (name, htab_hash_string (name),
					    INSERT);
  flags |= SECTION_NAMED;
  /* The "retain" attribute on DECL requests the SECTION_RETAIN flag.  */
  if (decl != nullptr
      && DECL_P (decl)
      && lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
    flags |= SECTION_RETAIN;
  if (*slot == NULL)
    {
      /* First use of this name: allocate and record the section.  */
      sect = ggc_alloc<section> ();
      sect->named.common.flags = flags;
      sect->named.name = ggc_strdup (name);
      sect->named.decl = decl;
      *slot = sect;
    }
  else
    {
      if (not_existing)
	internal_error ("section already exists: %qs", name);

      sect = *slot;
      /* It is fine if one of the sections has SECTION_NOTYPE as long as
	 the other has none of the contrary flags (see the logic at the end
	 of default_section_type_flags, below).  */
      if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
	  && !((sect->common.flags | flags)
	       & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
		  | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
	{
	  /* Reconcile by making both sides NOTYPE.  */
	  sect->common.flags |= SECTION_NOTYPE;
	  flags |= SECTION_NOTYPE;
	}
      /* Flags disagree (ignoring SECTION_DECLARED) and neither side has
	 already been flagged SECTION_OVERRIDE by a previous conflict.  */
      if ((sect->common.flags & ~SECTION_DECLARED) != flags
	  && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
	{
	  /* It is fine if one of the section flags is
	     SECTION_WRITE | SECTION_RELRO and the other has none of these
	     flags (i.e. read-only) in named sections and either the
	     section hasn't been declared yet or has been declared as writable.
	     In that case just make sure the resulting flags are
	     SECTION_WRITE | SECTION_RELRO, ie. writable only because of
	     relocations.  */
	  if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
	      == (SECTION_WRITE | SECTION_RELRO)
	      && (sect->common.flags
		  & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
		 == (flags & ~(SECTION_WRITE | SECTION_RELRO))
	      && ((sect->common.flags & SECTION_DECLARED) == 0
		  || (sect->common.flags & SECTION_WRITE)))
	    {
	      sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
	      return sect;
	    }
	  /* If the SECTION_RETAIN bit doesn't match, return and switch
	     to a new section later.  */
	  if ((sect->common.flags & SECTION_RETAIN)
	      != (flags & SECTION_RETAIN))
	    return sect;
	  /* Sanity check user variables for flag changes.  */
	  if (sect->named.decl != NULL
	      && DECL_P (sect->named.decl)
	      && decl != sect->named.decl)
	    {
	      if (decl != NULL && DECL_P (decl))
		error ("%+qD causes a section type conflict with %qD",
		       decl, sect->named.decl);
	      else
		error ("section type conflict with %qD", sect->named.decl);
	      inform (DECL_SOURCE_LOCATION (sect->named.decl),
		      "%qD was declared here", sect->named.decl);
	    }
	  else if (decl != NULL && DECL_P (decl))
	    error ("%+qD causes a section type conflict", decl);
	  else
	    error ("section type conflict");
	  /* Make sure we don't error about one section multiple times.  */
	  sect->common.flags |= SECTION_OVERRIDE;
	}
    }
  return sect;
}
374 | |
375 | /* Return true if the current compilation mode benefits from having |
376 | objects grouped into blocks. */ |
377 | |
378 | static bool |
379 | use_object_blocks_p (void) |
380 | { |
381 | return flag_section_anchors; |
382 | } |
383 | |
384 | /* Return the object_block structure for section SECT. Create a new |
385 | structure if we haven't created one already. Return null if SECT |
386 | itself is null. Return also null for mergeable sections since |
387 | section anchors can't be used in mergeable sections anyway, |
388 | because the linker might move objects around, and using the |
389 | object blocks infrastructure in that case is both a waste and a |
390 | maintenance burden. */ |
391 | |
392 | static struct object_block * |
393 | get_block_for_section (section *sect) |
394 | { |
395 | struct object_block *block; |
396 | |
397 | if (sect == NULL) |
398 | return NULL; |
399 | |
400 | if (sect->common.flags & SECTION_MERGE) |
401 | return NULL; |
402 | |
403 | object_block **slot |
404 | = object_block_htab->find_slot_with_hash (comparable: sect, hash: hash_section (sect), |
405 | insert: INSERT); |
406 | block = *slot; |
407 | if (block == NULL) |
408 | { |
409 | block = ggc_cleared_alloc<object_block> (); |
410 | block->sect = sect; |
411 | *slot = block; |
412 | } |
413 | return block; |
414 | } |
415 | |
416 | /* Create a symbol with label LABEL and place it at byte offset |
417 | OFFSET in BLOCK. OFFSET can be negative if the symbol's offset |
418 | is not yet known. LABEL must be a garbage-collected string. */ |
419 | |
static rtx
create_block_symbol (const char *label, struct object_block *block,
		     HOST_WIDE_INT offset)
{
  rtx symbol;
  unsigned int size;

  /* Create the extended SYMBOL_REF: a plain rtx header followed by a
     block_symbol trailer holding the block/offset fields.  */
  size = RTX_HDR_SIZE + sizeof (struct block_symbol);
  symbol = (rtx) ggc_internal_alloc (size);

  /* Initialize the normal SYMBOL_REF fields.  Zeroing first keeps any
     padding and untouched fields in a defined state.  */
  memset (symbol, 0, size);
  PUT_CODE (symbol, SYMBOL_REF);
  PUT_MODE (symbol, Pmode);
  XSTR (symbol, 0) = label;
  /* SYMBOL_FLAG_HAS_BLOCK_INFO marks the trailer as valid so the
     SYMBOL_REF_BLOCK* accessors may be used.  */
  SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;

  /* Initialize the block_symbol stuff.  */
  SYMBOL_REF_BLOCK (symbol) = block;
  SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;

  return symbol;
}
444 | |
445 | /* Return a section with a particular name and with whatever SECTION_* |
446 | flags section_type_flags deems appropriate. The name of the section |
447 | is taken from NAME if nonnull, otherwise it is taken from DECL's |
448 | DECL_SECTION_NAME. DECL is the decl associated with the section |
449 | (see the section comment for details) and RELOC is as for |
450 | section_type_flags. */ |
451 | |
452 | section * |
453 | get_named_section (tree decl, const char *name, int reloc) |
454 | { |
455 | unsigned int flags; |
456 | |
457 | if (name == NULL) |
458 | { |
459 | gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl)); |
460 | name = DECL_SECTION_NAME (decl); |
461 | } |
462 | |
463 | flags = targetm.section_type_flags (decl, name, reloc); |
464 | return get_section (name, flags, decl); |
465 | } |
466 | |
467 | /* Worker for resolve_unique_section. */ |
468 | |
static bool
set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
{
  n->implicit_section = true;
  /* Return false so call_for_symbol_and_aliases keeps walking.  */
  return false;
}
475 | |
476 | /* If required, set DECL_SECTION_NAME to a unique name. */ |
477 | |
478 | void |
479 | resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED, |
480 | int flag_function_or_data_sections) |
481 | { |
482 | if (DECL_SECTION_NAME (decl) == NULL |
483 | && targetm_common.have_named_sections |
484 | && (flag_function_or_data_sections |
485 | || lookup_attribute (attr_name: "retain" , DECL_ATTRIBUTES (decl)) |
486 | || DECL_COMDAT_GROUP (decl))) |
487 | { |
488 | targetm.asm_out.unique_section (decl, reloc); |
489 | if (DECL_SECTION_NAME (decl)) |
490 | symtab_node::get (decl)->call_for_symbol_and_aliases |
491 | (callback: set_implicit_section, NULL, include_overwritable: true); |
492 | } |
493 | } |
494 | |
495 | #ifdef BSS_SECTION_ASM_OP |
496 | |
497 | #ifdef ASM_OUTPUT_ALIGNED_BSS |
498 | |
499 | /* Utility function for targets to use in implementing |
500 | ASM_OUTPUT_ALIGNED_BSS. |
501 | ??? It is believed that this function will work in most cases so such |
502 | support is localized here. */ |
503 | |
static void
asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
			const char *name, unsigned HOST_WIDE_INT size,
			int align)
{
  switch_to_section (bss_section);
  /* ALIGN arrives in bits; the directive wants a log2 byte count.  */
  ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */
  /* Reserve at least one byte so the label refers to distinct storage
     even for zero-sized objects.  */
  ASM_OUTPUT_SKIP (file, size ? size : 1);
}
520 | |
521 | #endif |
522 | |
523 | #endif /* BSS_SECTION_ASM_OP */ |
524 | |
525 | #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS |
526 | /* Return the hot section for function DECL. Return text_section for |
527 | null DECLs. */ |
528 | |
529 | static section * |
530 | hot_function_section (tree decl) |
531 | { |
532 | if (decl != NULL_TREE |
533 | && DECL_SECTION_NAME (decl) != NULL |
534 | && targetm_common.have_named_sections) |
535 | return get_named_section (decl, NULL, reloc: 0); |
536 | else |
537 | return text_section; |
538 | } |
539 | #endif |
540 | |
/* Return the section named TEXT_SECTION_NAME if DECL is NULL or has no
   DECL_SECTION_NAME.

   When DECL_SECTION_NAME is non-NULL and NAMED_SECTION_SUFFIX is
   non-NULL, return the section whose name is DECL's (stripped) section
   name with NAMED_SECTION_SUFFIX appended.  Otherwise, if DECL's
   section is implicit, return the section named
   "TEXT_SECTION_NAME.IMPLICIT_NAME"; failing that, return NULL.  */
548 | |
section *
get_named_text_section (tree decl,
			const char *text_section_name,
			const char *named_section_suffix)
{
  if (decl && DECL_SECTION_NAME (decl))
    {
      if (named_section_suffix)
	{
	  /* Append NAMED_SECTION_SUFFIX to DECL's stripped section
	     name.  */
	  const char *dsn = DECL_SECTION_NAME (decl);
	  const char *stripped_name;
	  char *name, *buffer;

	  /* Work on a stack copy; strip_name_encoding may expect a
	     modifiable string.  */
	  name = (char *) alloca (strlen (dsn) + 1);
	  memcpy (name, dsn,
		  strlen (dsn) + 1);

	  stripped_name = targetm.strip_name_encoding (name);

	  buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
	  return get_named_section (decl, buffer, 0);
	}
      else if (symtab_node::get (decl)->implicit_section)
	{
	  const char *name;

	  /* Do not try to split gnu_linkonce functions.  This gets somewhat
	     slipperly.  */
	  if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
	    return NULL;
	  /* Build "TEXT_SECTION_NAME.<stripped assembler name>".  */
	  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
	  name = targetm.strip_name_encoding (name);
	  return get_named_section (decl, ACONCAT ((text_section_name, ".",
						    name, NULL)), 0);
	}
      else
	/* Explicit user-provided section: leave placement alone.  */
	return NULL;
    }
  return get_named_section (decl, text_section_name, 0);
}
589 | |
590 | /* Choose named function section based on its frequency. */ |
591 | |
section *
default_function_section (tree decl, enum node_frequency freq,
			  bool startup, bool exit)
{
#if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
  /* Old GNU linkers have buggy --gc-section support, which sometimes
     results in .gcc_except_table* sections being garbage collected.  */
  if (decl
      && symtab_node::get (decl)->implicit_section)
    return NULL;
#endif

  /* Frequency-based placement only makes sense with -freorder-functions
     and named-section support.  */
  if (!flag_reorder_functions
      || !targetm_common.have_named_sections)
    return NULL;
  /* Startup code should go to startup subsection unless it is
     unlikely executed (this happens especially with function splitting
     where we can split away unnecessary parts of static constructors.  */
  if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    {
      /* During LTO the tp_first_run profiling will naturally place all
	 initialization code first.  Using separate section is counter-productive
	 because startup only code may call functions which are no longer
	 startup only.  */
      if (!in_lto_p
	  || !cgraph_node::get (decl)->tp_first_run
	  || !opt_for_fn (decl, flag_profile_reorder_functions))
	return get_named_text_section (decl, ".text.startup", NULL);
      else
	return NULL;
    }

  /* Similarly for exit.  */
  if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return get_named_text_section (decl, ".text.exit", NULL);

  /* Group cold functions together, similarly for hot code.  */
  switch (freq)
    {
    case NODE_FREQUENCY_UNLIKELY_EXECUTED:
      return get_named_text_section (decl, ".text.unlikely", NULL);
    case NODE_FREQUENCY_HOT:
      return get_named_text_section (decl, ".text.hot", NULL);
    default:
      /* NODE_FREQUENCY_NORMAL and NODE_FREQUENCY_EXECUTED_ONCE stay in
	 the default text section.  */
      return NULL;
    }
}
640 | |
641 | /* Return the section for function DECL. |
642 | |
643 | If DECL is NULL_TREE, return the text section. We can be passed |
644 | NULL_TREE under some circumstances by dbxout.cc at least. |
645 | |
646 | If FORCE_COLD is true, return cold function section ignoring |
647 | the frequency info of cgraph_node. */ |
648 | |
static section *
function_section_1 (tree decl, bool force_cold)
{
  section *section = NULL;
  enum node_frequency freq = NODE_FREQUENCY_NORMAL;
  bool startup = false, exit = false;

  /* Pull frequency/startup/exit hints from the cgraph node, when one
     exists for DECL.  */
  if (decl)
    {
      struct cgraph_node *node = cgraph_node::get (decl);

      if (node)
	{
	  freq = node->frequency;
	  startup = node->only_called_at_startup;
	  exit = node->only_called_at_exit;
	}
    }
  /* FORCE_COLD overrides whatever the profile says.  */
  if (force_cold)
    freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;

#ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
  if (decl != NULL_TREE
      && DECL_SECTION_NAME (decl) != NULL)
    {
      if (targetm.asm_out.function_section)
	section = targetm.asm_out.function_section (decl, freq,
						    startup, exit);
      if (section)
	return section;
      return get_named_section (decl, NULL, 0);
    }
  else
    /* NOTE(review): this path dereferences symtab_node::get (decl)
       even though DECL may be NULL_TREE here — presumably targets
       defining USE_SELECT_SECTION_FOR_FUNCTIONS never pass a null
       DECL; confirm against those targets.  */
    return targetm.asm_out.select_section
      (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
       symtab_node::get (decl)->definition_alignment ());
#else
  if (targetm.asm_out.function_section)
    section = targetm.asm_out.function_section (decl, freq, startup, exit);
  if (section)
    return section;
  return hot_function_section (decl);
#endif
}
693 | |
694 | /* Return the section for function DECL. |
695 | |
696 | If DECL is NULL_TREE, return the text section. We can be passed |
697 | NULL_TREE under some circumstances by dbxout.cc at least. */ |
698 | |
699 | section * |
700 | function_section (tree decl) |
701 | { |
702 | /* Handle cases where function splitting code decides |
703 | to put function entry point into unlikely executed section |
704 | despite the fact that the function itself is not cold |
705 | (i.e. it is called rarely but contains a hot loop that is |
706 | better to live in hot subsection for the code locality). */ |
707 | return function_section_1 (decl, |
708 | force_cold: first_function_block_is_cold); |
709 | } |
710 | |
711 | /* Return the section for the current function, take IN_COLD_SECTION_P |
712 | into account. */ |
713 | |
714 | section * |
715 | current_function_section (void) |
716 | { |
717 | return function_section_1 (decl: current_function_decl, force_cold: in_cold_section_p); |
718 | } |
719 | |
720 | /* Tell assembler to switch to unlikely-to-be-executed text section. */ |
721 | |
722 | section * |
723 | unlikely_text_section (void) |
724 | { |
725 | return function_section_1 (decl: current_function_decl, force_cold: true); |
726 | } |
727 | |
728 | /* When called within a function context, return true if the function |
729 | has been assigned a cold text section and if SECT is that section. |
730 | When called outside a function context, return true if SECT is the |
731 | default cold section. */ |
732 | |
733 | bool |
734 | unlikely_text_section_p (section *sect) |
735 | { |
736 | return sect == function_section_1 (decl: current_function_decl, force_cold: true); |
737 | } |
738 | |
739 | /* Switch to the other function partition (if inside of hot section |
740 | into cold section, otherwise into the hot section). */ |
741 | |
void
switch_to_other_text_partition (void)
{
  /* Flip the hot/cold state and emit the switch directive for the
     section the current function now maps to.  */
  in_cold_section_p = !in_cold_section_p;
  switch_to_section (current_function_section ());
}
748 | |
749 | /* Return the read-only or relocated read-only data section |
750 | associated with function DECL. */ |
751 | |
section *
default_function_rodata_section (tree decl, bool relocatable)
{
  const char* sname;
  unsigned int flags;

  flags = 0;

  /* Relocatable read-only data must be writable until relocations are
     applied, hence WRITE|RELRO.  */
  if (relocatable)
    {
      sname = ".data.rel.ro.local";
      flags = (SECTION_WRITE | SECTION_RELRO);
    }
  else
    sname = ".rodata";

  if (decl && DECL_SECTION_NAME (decl))
    {
      const char *name = DECL_SECTION_NAME (decl);

      if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
	{
	  const char *dot;
	  size_t len;
	  char* rname;

	  /* Reuse the function's section suffix (everything from the
	     second dot onward) after the rodata prefix; if there is no
	     such dot, append the whole name.  */
	  dot = strchr (name + 1, '.');
	  if (!dot)
	    dot = name;
	  len = strlen (dot) + strlen (sname) + 1;
	  rname = (char *) alloca (len);

	  strcpy (rname, sname);
	  strcat (rname, dot);
	  return get_section (rname, (SECTION_LINKONCE | flags), decl);
	}
      /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo or
	 .gnu.linkonce.d.rel.ro.local.foo if the jump table is relocatable.  */
      else if (DECL_COMDAT_GROUP (decl)
	       && startswith (name, ".gnu.linkonce.t."))
	{
	  size_t len;
	  char *rname;

	  if (relocatable)
	    {
	      /* name + 15 points at the final ".foo" part, so this
		 yields ".gnu.linkonce.d.rel.ro.local.foo".  */
	      len = strlen (name) + strlen (".rel.ro.local") + 1;
	      rname = (char *) alloca (len);

	      strcpy (rname, ".gnu.linkonce.d.rel.ro.local");
	      strcat (rname, name + 15);
	    }
	  else
	    {
	      /* Index 14 is the 't' in ".gnu.linkonce.t."; replace it
		 with 'r' to get ".gnu.linkonce.r.foo".  */
	      len = strlen (name) + 1;
	      rname = (char *) alloca (len);

	      memcpy (rname, name, len);
	      rname[14] = 'r';
	    }
	  return get_section (rname, (SECTION_LINKONCE | flags), decl);
	}
      /* For .text.foo we want to use .rodata.foo.  */
      else if (flag_function_sections && flag_data_sections
	       && startswith (name, ".text."))
	{
	  /* name + 5 skips ".text", keeping ".foo".  */
	  size_t len = strlen (name) + 1;
	  char *rname = (char *) alloca (len + strlen (sname) - 5);

	  memcpy (rname, sname, strlen (sname));
	  memcpy (rname + strlen (sname), name + 5, len - 5);
	  return get_section (rname, flags, decl);
	}
    }

  if (relocatable)
    return get_section (sname, flags, decl);
  else
    return readonly_data_section;
}
832 | |
833 | /* Return the read-only data section associated with function DECL |
834 | for targets where that section should be always the single |
835 | readonly data section. */ |
836 | |
section *
default_no_function_rodata_section (tree, bool)
{
  /* Both parameters (the decl and the relocatable flag) are ignored:
     on these targets everything shares the one read-only section.  */
  return readonly_data_section;
}
842 | |
843 | /* A subroutine of mergeable_string_section and mergeable_constant_section. */ |
844 | |
845 | static const char * |
846 | function_mergeable_rodata_prefix (void) |
847 | { |
848 | section *s = targetm.asm_out.function_rodata_section (current_function_decl, |
849 | false); |
850 | if (SECTION_STYLE (s) == SECTION_NAMED) |
851 | return s->named.name; |
852 | else |
853 | return targetm.asm_out.mergeable_rodata_prefix; |
854 | } |
855 | |
856 | /* Return the section to use for string merging. */ |
857 | |
static section *
mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
			  unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
			  unsigned int flags ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT len;

  /* Only NUL-terminated arrays of a supported character mode can go
     into a SHF_MERGE|SHF_STRINGS section.  */
  if (HAVE_GAS_SHF_MERGE && flag_merge_constants
      && TREE_CODE (decl) == STRING_CST
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
      && align <= 256
      && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
      && TREE_STRING_LENGTH (decl) == len)
    {
      scalar_int_mode mode;
      unsigned int modesize;
      const char *str;
      HOST_WIDE_INT i;
      int j, unit;
      const char *prefix = function_mergeable_rodata_prefix ();
      char *name = (char *) alloca (strlen (prefix) + 30);

      mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
      modesize = GET_MODE_BITSIZE (mode);
      /* The element width must be a power of two between 8 and 256
	 bits.  */
      if (modesize >= 8 && modesize <= 256
	  && (modesize & (modesize - 1)) == 0)
	{
	  if (align < modesize)
	    align = modesize;

	  if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
	    return readonly_data_section;

	  str = TREE_STRING_POINTER (decl);
	  unit = GET_MODE_SIZE (mode);

	  /* Check for embedded NUL characters.  The outer loop stops
	     at the first all-zero element; merging requires that this
	     be exactly the final element.  */
	  for (i = 0; i < len; i += unit)
	    {
	      for (j = 0; j < unit; j++)
		if (str[i + j] != '\0')
		  break;
	      if (j == unit)
		break;
	    }
	  if (i == len - unit || (unit == 1 && i == len))
	    {
	      /* E.g. ".rodata.str1.1" for byte strings.  The entry
		 size in bytes is encoded in the low SECTION_ENTSIZE
		 bits of FLAGS.  */
	      sprintf (name, "%s.str%d.%d", prefix,
		       modesize / 8, (int) (align / 8));
	      flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
	      return get_section (name, flags, NULL);
	    }
	}
    }

  return readonly_data_section;
}
915 | |
916 | /* Return the section to use for constant merging. */ |
917 | |
section *
mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
			    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
			    unsigned int flags ATTRIBUTE_UNUSED)
{
  /* Constant merging requires assembler SHF_MERGE support and
     -fmerge-constants.  The constant must have a fixed-size, non-BLK
     mode no wider than its alignment, and the alignment must be a
     power of two between 1 and 32 bytes; without linker support for
     aligned SHF_MERGE sections, only byte alignment is usable.  */
  if (HAVE_GAS_SHF_MERGE && flag_merge_constants
      && mode != VOIDmode
      && mode != BLKmode
      && known_le (GET_MODE_BITSIZE (mode), align)
      && align >= 8
      && align <= 256
      && (align & (align - 1)) == 0
      && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8))
    {
      const char *prefix = function_mergeable_rodata_prefix ();
      char *name = (char *) alloca (strlen (prefix) + 30);

      /* The entity size in bytes is encoded in the low SECTION_ENTSIZE
	 bits of the flags.  */
      sprintf (s: name, format: "%s.cst%d" , prefix, (int) (align / 8));
      flags |= (align / 8) | SECTION_MERGE;
      return get_section (name, flags, NULL);
    }
  return readonly_data_section;
}
941 | |
942 | /* Given NAME, a putative register name, discard any customary prefixes. */ |
943 | |
static const char *
strip_reg_name (const char *name)
{
  /* First drop the target's register prefix, if it defines one and
     NAME starts with it; then drop a single leading '%' or '#'.  */
#ifdef REGISTER_PREFIX
  size_t pfx_len = strlen (REGISTER_PREFIX);
  if (strncmp (name, REGISTER_PREFIX, pfx_len) == 0)
    name += pfx_len;
#endif
  if (*name == '%' || *name == '#')
    return name + 1;
  return name;
}
955 | |
956 | /* The user has asked for a DECL to have a particular name. Set (or |
957 | change) it in such a way that we don't prefix an underscore to |
958 | it. */ |
959 | void |
960 | set_user_assembler_name (tree decl, const char *name) |
961 | { |
962 | char *starred = (char *) alloca (strlen (name) + 2); |
963 | starred[0] = '*'; |
964 | strcpy (dest: starred + 1, src: name); |
965 | symtab->change_decl_assembler_name (decl, get_identifier (starred)); |
966 | SET_DECL_RTL (decl, NULL_RTX); |
967 | } |
968 | |
969 | /* Decode an `asm' spec for a declaration as a register name. |
970 | Return the register number, or -1 if nothing specified, |
971 | or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized, |
972 | or -3 if ASMSPEC is `cc' and is not recognized, |
973 | or -4 if ASMSPEC is `memory' and is not recognized. |
974 | Accept an exact spelling or a decimal number. |
975 | Prefixes such as % are optional. */ |
976 | |
int
decode_reg_name_and_count (const char *asmspec, int *pnregs)
{
  /* Presume just one register is clobbered.  */
  *pnregs = 1;

  if (asmspec != 0)
    {
      int i;

      /* Get rid of confusing prefixes.  */
      asmspec = strip_reg_name (name: asmspec);

      /* Allow a decimal number as a "register name".  */
      /* I runs backwards; it ends below 0 only if every character was
	 a digit.  */
      for (i = strlen (s: asmspec) - 1; i >= 0; i--)
	if (! ISDIGIT (asmspec[i]))
	  break;
      if (asmspec[0] != 0 && i < 0)
	{
	  i = atoi (nptr: asmspec);
	  /* Only hard registers with a non-empty name are valid.  */
	  if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
	    return i;
	  else
	    return -2;
	}

      /* Exact match against the canonical hard register names.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (reg_names[i][0]
	    && ! strcmp (s1: asmspec, s2: strip_reg_name (reg_names[i])))
	  return i;

#ifdef OVERLAPPING_REGISTER_NAMES
      /* Some targets have alternate names that denote several hard
	 registers at once; these also set *PNREGS.  */
      {
	static const struct
	{
	  const char *const name;
	  const int number;
	  const int nregs;
	} table[] = OVERLAPPING_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (asmspec, table[i].name))
	    {
	      *pnregs = table[i].nregs;
	      return table[i].number;
	    }
      }
#endif /* OVERLAPPING_REGISTER_NAMES */

#ifdef ADDITIONAL_REGISTER_NAMES
      /* Target-specific aliases for single hard registers.  */
      {
	static const struct { const char *const name; const int number; } table[]
	  = ADDITIONAL_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (s1: asmspec, s2: table[i].name)
	      && reg_names[table[i].number][0])
	    return table[i].number;
      }
#endif /* ADDITIONAL_REGISTER_NAMES */

      /* The distinguished "memory" and "cc" clobber names get their
	 own return codes (see the function comment).  */
      if (!strcmp (s1: asmspec, s2: "memory" ))
	return -4;

      if (!strcmp (s1: asmspec, s2: "cc" ))
	return -3;

      return -2;
    }

  return -1;
}
1051 | |
1052 | int |
1053 | decode_reg_name (const char *name) |
1054 | { |
1055 | int count; |
1056 | return decode_reg_name_and_count (asmspec: name, pnregs: &count); |
1057 | } |
1058 | |
1059 | |
1060 | /* Return true if DECL's initializer is suitable for a BSS section. */ |
1061 | |
1062 | bool |
1063 | bss_initializer_p (const_tree decl, bool named) |
1064 | { |
1065 | /* Do not put non-common constants into the .bss section, they belong in |
1066 | a readonly section, except when NAMED is true. */ |
1067 | return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named) |
1068 | && (DECL_INITIAL (decl) == NULL |
1069 | /* In LTO we have no errors in program; error_mark_node is used |
1070 | to mark offlined constructors. */ |
1071 | || (DECL_INITIAL (decl) == error_mark_node |
1072 | && !in_lto_p) |
1073 | || (flag_zero_initialized_in_bss |
1074 | && initializer_zerop (DECL_INITIAL (decl)) |
1075 | /* A decl with the "persistent" attribute applied and |
1076 | explicitly initialized to 0 should not be treated as a BSS |
1077 | variable. */ |
1078 | && !DECL_PERSISTENT_P (decl)))); |
1079 | } |
1080 | |
1081 | /* Compute the alignment of variable specified by DECL. |
1082 | DONT_OUTPUT_DATA is from assemble_variable. */ |
1083 | |
void
align_variable (tree decl, bool dont_output_data)
{
  unsigned int align = DECL_ALIGN (decl);

  /* In the case for initializing an array whose length isn't specified,
     where we have not yet been able to do the layout,
     figure out the proper alignment now (from the element type).  */
  if (dont_output_data && DECL_SIZE (decl) == 0
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));

  /* Some object file formats have a maximum alignment which they support.
     In particular, a.out format supports a maximum alignment of 4.  */
  if (align > MAX_OFILE_ALIGNMENT)
    {
      error ("alignment of %q+D is greater than maximum object "
	     "file alignment %d" , decl,
	     MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
      align = MAX_OFILE_ALIGNMENT;
    }

  /* User-specified alignment (via the aligned attribute) is honored
     as-is; only compute target-preferred increases otherwise.  */
  if (! DECL_USER_ALIGN (decl))
    {
#ifdef DATA_ABI_ALIGNMENT
      unsigned int data_abi_align
	= DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
      /* For backwards compatibility, don't assume the ABI alignment for
	 TLS variables.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
	align = data_abi_align;
#endif

      /* On some machines, it is good to increase alignment sometimes.
	 But as DECL_ALIGN is used both for actually emitting the variable
	 and for code accessing the variable as guaranteed alignment, we
	 can only increase the alignment if it is a performance optimization
	 if the references to it must bind to the current definition.  */
      if (decl_binds_to_current_def_p (decl)
	  && !DECL_VIRTUAL_P (decl))
	{
#ifdef DATA_ALIGNMENT
	  unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	    align = data_align;
#endif
	  if (DECL_INITIAL (decl) != 0
	      /* In LTO we have no errors in program; error_mark_node is used
		 to mark offlined constructors.  */
	      && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	    {
	      /* The target may also prefer extra alignment based on the
		 value of the initializer itself.  */
	      unsigned int const_align
		= targetm.constant_alignment (DECL_INITIAL (decl), align);
	      /* Don't increase alignment too much for TLS variables - TLS
		 space is too precious.  */
	      if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
		align = const_align;
	    }
	}
    }

  /* Reset the alignment in case we have made it tighter, so we can benefit
     from it in get_pointer_alignment.  */
  SET_DECL_ALIGN (decl, align);
}
1151 | |
1152 | /* Return DECL_ALIGN (decl), possibly increased for optimization purposes |
1153 | beyond what align_variable returned. */ |
1154 | |
static unsigned int
get_variable_align (tree decl)
{
  unsigned int align = DECL_ALIGN (decl);

  /* For user aligned vars or static vars align_variable already did
     everything.  */
  if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
    return align;

#ifdef DATA_ABI_ALIGNMENT
  /* align_variable skipped the ABI alignment for TLS variables (for
     backwards compatibility); apply it here for emission purposes.  */
  if (DECL_THREAD_LOCAL_P (decl))
    align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
#endif

  /* For decls that bind to the current definition, align_variable
     did also everything, except for not assuming ABI required alignment
     of TLS variables.  For other vars, increase the alignment here
     as an optimization.  */
  if (!decl_binds_to_current_def_p (decl))
    {
      /* On some machines, it is good to increase alignment sometimes.  */
#ifdef DATA_ALIGNMENT
      unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
      /* Don't increase alignment too much for TLS variables - TLS space
	 is too precious.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	align = data_align;
#endif
      if (DECL_INITIAL (decl) != 0
	  /* In LTO we have no errors in program; error_mark_node is used
	     to mark offlined constructors.  */
	  && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	{
	  /* Also consult the target about initializer-based alignment.  */
	  unsigned int const_align
	    = targetm.constant_alignment (DECL_INITIAL (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
	    align = const_align;
	}
    }

  return align;
}
1200 | |
1201 | /* Compute reloc for get_variable_section. The return value |
1202 | is a mask for which bit 1 indicates a global relocation, and bit 0 |
1203 | indicates a local relocation. */ |
1204 | |
1205 | int |
1206 | compute_reloc_for_var (tree decl) |
1207 | { |
1208 | int reloc; |
1209 | |
1210 | if (DECL_INITIAL (decl) == error_mark_node) |
1211 | reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0; |
1212 | else if (DECL_INITIAL (decl)) |
1213 | reloc = compute_reloc_for_constant (DECL_INITIAL (decl)); |
1214 | else |
1215 | reloc = 0; |
1216 | |
1217 | return reloc; |
1218 | } |
1219 | |
1220 | /* Return the section into which the given VAR_DECL or CONST_DECL |
1221 | should be placed. PREFER_NOSWITCH_P is true if a noswitch |
1222 | section should be used wherever possible. */ |
1223 | |
section *
get_variable_section (tree decl, bool prefer_noswitch_p)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  int reloc;
  /* Resolve aliases so section selection is done on the ultimate
     definition.  */
  varpool_node *vnode = varpool_node::get (decl);
  if (vnode)
    {
      vnode = vnode->ultimate_alias_target ();
      decl = vnode->decl;
    }

  if (TREE_TYPE (decl) != error_mark_node)
    as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

  /* We need the constructor to figure out reloc flag.  */
  if (vnode)
    vnode->get_constructor ();

  if (DECL_COMMON (decl)
      && !lookup_attribute (attr_name: "retain" , DECL_ATTRIBUTES (decl)))
    {
      /* If the decl has been given an explicit section name, or it resides
	 in a non-generic address space, then it isn't common, and shouldn't
	 be handled as such.  */
      gcc_assert (DECL_SECTION_NAME (decl) == NULL
		  && ADDR_SPACE_GENERIC_P (as));
      if (DECL_THREAD_LOCAL_P (decl))
	return tls_comm_section;
      else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
	return comm_section;
    }

  reloc = compute_reloc_for_var (decl);

  /* Give the decl a unique section name if -fdata-sections or a
     target-specific reason requires one.  */
  resolve_unique_section (decl, reloc, flag_data_sections);
  if (IN_NAMED_SECTION (decl))
    {
      section *sect = get_named_section (decl, NULL, reloc);

      /* A non-zero initializer cannot legally go into a BSS-style
	 named section; diagnose and drop the initializer.  */
      if ((sect->common.flags & SECTION_BSS)
	  && !bss_initializer_p (decl, named: true))
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "only zero initializers are allowed in section %qs" ,
		    sect->named.name);
	  DECL_INITIAL (decl) = error_mark_node;
	}
      return sect;
    }

  /* Prefer the no-switch common/BSS sections where possible, unless
     the caller asked us to avoid them (PREFER_NOSWITCH_P) and the
     target has switchable BSS sections.  */
  if (ADDR_SPACE_GENERIC_P (as)
      && !DECL_THREAD_LOCAL_P (decl)
      && !DECL_NOINIT_P (decl)
      && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
      && bss_initializer_p (decl))
    {
      /* ASAN-protected globals need a switchable section so redzones
	 can be emitted around them.  */
      if (!TREE_PUBLIC (decl)
	  && !((flag_sanitize & SANITIZE_ADDRESS)
	       && asan_protect_global (decl)))
	return lcomm_section;
      if (bss_noswitch_section)
	return bss_noswitch_section;
    }

  /* Otherwise let the target pick the section.  */
  return targetm.asm_out.select_section (decl, reloc,
					 get_variable_align (decl));
}
1292 | |
1293 | /* Return the block into which object_block DECL should be placed. */ |
1294 | |
static struct object_block *
get_block_for_decl (tree decl)
{
  section *sect;

  if (VAR_P (decl))
    {
      /* The object must be defined in this translation unit.  */
      if (DECL_EXTERNAL (decl))
	return NULL;

      /* There's no point using object blocks for something that is
	 isolated by definition.  */
      if (DECL_COMDAT_GROUP (decl))
	return NULL;
    }

  /* We can only calculate block offsets if the decl has a known
     constant size.  */
  if (DECL_SIZE_UNIT (decl) == NULL)
    return NULL;
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
    return NULL;

  /* Find out which section should contain DECL.  We cannot put it into
     an object block if it requires a standalone definition.  */
  if (VAR_P (decl))
    align_variable (decl, dont_output_data: 0);
  sect = get_variable_section (decl, prefer_noswitch_p: true);
  if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
    return NULL;

  /* A "retain"-attributed decl must land in a SECTION_RETAIN section
     and vice versa; a mismatch means no block can be used.  */
  if (bool (lookup_attribute (attr_name: "retain" , DECL_ATTRIBUTES (decl)))
      != bool (sect->common.flags & SECTION_RETAIN))
    return NULL;

  return get_block_for_section (sect);
}
1333 | |
1334 | /* Make sure block symbol SYMBOL is in block BLOCK. */ |
1335 | |
1336 | static void |
1337 | change_symbol_block (rtx symbol, struct object_block *block) |
1338 | { |
1339 | if (block != SYMBOL_REF_BLOCK (symbol)) |
1340 | { |
1341 | gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0); |
1342 | SYMBOL_REF_BLOCK (symbol) = block; |
1343 | } |
1344 | } |
1345 | |
1346 | /* Return true if it is possible to put DECL in an object_block. */ |
1347 | |
static bool
use_blocks_for_decl_p (tree decl)
{
  struct symtab_node *snode;

  /* Don't create object blocks if each DECL is placed into a separate
     section because that will uselessly create a section anchor for
     each DECL.  */
  if (flag_data_sections)
    return false;

  /* Only data DECLs can be placed into object blocks.  */
  if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
    return false;

  /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
     are never used from code directly and we never want object block handling
     for those.  */
  if (DECL_INITIAL (decl) == decl)
    return false;

  /* If this decl is an alias, then we don't want to emit a
     definition.  */
  if (VAR_P (decl)
      && (snode = symtab_node::get (decl)) != NULL
      && snode->alias)
    return false;

  /* Finally, defer to the target's own policy.  */
  return targetm.use_blocks_for_decl_p (decl);
}
1378 | |
1379 | /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS |
1380 | until we find an identifier that is not itself a transparent alias. |
1381 | Modify the alias passed to it by reference (and all aliases on the |
1382 | way to the ultimate target), such that they do not have to be |
1383 | followed again, and return the ultimate target of the alias |
1384 | chain. */ |
1385 | |
static inline tree
ultimate_transparent_alias_target (tree *alias)
{
  tree target = *alias;

  if (IDENTIFIER_TRANSPARENT_ALIAS (target))
    {
      /* A transparent alias always chains to its target via
	 TREE_CHAIN; recurse to the end of the chain.  */
      gcc_assert (TREE_CHAIN (target));
      target = ultimate_transparent_alias_target (alias: &TREE_CHAIN (target));
      /* The ultimate target is itself neither an alias nor chained.  */
      gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target)
		  && ! TREE_CHAIN (target));
      /* Path-compress: point *ALIAS straight at the ultimate target so
	 the chain need not be walked again.  */
      *alias = target;
    }

  return target;
}
1402 | |
1403 | /* Return true if REGNUM is mentioned in ELIMINABLE_REGS as a from |
1404 | register number. */ |
1405 | |
1406 | static bool |
1407 | eliminable_regno_p (int regnum) |
1408 | { |
1409 | static const struct |
1410 | { |
1411 | const int from; |
1412 | const int to; |
1413 | } eliminables[] = ELIMINABLE_REGS; |
1414 | for (size_t i = 0; i < ARRAY_SIZE (eliminables); i++) |
1415 | if (regnum == eliminables[i].from) |
1416 | return true; |
1417 | return false; |
1418 | } |
1419 | |
1420 | /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should |
1421 | have static storage duration. In other words, it should not be an |
1422 | automatic variable, including PARM_DECLs. |
1423 | |
1424 | There is, however, one exception: this function handles variables |
1425 | explicitly placed in a particular register by the user. |
1426 | |
1427 | This is never called for PARM_DECL nodes. */ |
1428 | |
void
make_decl_rtl (tree decl)
{
  const char *name = 0;
  int reg_number;
  tree id;
  rtx x;

  /* Check that we are not being given an automatic variable.  */
  gcc_assert (TREE_CODE (decl) != PARM_DECL
	      && TREE_CODE (decl) != RESULT_DECL);

  /* A weak alias has TREE_PUBLIC set but not the other bits.  */
  gcc_assert (!VAR_P (decl)
	      || TREE_STATIC (decl)
	      || TREE_PUBLIC (decl)
	      || DECL_EXTERNAL (decl)
	      || DECL_REGISTER (decl));

  /* And that we were not given a type or a label.  */
  gcc_assert (TREE_CODE (decl) != TYPE_DECL
	      && TREE_CODE (decl) != LABEL_DECL);

  /* For a duplicate declaration, we can be called twice on the
     same DECL node.  Don't discard the RTL already made.  */
  if (DECL_RTL_SET_P (decl))
    {
      /* If the old RTL had the wrong mode, fix the mode.  */
      x = DECL_RTL (decl);
      if (GET_MODE (x) != DECL_MODE (decl))
	SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));

      if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
	return;

      /* ??? Another way to do this would be to maintain a hashed
	 table of such critters.  Instead of adding stuff to a DECL
	 to give certain attributes to it, we could use an external
	 hash map from DECL to set of attributes.  */

      /* Let the target reassign the RTL if it wants.
	 This is necessary, for example, when one machine specific
	 decl attribute overrides another.  */
      targetm.encode_section_info (decl, DECL_RTL (decl), false);

      /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
	 on the new decl information.  */
      if (MEM_P (x)
	  && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
	change_symbol_block (XEXP (x, 0), block: get_block_for_decl (decl));

      return;
    }

  /* If this variable belongs to the global constant pool, retrieve the
     pre-computed RTL or recompute it in LTO mode.  */
  if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
    {
      SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
      return;
    }

  id = DECL_ASSEMBLER_NAME (decl);
  name = IDENTIFIER_POINTER (id);

  /* A register variable must carry a user-set assembler name (the
     '*'-prefixed form); without one there is no register to decode.  */
  if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
      && DECL_REGISTER (decl))
    {
      error ("register name not specified for %q+D" , decl);
    }
  else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
    {
      /* Skip the '*' prefix to get at the register name proper.  */
      const char *asmspec = name+1;
      machine_mode mode = DECL_MODE (decl);
      reg_number = decode_reg_name (name: asmspec);
      /* First detect errors in declaring global registers.  */
      if (reg_number == -1)
	error ("register name not specified for %q+D" , decl);
      else if (reg_number < 0)
	error ("invalid register name for %q+D" , decl);
      else if (mode == BLKmode)
	error ("data type of %q+D isn%'t suitable for a register" ,
	       decl);
      else if (!in_hard_reg_set_p (accessible_reg_set, mode, regno: reg_number))
	error ("the register specified for %q+D cannot be accessed"
	       " by the current target" , decl);
      else if (!in_hard_reg_set_p (operand_reg_set, mode, regno: reg_number))
	error ("the register specified for %q+D is not general enough"
	       " to be used as a register variable" , decl);
      else if (!targetm.hard_regno_mode_ok (reg_number, mode))
	error ("register specified for %q+D isn%'t suitable for data type" ,
	       decl);
      /* Eliminable registers (frame/arg pointer etc.) are off-limits,
	 except the hard frame pointer, which is explicitly allowed.  */
      else if (reg_number != HARD_FRAME_POINTER_REGNUM
	       && (reg_number == FRAME_POINTER_REGNUM
#ifdef RETURN_ADDRESS_POINTER_REGNUM
		   || reg_number == RETURN_ADDRESS_POINTER_REGNUM
#endif
		   || reg_number == ARG_POINTER_REGNUM)
	       && eliminable_regno_p (regnum: reg_number))
	error ("register specified for %q+D is an internal GCC "
	       "implementation detail" , decl);
      /* Now handle properly declared static register variables.  */
      else
	{
	  int nregs;

	  if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = 0;
	      error ("global register variable has initial value" );
	    }
	  if (TREE_THIS_VOLATILE (decl))
	    warning (OPT_Wvolatile_register_var,
		     "optimization may eliminate reads and/or "
		     "writes to register variables" );

	  /* If the user specified one of the eliminables registers here,
	     e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
	     confused with that register and be eliminated.  This usage is
	     somewhat suspect...  */

	  SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
	  ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
	  REG_USERVAR_P (DECL_RTL (decl)) = 1;

	  if (TREE_STATIC (decl))
	    {
	      /* Make this register global, so not usable for anything
		 else.  */
#ifdef ASM_DECLARE_REGISTER_GLOBAL
	      name = IDENTIFIER_POINTER (DECL_NAME (decl));
	      ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
#endif
	      /* Globalize every hard register the variable occupies.  */
	      nregs = hard_regno_nregs (regno: reg_number, mode);
	      while (nregs > 0)
		globalize_reg (decl, reg_number + --nregs);
	    }

	  /* As a register variable, it has no section.  */
	  return;
	}
      /* Avoid internal errors from invalid register
	 specifications.  */
      SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
      DECL_HARD_REGISTER (decl) = 0;
      /* Also avoid SSA inconsistencies by pretending this is an external
	 decl now.  */
      DECL_EXTERNAL (decl) = 1;
      return;
    }
  /* Now handle ordinary static variables and functions (in memory).
     Also handle vars declared register invalidly.  */
  else if (name[0] == '*')
    {
#ifdef REGISTER_PREFIX
      if (strlen (REGISTER_PREFIX) != 0)
	{
	  reg_number = decode_reg_name (name);
	  if (reg_number >= 0 || reg_number == -3)
	    error ("register name given for non-register variable %q+D" , decl);
	}
#endif
    }

  /* Specifying a section attribute on a variable forces it into a
     non-.bss section, and thus it cannot be common.  */
  /* FIXME: In general this code should not be necessary because
     visibility pass is doing the same work.  But notice_global_symbol
     is called early and it needs to make DECL_RTL to get the name.
     we take care of recomputing the DECL_RTL after visibility is changed.  */
  if (VAR_P (decl)
      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
      && DECL_SECTION_NAME (decl) != NULL
      && DECL_INITIAL (decl) == NULL_TREE
      && DECL_COMMON (decl))
    DECL_COMMON (decl) = 0;

  /* Variables can't be both common and weak.  */
  if (VAR_P (decl) && DECL_WEAK (decl))
    DECL_COMMON (decl) = 0;

  /* Create the symbol: either a block symbol (for section anchors) or
     a plain SYMBOL_REF in the decl's address-space mode.  */
  if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
    x = create_block_symbol (label: name, block: get_block_for_decl (decl), offset: -1);
  else
    {
      machine_mode address_mode = Pmode;
      if (TREE_TYPE (decl) != error_mark_node)
	{
	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
	  address_mode = targetm.addr_space.address_mode (as);
	}
      x = gen_rtx_SYMBOL_REF (address_mode, name);
    }
  SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
  SET_SYMBOL_REF_DECL (x, decl);

  /* Wrap the symbol in a MEM of the decl's mode; that MEM is the
     decl's RTL.  */
  x = gen_rtx_MEM (DECL_MODE (decl), x);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    set_mem_attributes (x, decl, 1);
  SET_DECL_RTL (decl, x);

  /* Optionally set flags or add text to the name to record information
     such as that it is a function name.
     If the name is changed, the macro ASM_OUTPUT_LABELREF
     will have to know how to strip this information.  */
  targetm.encode_section_info (decl, DECL_RTL (decl), true);
}
1637 | |
1638 | /* Like make_decl_rtl, but inhibit creation of new alias sets when |
1639 | calling make_decl_rtl. Also, reset DECL_RTL before returning the |
1640 | rtl. */ |
1641 | |
rtx
make_decl_rtl_for_debug (tree decl)
{
  unsigned int save_aliasing_flag;
  rtx rtl;

  if (DECL_RTL_SET_P (decl))
    return DECL_RTL (decl);

  /* Kludge alert!  Somewhere down the call chain, make_decl_rtl will
     call new_alias_set.  If running with -fcompare-debug, sometimes
     we do not want to create alias sets that will throw the alias
     numbers off in the comparison dumps.  So... clearing
     flag_strict_aliasing will keep new_alias_set() from creating a
     new set.  */
  save_aliasing_flag = flag_strict_aliasing;
  flag_strict_aliasing = 0;

  /* Reading DECL_RTL lazily invokes make_decl_rtl for us (the macro
     computes the RTL on first use).  */
  rtl = DECL_RTL (decl);
  /* Reset DECL_RTL back, as various parts of the compiler expects
     DECL_RTL set meaning it is actually going to be output.  */
  SET_DECL_RTL (decl, NULL);

  flag_strict_aliasing = save_aliasing_flag;
  return rtl;
}
1668 | |
1669 | /* Output a string of literal assembler code |
1670 | for an `asm' keyword used between functions. */ |
1671 | |
1672 | void |
1673 | assemble_asm (tree string) |
1674 | { |
1675 | const char *p; |
1676 | app_enable (); |
1677 | |
1678 | if (TREE_CODE (string) == ADDR_EXPR) |
1679 | string = TREE_OPERAND (string, 0); |
1680 | |
1681 | p = TREE_STRING_POINTER (string); |
1682 | fprintf (stream: asm_out_file, format: "%s%s\n" , p[0] == '\t' ? "" : "\t" , p); |
1683 | } |
1684 | |
1685 | /* Write the address of the entity given by SYMBOL to SEC. */ |
/* Write the address of the entity given by SYMBOL to SEC.  */
void
assemble_addr_to_section (rtx symbol, section *sec)
{
  /* Switch to SEC, then emit SYMBOL as a pointer-sized, pointer-aligned
     integer (the final argument forces emission even if it needs
     relocation).  */
  switch_to_section (sec);
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
}
1693 | |
1694 | /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if |
1695 | not) section for PRIORITY. */ |
1696 | section * |
1697 | get_cdtor_priority_section (int priority, bool constructor_p) |
1698 | { |
1699 | /* Buffer conservatively large enough for the full range of a 32-bit |
1700 | int plus the text below. */ |
1701 | char buf[18]; |
1702 | |
1703 | /* ??? This only works reliably with the GNU linker. */ |
1704 | sprintf (s: buf, format: "%s.%.5u" , |
1705 | constructor_p ? ".ctors" : ".dtors" , |
1706 | /* Invert the numbering so the linker puts us in the proper |
1707 | order; constructors are run from right to left, and the |
1708 | linker sorts in increasing order. */ |
1709 | MAX_INIT_PRIORITY - priority); |
1710 | return get_section (name: buf, flags: SECTION_WRITE, NULL); |
1711 | } |
1712 | |
1713 | void |
1714 | default_named_section_asm_out_destructor (rtx symbol, int priority) |
1715 | { |
1716 | section *sec; |
1717 | |
1718 | if (priority != DEFAULT_INIT_PRIORITY) |
1719 | sec = get_cdtor_priority_section (priority, |
1720 | /*constructor_p=*/false); |
1721 | else |
1722 | sec = get_section (name: ".dtors" , flags: SECTION_WRITE, NULL); |
1723 | |
1724 | assemble_addr_to_section (symbol, sec); |
1725 | } |
1726 | |
#ifdef DTORS_SECTION_ASM_OP
/* On targets defining DTORS_SECTION_ASM_OP, every destructor address
   goes into the single .dtors section; PRIORITY is ignored.  */
void
default_dtor_section_asm_out_destructor (rtx symbol,
					 int priority ATTRIBUTE_UNUSED)
{
  assemble_addr_to_section (symbol, dtors_section);
}
#endif
1735 | |
1736 | void |
1737 | default_named_section_asm_out_constructor (rtx symbol, int priority) |
1738 | { |
1739 | section *sec; |
1740 | |
1741 | if (priority != DEFAULT_INIT_PRIORITY) |
1742 | sec = get_cdtor_priority_section (priority, |
1743 | /*constructor_p=*/true); |
1744 | else |
1745 | sec = get_section (name: ".ctors" , flags: SECTION_WRITE, NULL); |
1746 | |
1747 | assemble_addr_to_section (symbol, sec); |
1748 | } |
1749 | |
#ifdef CTORS_SECTION_ASM_OP
/* On targets defining CTORS_SECTION_ASM_OP, every constructor address
   goes into the single .ctors section; PRIORITY is ignored.  */
void
default_ctor_section_asm_out_constructor (rtx symbol,
					  int priority ATTRIBUTE_UNUSED)
{
  assemble_addr_to_section (symbol, ctors_section);
}
#endif
1758 | |
/* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with
   a nonzero value if the constant pool should be output before the
   start of the function, or a zero value if the pool should be output
   after the end of the function.  The default is to put it before the
   start.  */
1764 | |
1765 | #ifndef CONSTANT_POOL_BEFORE_FUNCTION |
1766 | #define CONSTANT_POOL_BEFORE_FUNCTION 1 |
1767 | #endif |
1768 | |
1769 | /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going |
1770 | to be output to assembler. |
1771 | Set first_global_object_name and weak_global_object_name as appropriate. */ |
1772 | |
1773 | void |
1774 | notice_global_symbol (tree decl) |
1775 | { |
1776 | const char **t = &first_global_object_name; |
1777 | |
1778 | if (first_global_object_name |
1779 | || !TREE_PUBLIC (decl) |
1780 | || DECL_EXTERNAL (decl) |
1781 | || !DECL_NAME (decl) |
1782 | || (VAR_P (decl) && DECL_HARD_REGISTER (decl)) |
1783 | || (TREE_CODE (decl) != FUNCTION_DECL |
1784 | && (!VAR_P (decl) |
1785 | || (DECL_COMMON (decl) |
1786 | && (DECL_INITIAL (decl) == 0 |
1787 | || DECL_INITIAL (decl) == error_mark_node))))) |
1788 | return; |
1789 | |
1790 | /* We win when global object is found, but it is useful to know about weak |
1791 | symbol as well so we can produce nicer unique names. */ |
1792 | if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib) |
1793 | t = &weak_global_object_name; |
1794 | |
1795 | if (!*t) |
1796 | { |
1797 | tree id = DECL_ASSEMBLER_NAME (decl); |
1798 | ultimate_transparent_alias_target (alias: &id); |
1799 | *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id))); |
1800 | } |
1801 | } |
1802 | |
1803 | /* If not using flag_reorder_blocks_and_partition, decide early whether the |
1804 | current function goes into the cold section, so that targets can use |
1805 | current_function_section during RTL expansion. DECL describes the |
1806 | function. */ |
1807 | |
1808 | void |
1809 | decide_function_section (tree decl) |
1810 | { |
1811 | first_function_block_is_cold = false; |
1812 | |
1813 | if (DECL_SECTION_NAME (decl)) |
1814 | { |
1815 | struct cgraph_node *node = cgraph_node::get (decl: current_function_decl); |
1816 | /* Calls to function_section rely on first_function_block_is_cold |
1817 | being accurate. */ |
1818 | first_function_block_is_cold = (node |
1819 | && node->frequency |
1820 | == NODE_FREQUENCY_UNLIKELY_EXECUTED); |
1821 | } |
1822 | |
1823 | in_cold_section_p = first_function_block_is_cold; |
1824 | } |
1825 | |
1826 | /* Get the function's name, as described by its RTL. This may be |
1827 | different from the DECL_NAME name used in the source file. */ |
1828 | const char * |
1829 | get_fnname_from_decl (tree decl) |
1830 | { |
1831 | rtx x = DECL_RTL (decl); |
1832 | gcc_assert (MEM_P (x)); |
1833 | x = XEXP (x, 0); |
1834 | gcc_assert (GET_CODE (x) == SYMBOL_REF); |
1835 | return XSTR (x, 0); |
1836 | } |
1837 | |
/* Output assembler code for the constant pool of a function and associated
   with defining the name of the function.  DECL describes the function.
   NAME is the function's name.  For the constant pool, we use the current
   constant pool data.  */

void
assemble_start_function (tree decl, const char *fnname)
{
  int align;
  char tmp_label[100];
  bool hot_label_written = false;

  /* For a hot/cold-partitioned function, generate the four internal
     labels that delimit the hot and cold parts; a single const_labelno
     value ties the four names together.  */
  if (crtl->has_bb_partition)
    {
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB" , const_labelno);
      crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB" , const_labelno);
      crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE" , const_labelno);
      crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE" , const_labelno);
      crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
      const_labelno++;
      cold_function_name = NULL_TREE;
    }
  else
    {
      crtl->subsections.hot_section_label = NULL;
      crtl->subsections.cold_section_label = NULL;
      crtl->subsections.hot_section_end_label = NULL;
      crtl->subsections.cold_section_end_label = NULL;
    }

  /* The following code does not need preprocessing in the assembler.  */

  app_disable ();

  if (CONSTANT_POOL_BEFORE_FUNCTION)
    output_constant_pool (fnname, decl);

  align = symtab_node::get (decl)->definition_alignment ();

  /* Make sure the not and cold text (code) sections are properly
     aligned.  This is necessary here in the case where the function
     has both hot and cold sections, because we don't want to re-set
     the alignment when the section switch happens mid-function.  */

  if (crtl->has_bb_partition)
    {
      first_function_block_is_cold = false;

      switch_to_section (unlikely_text_section ());
      assemble_align (align);
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);

      /* When the function starts with a cold section, we need to explicitly
	 align the hot section and write out the hot section label.
	 But if the current function is a thunk, we do not have a CFG.  */
      if (!cfun->is_thunk
	  && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
	{
	  switch_to_section (text_section);
	  assemble_align (align);
	  ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
	  hot_label_written = true;
	  first_function_block_is_cold = true;
	}
      in_cold_section_p = first_function_block_is_cold;
    }


  /* Switch to the correct text section for the start of the function.  */

  switch_to_section (function_section (decl), decl);
  if (crtl->has_bb_partition && !hot_label_written)
    ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);

  /* Tell assembler to move to target machine's alignment for functions.  */
  align = floor_log2 (x: align / BITS_PER_UNIT);
  if (align > 0)
    {
      ASM_OUTPUT_ALIGN (asm_out_file, align);
    }

  /* Handle a user-specified function alignment.
     Note that we still need to align to DECL_ALIGN, as above,
     because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all.  */
  if (! DECL_USER_ALIGN (decl)
      && align_functions.levels[0].log > align
      && optimize_function_for_speed_p (cfun))
    {
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
      int align_log = align_functions.levels[0].log;
#endif
      int max_skip = align_functions.levels[0].maxskip;
      /* Don't pad past the end of the function body: a skip that large
	 would only waste space.  */
      if (flag_limit_function_alignment && crtl->max_insn_address > 0
	  && max_skip >= crtl->max_insn_address)
	max_skip = crtl->max_insn_address - 1;

#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
      ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
      /* Only apply the secondary alignment level when the primary skip
	 was not clamped above.  */
      if (max_skip == align_functions.levels[0].maxskip)
	ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
				   align_functions.levels[1].log,
				   align_functions.levels[1].maxskip);
#else
      ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
#endif
    }

#ifdef ASM_OUTPUT_FUNCTION_PREFIX
  ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
#endif

  if (!DECL_IGNORED_P (decl))
    (*debug_hooks->begin_function) (decl);

  /* Make function name accessible from other files, if appropriate.  */

  if (TREE_PUBLIC (decl))
    {
      notice_global_symbol (decl);

      globalize_decl (decl);

      maybe_assemble_visibility (decl);
    }

  if (DECL_PRESERVE_P (decl))
    targetm.asm_out.mark_decl_preserved (fnname);

  unsigned short patch_area_size = crtl->patch_area_size;
  unsigned short patch_area_entry = crtl->patch_area_entry;

  /* Emit the patching area before the entry label, if any.  */
  if (patch_area_entry > 0)
    targetm.asm_out.print_patchable_function_entry (asm_out_file,
						    patch_area_entry, true);

  /* Do any machine/system dependent processing of the function name.  */
#ifdef ASM_DECLARE_FUNCTION_NAME
  ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
#else
  /* Standard thing is just output label for the function.  */
  ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
#endif /* ASM_DECLARE_FUNCTION_NAME */

  /* And the area after the label.  Record it if we haven't done so yet.  */
  if (patch_area_size > patch_area_entry)
    targetm.asm_out.print_patchable_function_entry (asm_out_file,
						    patch_area_size
						    - patch_area_entry,
						    patch_area_entry == 0);

  if (lookup_attribute (attr_name: "no_split_stack" , DECL_ATTRIBUTES (decl)))
    saw_no_split_stack = true;
}
1995 | |
/* Output assembler code associated with defining the size of the
   function.  DECL describes the function.  NAME is the function's name.  */

void
assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
{
#ifdef ASM_DECLARE_FUNCTION_SIZE
  /* We could have switched section in the middle of the function.  */
  if (crtl->has_bb_partition)
    switch_to_section (function_section (decl));
  ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
#endif
  if (! CONSTANT_POOL_BEFORE_FUNCTION)
    {
      output_constant_pool (fnname, decl);
      switch_to_section (function_section (decl)); /* need to switch back */
    }
  /* Output labels for end of hot/cold text sections (to be used by
     debug info.)  */
  if (crtl->has_bb_partition)
    {
      section *save_text_section;

      /* Remember the current section so it can be restored after the
	 end labels have been emitted.  */
      save_text_section = in_section;
      switch_to_section (unlikely_text_section ());
#ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
      /* Also emit the size of the cold part, if it was given a name.  */
      if (cold_function_name != NULL_TREE)
	ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
					IDENTIFIER_POINTER (cold_function_name),
					decl);
#endif
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
      /* Place the hot-section end label in whichever section holds the
	 hot code, which depends on which part started the function.  */
      if (first_function_block_is_cold)
	switch_to_section (text_section);
      else
	switch_to_section (function_section (decl));
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
      switch_to_section (save_text_section);
    }
}
2036 | |
/* Assemble code to leave SIZE bytes of zeros.  */

void
assemble_zeros (unsigned HOST_WIDE_INT size)
{
  /* Do no output if -fsyntax-only.  */
  if (flag_syntax_only)
    return;

#ifdef ASM_NO_SKIP_IN_TEXT
  /* The `space' pseudo in the text section outputs nop insns rather than 0s,
     so we must output 0s explicitly in the text section.  */
  if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
    {
      unsigned HOST_WIDE_INT i;
      /* Emit SIZE individual zero bytes.  */
      for (i = 0; i < size; i++)
	assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
    }
  else
#endif
    if (size > 0)
      ASM_OUTPUT_SKIP (asm_out_file, size);
}
2060 | |
2061 | /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */ |
2062 | |
2063 | void |
2064 | assemble_align (unsigned int align) |
2065 | { |
2066 | if (align > BITS_PER_UNIT) |
2067 | { |
2068 | ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); |
2069 | } |
2070 | } |
2071 | |
2072 | /* Assemble a string constant with the specified C string as contents. */ |
2073 | |
2074 | void |
2075 | assemble_string (const char *p, int size) |
2076 | { |
2077 | int pos = 0; |
2078 | int maximum = 2000; |
2079 | |
2080 | /* If the string is very long, split it up. */ |
2081 | |
2082 | while (pos < size) |
2083 | { |
2084 | int thissize = size - pos; |
2085 | if (thissize > maximum) |
2086 | thissize = maximum; |
2087 | |
2088 | ASM_OUTPUT_ASCII (asm_out_file, p, thissize); |
2089 | |
2090 | pos += thissize; |
2091 | p += thissize; |
2092 | } |
2093 | } |
2094 | |
2095 | |
/* A noswitch_section_callback for lcomm_section.  Emit DECL (named NAME,
   SIZE bytes, padded to ROUNDED bytes) as local uninitialized storage.
   Return true when the target macro used handles alignment itself,
   false when the caller must diagnose the requested alignment.  */

static bool
emit_local (tree decl ATTRIBUTE_UNUSED,
	    const char *name ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
				 size, align);
  return true;
#elif defined ASM_OUTPUT_ALIGNED_LOCAL
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
  return true;
#else
  /* No aligned variant available: emit ROUNDED bytes and let the
     caller check the achieved alignment.  */
  ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  return false;
#endif
}
2118 | |
/* A noswitch_section_callback for bss_noswitch_section.  Emit DECL
   (named NAME, SIZE bytes) into BSS; the explicit alignment argument
   means ROUNDED is not needed, and returning true tells the caller
   alignment has been handled.  */

#if defined ASM_OUTPUT_ALIGNED_BSS
static bool
emit_bss (tree decl ATTRIBUTE_UNUSED,
	  const char *name ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
  ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
			  get_variable_align (decl));
  return true;
}
#endif
2133 | |
/* A noswitch_section_callback for comm_section.  Emit DECL (named NAME,
   SIZE bytes, padded to ROUNDED bytes) as a common symbol.  Return true
   when the target macro used handles alignment itself, false when the
   caller must diagnose the requested alignment.  */

static bool
emit_common (tree decl ATTRIBUTE_UNUSED,
	     const char *name ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
  ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
				  size, get_variable_align (decl));
  return true;
#elif defined ASM_OUTPUT_ALIGNED_COMMON
  ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
			     get_variable_align (decl));
  return true;
#else
  /* No aligned variant available: emit ROUNDED bytes and let the
     caller check the achieved alignment.  */
  ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
  return false;
#endif
}
2155 | |
/* A noswitch_section_callback for tls_comm_section.  Emit DECL (named
   NAME, SIZE bytes) as thread-local common data, or report that the
   target cannot do so.  ROUNDED is unused.  */

static bool
emit_tls_common (tree decl ATTRIBUTE_UNUSED,
		 const char *name ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_TLS_COMMON
  ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
  return true;
#else
  sorry ("thread-local COMMON data not implemented" );
  return true;
#endif
}
2172 | |
2173 | /* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT. |
2174 | NAME is the name of DECL's SYMBOL_REF. */ |
2175 | |
2176 | static void |
2177 | assemble_noswitch_variable (tree decl, const char *name, section *sect, |
2178 | unsigned int align) |
2179 | { |
2180 | unsigned HOST_WIDE_INT size, rounded; |
2181 | |
2182 | size = tree_to_uhwi (DECL_SIZE_UNIT (decl)); |
2183 | rounded = size; |
2184 | |
2185 | if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl)) |
2186 | size += asan_red_zone_size (size); |
2187 | |
2188 | /* Don't allocate zero bytes of common, |
2189 | since that means "undefined external" in the linker. */ |
2190 | if (size == 0) |
2191 | rounded = 1; |
2192 | |
2193 | /* Round size up to multiple of BIGGEST_ALIGNMENT bits |
2194 | so that each uninitialized object starts on such a boundary. */ |
2195 | rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1; |
2196 | rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT) |
2197 | * (BIGGEST_ALIGNMENT / BITS_PER_UNIT)); |
2198 | |
2199 | if (!sect->noswitch.callback (decl, name, size, rounded) |
2200 | && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded) |
2201 | error ("requested alignment for %q+D is greater than " |
2202 | "implemented alignment of %wu" , decl, rounded); |
2203 | } |
2204 | |
/* A subroutine of assemble_variable.  Output the label and contents of
   DECL, whose address is a SYMBOL_REF with name NAME.  DONT_OUTPUT_DATA
   is as for assemble_variable.  MERGE_STRINGS is true when the chosen
   section has both SECTION_MERGE and SECTION_STRINGS set.  */

static void
assemble_variable_contents (tree decl, const char *name,
			    bool dont_output_data, bool merge_strings)
{
  /* Do any machine/system dependent processing of the object.  */
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (asm_out_file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */

  if (!dont_output_data)
    {
      /* Caller is supposed to use varpool_get_constructor when it wants
	 to output the body.  */
      gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
      if (DECL_INITIAL (decl)
	  && DECL_INITIAL (decl) != error_mark_node
	  && !initializer_zerop (DECL_INITIAL (decl)))
	/* Output the actual data.  */
	output_constant (DECL_INITIAL (decl),
			 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
			 get_variable_align (decl),
			 false, merge_strings);
      else
	/* Leave space for it.  */
	assemble_zeros (size: tree_to_uhwi (DECL_SIZE_UNIT (decl)));
      targetm.asm_out.decl_end ();
    }
}
2241 | |
2242 | /* Write out assembly for the variable DECL, which is not defined in |
2243 | the current translation unit. */ |
2244 | void |
2245 | assemble_undefined_decl (tree decl) |
2246 | { |
2247 | const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0); |
2248 | targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl); |
2249 | } |
2250 | |
/* Assemble everything that is needed for a variable or function declaration.
   Not used for automatic variables, and not used for function definitions.
   Should not be called for variables of incomplete structure type.

   TOP_LEVEL is nonzero if this variable has file scope.
   AT_END is nonzero if this is the special handling, at end of compilation,
   to define things that have had only tentative definitions.
   DONT_OUTPUT_DATA if nonzero means don't actually output the
   initial value (that will be done by the caller).  */

void
assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
		   int at_end ATTRIBUTE_UNUSED, int dont_output_data)
{
  const char *name;
  rtx decl_rtl, symbol;
  section *sect;
  unsigned int align;
  bool asan_protected = false;

  /* This function is supposed to handle VARIABLES.  Ensure we have one.  */
  gcc_assert (VAR_P (decl));

  /* Emulated TLS had better not get this far.  */
  gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));

  last_assemble_variable_decl = 0;

  /* Normally no need to say anything here for external references,
     since assemble_external is called by the language-specific code
     when a declaration is first seen.  */

  if (DECL_EXTERNAL (decl))
    return;

  /* Do nothing for global register variables.  */
  if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
    {
      TREE_ASM_WRITTEN (decl) = 1;
      return;
    }

  /* If type was incomplete when the variable was declared,
     see if it is complete now.  */

  if (DECL_SIZE (decl) == 0)
    layout_decl (decl, 0);

  /* Still incomplete => don't allocate it; treat the tentative defn
     (which is what it must have been) as an `extern' reference.  */

  if (!dont_output_data && DECL_SIZE (decl) == 0)
    {
      error ("storage size of %q+D isn%'t known" , decl);
      TREE_ASM_WRITTEN (decl) = 1;
      return;
    }

  /* The first declaration of a variable that comes through this function
     decides whether it is global (in C, has external linkage)
     or local (in C, has internal linkage).  So do nothing more
     if this function has already run.  */

  if (TREE_ASM_WRITTEN (decl))
    return;

  /* Make sure targetm.encode_section_info is invoked before we set
     ASM_WRITTEN.  */
  decl_rtl = DECL_RTL (decl);

  TREE_ASM_WRITTEN (decl) = 1;

  /* Do no output if -fsyntax-only.  */
  if (flag_syntax_only)
    return;

  if (! dont_output_data
      && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
    {
      error ("size of variable %q+D is too large" , decl);
      return;
    }

  gcc_assert (MEM_P (decl_rtl));
  gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
  symbol = XEXP (decl_rtl, 0);

  /* If this symbol belongs to the tree constant pool, output the constant
     if it hasn't already been written.  */
  if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    {
      tree decl = SYMBOL_REF_DECL (symbol);
      if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
	output_constant_def_contents (symbol);
      return;
    }

  app_disable ();

  name = XSTR (symbol, 0);
  if (TREE_PUBLIC (decl) && DECL_NAME (decl))
    notice_global_symbol (decl);

  /* Compute the alignment of this data.  */

  align_variable (decl, dont_output_data);

  /* ASan-protected globals are aligned at least to the red-zone size
     so the trailing red zone emitted below is placed correctly.  */
  if ((flag_sanitize & SANITIZE_ADDRESS)
      && asan_protect_global (decl))
    {
      asan_protected = true;
      SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
				 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
    }

  set_mem_align (decl_rtl, DECL_ALIGN (decl));

  align = get_variable_align (decl);

  if (TREE_PUBLIC (decl))
    maybe_assemble_visibility (decl);

  if (DECL_PRESERVE_P (decl))
    targetm.asm_out.mark_decl_preserved (name);

  /* First make the assembler name(s) global if appropriate.  */
  sect = get_variable_section (decl, prefer_noswitch_p: false);
  if (TREE_PUBLIC (decl)
      && (sect->common.flags & SECTION_COMMON) == 0)
    globalize_decl (decl);

  /* Output any data that we will need to use the address of.  */
  if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
    output_addressed_constants (DECL_INITIAL (decl), 0);

  /* dbxout.cc needs to know this.  */
  if (sect && (sect->common.flags & SECTION_CODE) != 0)
    DECL_IN_TEXT_SECTION (decl) = 1;

  /* If the decl is part of an object_block, make sure that the decl
     has been positioned within its block, but do not write out its
     definition yet.  output_object_blocks will do that later.  */
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
    {
      gcc_assert (!dont_output_data);
      place_block_symbol (symbol);
    }
  else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
    assemble_noswitch_variable (decl, name, sect, align);
  else
    {
      /* Special-case handling of vtv comdat sections.  */
      if (SECTION_STYLE (sect) == SECTION_NAMED
	  && (strcmp (s1: sect->named.name, s2: ".vtable_map_vars" ) == 0))
	handle_vtv_comdat_section (sect, decl);
      else
	switch_to_section (sect, decl);
      if (align > BITS_PER_UNIT)
	ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
      assemble_variable_contents (decl, name, dont_output_data,
				  merge_strings: (sect->common.flags & SECTION_MERGE)
				  && (sect->common.flags & SECTION_STRINGS));
      /* Emit the ASan red zone directly after the object.  (The
	 "unsigned HOST_WIDE_INT int" spelling is valid: HOST_WIDE_INT
	 expands to a plain integer keyword sequence.)  */
      if (asan_protected)
	{
	  unsigned HOST_WIDE_INT int size
	    = tree_to_uhwi (DECL_SIZE_UNIT (decl));
	  assemble_zeros (size: asan_red_zone_size (size));
	}
    }
}
2421 | |
2422 | /* Return true if type TYPE contains any pointers. */ |
2423 | |
2424 | static bool |
2425 | contains_pointers_p (tree type) |
2426 | { |
2427 | switch (TREE_CODE (type)) |
2428 | { |
2429 | case POINTER_TYPE: |
2430 | case REFERENCE_TYPE: |
2431 | /* I'm not sure whether OFFSET_TYPE needs this treatment, |
2432 | so I'll play safe and return 1. */ |
2433 | case OFFSET_TYPE: |
2434 | return true; |
2435 | |
2436 | case RECORD_TYPE: |
2437 | case UNION_TYPE: |
2438 | case QUAL_UNION_TYPE: |
2439 | { |
2440 | tree fields; |
2441 | /* For a type that has fields, see if the fields have pointers. */ |
2442 | for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields)) |
2443 | if (TREE_CODE (fields) == FIELD_DECL |
2444 | && contains_pointers_p (TREE_TYPE (fields))) |
2445 | return true; |
2446 | return false; |
2447 | } |
2448 | |
2449 | case ARRAY_TYPE: |
2450 | /* An array type contains pointers if its element type does. */ |
2451 | return contains_pointers_p (TREE_TYPE (type)); |
2452 | |
2453 | default: |
2454 | return false; |
2455 | } |
2456 | } |
2457 | |
/* We delay assemble_external processing until
   the compilation unit is finalized.  This is the best we can do for
   right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
   it all the way to final.  See PR 17982 for further discussion.  */
static GTY(()) tree pending_assemble_externals;

#ifdef ASM_OUTPUT_EXTERNAL
/* Some targets delay some output to final using TARGET_ASM_FILE_END.
   As a result, assemble_external can be called after the list of externals
   is processed and the pointer set destroyed.  */
static bool pending_assemble_externals_processed;

/* Avoid O(external_decls**2) lookups in the pending_assemble_externals
   TREE_LIST in assemble_external.  Destroyed by
   process_pending_assemble_externals.  */
static hash_set<tree> *pending_assemble_externals_set;
2473 | |
2474 | /* True if DECL is a function decl for which no out-of-line copy exists. |
2475 | It is assumed that DECL's assembler name has been set. */ |
2476 | |
2477 | static bool |
2478 | incorporeal_function_p (tree decl) |
2479 | { |
2480 | if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (node: decl)) |
2481 | { |
2482 | const char *name; |
2483 | |
2484 | if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL |
2485 | && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl))) |
2486 | return true; |
2487 | |
2488 | name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); |
2489 | /* Atomic or sync builtins which have survived this far will be |
2490 | resolved externally and therefore are not incorporeal. */ |
2491 | if (startswith (str: name, prefix: "__builtin_" )) |
2492 | return true; |
2493 | } |
2494 | return false; |
2495 | } |
2496 | |
2497 | /* Actually do the tests to determine if this is necessary, and invoke |
2498 | ASM_OUTPUT_EXTERNAL. */ |
2499 | static void |
2500 | assemble_external_real (tree decl) |
2501 | { |
2502 | rtx rtl = DECL_RTL (decl); |
2503 | |
2504 | if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF |
2505 | && !SYMBOL_REF_USED (XEXP (rtl, 0)) |
2506 | && !incorporeal_function_p (decl)) |
2507 | { |
2508 | /* Some systems do require some output. */ |
2509 | SYMBOL_REF_USED (XEXP (rtl, 0)) = 1; |
2510 | ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0)); |
2511 | } |
2512 | } |
2513 | #endif |
2514 | |
/* Emit the queued external declarations collected by assemble_external,
   then tear down the queue.  After this runs, assemble_external emits
   directly (see pending_assemble_externals_processed).  */
void
process_pending_assemble_externals (void)
{
#ifdef ASM_OUTPUT_EXTERNAL
  tree list;
  for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
    assemble_external_real (TREE_VALUE (list));

  pending_assemble_externals = 0;
  pending_assemble_externals_processed = true;
  delete pending_assemble_externals_set;
  /* Null the freed pointer so any stray late lookup faults cleanly
     instead of touching freed memory (assemble_external checks the
     _processed flag first, but belt and braces).  */
  pending_assemble_externals_set = NULL;
#endif
}
2528 | |
/* This TREE_LIST contains any weak symbol declarations waiting
   to be emitted.  Entries are appended by assemble_external.  */
static GTY(()) tree weak_decls;
2532 | |
/* Output something to declare an external symbol to the assembler,
   and qualifiers such as weakness.  (Most assemblers don't need
   extern declaration, so we normally output nothing.)  Do nothing if
   DECL is not external.  */

void
assemble_external (tree decl ATTRIBUTE_UNUSED)
{
  /* Make sure that the ASM_OUT_FILE is open.
     If it's not, we should not be calling this function.  */
  gcc_assert (asm_out_file);

  /* In a perfect world, the following condition would be true.
     Sadly, the Go front end emit assembly *from the front end*,
     bypassing the call graph.  See PR52739.  Fix before GCC 4.8.  */
#if 0
  /* This function should only be called if we are expanding, or have
     expanded, to RTL.
     Ideally, only final.cc would be calling this function, but it is
     not clear whether that would break things somehow.  See PR 17982
     for further discussion.  */
  gcc_assert (state == EXPANSION
	      || state == FINISHED);
#endif

  if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
    return;

  /* We want to output annotation for weak and external symbols at
     very last to check if they are references or not.  */

  if (TARGET_SUPPORTS_WEAK
      && DECL_WEAK (decl)
      /* TREE_STATIC is a weird and abused creature which is not
	 generally the right test for whether an entity has been
	 locally emitted, inlined or otherwise not-really-extern, but
	 for declarations that can be weak, it happens to be
	 match.  */
      && !TREE_STATIC (decl)
      && lookup_attribute (attr_name: "weak" , DECL_ATTRIBUTES (decl))
      && value_member (decl, weak_decls) == NULL_TREE)
    weak_decls = tree_cons (NULL, decl, weak_decls);

#ifdef ASM_OUTPUT_EXTERNAL
  /* Once the queue has been drained, emit directly; otherwise queue the
     decl, using the hash_set to avoid duplicates in the TREE_LIST.  */
  if (pending_assemble_externals_processed)
    {
      assemble_external_real (decl);
      return;
    }

  if (! pending_assemble_externals_set->add (k: decl))
    pending_assemble_externals = tree_cons (NULL, decl,
					    pending_assemble_externals);
#endif
}
2588 | |
2589 | /* Similar, for calling a library function FUN. */ |
2590 | |
2591 | void |
2592 | assemble_external_libcall (rtx fun) |
2593 | { |
2594 | /* Declare library function name external when first used, if nec. */ |
2595 | if (! SYMBOL_REF_USED (fun)) |
2596 | { |
2597 | SYMBOL_REF_USED (fun) = 1; |
2598 | targetm.asm_out.external_libcall (fun); |
2599 | } |
2600 | } |
2601 | |
/* Assemble a label named NAME.  */

void
assemble_label (FILE *file, const char *name)
{
  /* Delegate to the target macro, which emits the label definition
     (typically "NAME:") in the target's assembler syntax.  */
  ASM_OUTPUT_LABEL (file, name);
}
2609 | |
/* Set the symbol_referenced flag for ID (an IDENTIFIER_NODE).  Used to
   record that the assembler name was actually used in output.  */
void
mark_referenced (tree id)
{
  TREE_SYMBOL_REFERENCED (id) = 1;
}
2616 | |
2617 | /* Set the symbol_referenced flag for DECL and notify callgraph. */ |
2618 | void |
2619 | mark_decl_referenced (tree decl) |
2620 | { |
2621 | if (TREE_CODE (decl) == FUNCTION_DECL) |
2622 | { |
2623 | /* Extern inline functions don't become needed when referenced. |
2624 | If we know a method will be emitted in other TU and no new |
2625 | functions can be marked reachable, just use the external |
2626 | definition. */ |
2627 | struct cgraph_node *node = cgraph_node::get_create (decl); |
2628 | if (!DECL_EXTERNAL (decl) |
2629 | && !node->definition) |
2630 | node->mark_force_output (); |
2631 | } |
2632 | else if (VAR_P (decl)) |
2633 | { |
2634 | varpool_node *node = varpool_node::get_create (decl); |
2635 | /* C++ frontend use mark_decl_references to force COMDAT variables |
2636 | to be output that might appear dead otherwise. */ |
2637 | node->force_output = true; |
2638 | } |
2639 | /* else do nothing - we can get various sorts of CST nodes here, |
2640 | which do not need to be marked. */ |
2641 | } |
2642 | |
2643 | |
2644 | /* Output to FILE (an assembly file) a reference to NAME. If NAME |
2645 | starts with a *, the rest of NAME is output verbatim. Otherwise |
2646 | NAME is transformed in a target-specific way (usually by the |
2647 | addition of an underscore). */ |
2648 | |
2649 | void |
2650 | assemble_name_raw (FILE *file, const char *name) |
2651 | { |
2652 | if (name[0] == '*') |
2653 | fputs (s: &name[1], stream: file); |
2654 | else |
2655 | ASM_OUTPUT_LABELREF (file, name); |
2656 | } |
2657 | |
2658 | /* Return NAME that should actually be emitted, looking through |
2659 | transparent aliases. If NAME refers to an entity that is also |
2660 | represented as a tree (like a function or variable), mark the entity |
2661 | as referenced. */ |
2662 | const char * |
2663 | assemble_name_resolve (const char *name) |
2664 | { |
2665 | const char *real_name = targetm.strip_name_encoding (name); |
2666 | tree id = maybe_get_identifier (real_name); |
2667 | |
2668 | if (id) |
2669 | { |
2670 | tree id_orig = id; |
2671 | |
2672 | mark_referenced (id); |
2673 | ultimate_transparent_alias_target (alias: &id); |
2674 | if (id != id_orig) |
2675 | name = IDENTIFIER_POINTER (id); |
2676 | gcc_assert (! TREE_CHAIN (id)); |
2677 | } |
2678 | |
2679 | return name; |
2680 | } |
2681 | |
2682 | /* Like assemble_name_raw, but should be used when NAME might refer to |
2683 | an entity that is also represented as a tree (like a function or |
2684 | variable). If NAME does refer to such an entity, that entity will |
2685 | be marked as referenced. */ |
2686 | |
2687 | void |
2688 | assemble_name (FILE *file, const char *name) |
2689 | { |
2690 | assemble_name_raw (file, name: assemble_name_resolve (name)); |
2691 | } |
2692 | |
/* Allocate SIZE bytes writable static space with a gensym name
   and return an RTX to refer to its address.  */

rtx
assemble_static_space (unsigned HOST_WIDE_INT size)
{
  char name[17];
  const char *namestring;
  rtx x;

  /* Generate a fresh local label "LF<N>" for this block of space.  */
  ASM_GENERATE_INTERNAL_LABEL (name, "LF" , const_labelno);
  ++const_labelno;
  namestring = ggc_strdup (name);

  /* Build the SYMBOL_REF that addresses the space; it is local to
     this object file.  */
  x = gen_rtx_SYMBOL_REF (Pmode, namestring);
  SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;

  /* Emit the storage itself, using the most capable "local" output
     macro the target provides.  */
#ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
				 BIGGEST_ALIGNMENT);
#else
#ifdef ASM_OUTPUT_ALIGNED_LOCAL
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
#else
  {
    /* Round size up to multiple of BIGGEST_ALIGNMENT bits
       so that each uninitialized object starts on such a boundary.  */
    /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL.  */
    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
      = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
	 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
	 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
    ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  }
#endif
#endif
  return x;
}
2731 | |
/* Assemble the static constant template for function entry trampolines.
   This is done at most once per compilation.
   Returns an RTX for the address of the template.  */

/* Cached MEM for the emitted template; built on first call, reused
   thereafter.  GTY so the GC does not collect it between functions.  */
static GTY(()) rtx initial_trampoline;

rtx
assemble_trampoline_template (void)
{
  char label[256];
  const char *name;
  int align;
  rtx symbol;

  gcc_assert (targetm.asm_out.trampoline_template != NULL);

  /* Emit the template at most once; afterwards return the cached MEM.  */
  if (initial_trampoline)
    return initial_trampoline;

  /* By default, put trampoline templates in read-only data section.  */

#ifdef TRAMPOLINE_SECTION
  switch_to_section (TRAMPOLINE_SECTION);
#else
  switch_to_section (readonly_data_section);
#endif

  /* Write the assembler code to define one.  */
  align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
  if (align > 0)
    ASM_OUTPUT_ALIGN (asm_out_file, align);

  targetm.asm_out.internal_label (asm_out_file, "LTRAMP" , 0);
  targetm.asm_out.trampoline_template (asm_out_file);

  /* Record the rtl to refer to it.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP" , 0);
  name = ggc_strdup (label);
  symbol = gen_rtx_SYMBOL_REF (Pmode, name);
  SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;

  /* Wrap the label in a BLKmode MEM carrying the template's known
     alignment and size.  */
  initial_trampoline = gen_const_mem (BLKmode, symbol);
  set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
  set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);

  return initial_trampoline;
}
2779 | |
2780 | /* A and B are either alignments or offsets. Return the minimum alignment |
2781 | that may be assumed after adding the two together. */ |
2782 | |
2783 | static inline unsigned |
2784 | min_align (unsigned int a, unsigned int b) |
2785 | { |
2786 | return least_bit_hwi (x: a | b); |
2787 | } |
2788 | |
2789 | /* Return the assembler directive for creating a given kind of integer |
2790 | object. SIZE is the number of bytes in the object and ALIGNED_P |
2791 | indicates whether it is known to be aligned. Return NULL if the |
2792 | assembly dialect has no such directive. |
2793 | |
2794 | The returned string should be printed at the start of a new line and |
2795 | be followed immediately by the object's initial value. */ |
2796 | |
2797 | const char * |
2798 | integer_asm_op (int size, int aligned_p) |
2799 | { |
2800 | struct asm_int_op *ops; |
2801 | |
2802 | if (aligned_p) |
2803 | ops = &targetm.asm_out.aligned_op; |
2804 | else |
2805 | ops = &targetm.asm_out.unaligned_op; |
2806 | |
2807 | switch (size) |
2808 | { |
2809 | case 1: |
2810 | return targetm.asm_out.byte_op; |
2811 | case 2: |
2812 | return ops->hi; |
2813 | case 3: |
2814 | return ops->psi; |
2815 | case 4: |
2816 | return ops->si; |
2817 | case 5: |
2818 | case 6: |
2819 | case 7: |
2820 | return ops->pdi; |
2821 | case 8: |
2822 | return ops->di; |
2823 | case 9: |
2824 | case 10: |
2825 | case 11: |
2826 | case 12: |
2827 | case 13: |
2828 | case 14: |
2829 | case 15: |
2830 | return ops->pti; |
2831 | case 16: |
2832 | return ops->ti; |
2833 | default: |
2834 | return NULL; |
2835 | } |
2836 | } |
2837 | |
2838 | /* Use directive OP to assemble an integer object X. Print OP at the |
2839 | start of the line, followed immediately by the value of X. */ |
2840 | |
2841 | void |
2842 | assemble_integer_with_op (const char *op, rtx x) |
2843 | { |
2844 | fputs (s: op, stream: asm_out_file); |
2845 | output_addr_const (asm_out_file, x); |
2846 | fputc (c: '\n', stream: asm_out_file); |
2847 | } |
2848 | |
2849 | /* The default implementation of the asm_out.integer target hook. */ |
2850 | |
2851 | bool |
2852 | default_assemble_integer (rtx x ATTRIBUTE_UNUSED, |
2853 | unsigned int size ATTRIBUTE_UNUSED, |
2854 | int aligned_p ATTRIBUTE_UNUSED) |
2855 | { |
2856 | const char *op = integer_asm_op (size, aligned_p); |
2857 | /* Avoid GAS bugs for large values. Specifically negative values whose |
2858 | absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */ |
2859 | if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS) |
2860 | return false; |
2861 | return op && (assemble_integer_with_op (op, x), true); |
2862 | } |
2863 | |
/* Assemble the integer constant X into an object of SIZE bytes.  ALIGN is
   the alignment of the integer in bits.  Return 1 if we were able to output
   the constant, otherwise 0.  We must be able to output the constant,
   if FORCE is nonzero.  */

bool
assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
{
  int aligned_p;

  aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));

  /* See if the target hook can handle this kind of object.  */
  if (targetm.asm_out.integer (x, size, aligned_p))
    return true;

  /* If the object is a multi-byte one, try splitting it up.  Split
     it into words if it is multi-word, otherwise split it into bytes.  */
  if (size > 1)
    {
      machine_mode omode, imode;
      unsigned int subalign;
      unsigned int subsize, i;
      enum mode_class mclass;

      subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1;
      subalign = MIN (align, subsize * BITS_PER_UNIT);
      /* CONST_FIXED pieces must be extracted in their own mode class;
	 everything else is split as plain integers.  */
      if (GET_CODE (x) == CONST_FIXED)
	mclass = GET_MODE_CLASS (GET_MODE (x));
      else
	mclass = MODE_INT;

      omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
      imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();

      /* Emit each sub-piece recursively; stop at the first piece that
	 cannot be extracted or printed.  */
      for (i = 0; i < size; i += subsize)
	{
	  rtx partial = simplify_subreg (outermode: omode, op: x, innermode: imode, byte: i);
	  if (!partial || !assemble_integer (x: partial, size: subsize, align: subalign, force: 0))
	    break;
	}
      if (i == size)
	return true;

      /* If we've printed some of it, but not all of it, there's no going
	 back now.  */
      gcc_assert (!i);
    }

  gcc_assert (!force);

  return false;
}
2917 | |
/* Assemble the floating-point constant D into an object of size MODE.  ALIGN
   is the alignment of the constant in bits.  If REVERSE is true, D is output
   in reverse storage order.  */

void
assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
	       bool reverse)
{
  long data[4] = {0, 0, 0, 0};
  int bitsize, nelts, nunits, units_per;
  rtx elt;

  /* This is hairy.  We have a quantity of known size.  real_to_target
     will put it into an array of *host* longs, 32 bits per element
     (even if long is more than 32 bits).  We need to determine the
     number of array elements that are occupied (nelts) and the number
     of *target* min-addressable units that will be occupied in the
     object file (nunits).  We cannot assume that 32 divides the
     mode's bitsize (size * BITS_PER_UNIT) evenly.

     size * BITS_PER_UNIT is used here to make sure that padding bits
     (which might appear at either end of the value; real_to_target
     will include the padding bits in its output array) are included.  */

  nunits = GET_MODE_SIZE (mode);
  bitsize = nunits * BITS_PER_UNIT;
  nelts = CEIL (bitsize, 32);
  units_per = 32 / BITS_PER_UNIT;

  real_to_target (data, &d, mode);

  /* Put out the first word with the specified alignment.  */
  unsigned int chunk_nunits = MIN (nunits, units_per);
  /* In reverse storage order the host words are consumed top-down and
     each 32-bit chunk is byte-flipped.  */
  if (reverse)
    elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
  else
    elt = GEN_INT (sext_hwi (data[0], chunk_nunits * BITS_PER_UNIT));
  assemble_integer (x: elt, size: chunk_nunits, align, force: 1);
  nunits -= chunk_nunits;

  /* Subsequent words need only 32-bit alignment.  */
  align = min_align (a: align, b: 32);

  for (int i = 1; i < nelts; i++)
    {
      /* The final chunk may cover fewer than units_per target units.  */
      chunk_nunits = MIN (nunits, units_per);
      if (reverse)
	elt = flip_storage_order (SImode,
				  gen_int_mode (data[nelts - 1 - i], SImode));
      else
	elt = GEN_INT (sext_hwi (data[i], chunk_nunits * BITS_PER_UNIT));
      assemble_integer (x: elt, size: chunk_nunits, align, force: 1);
      nunits -= chunk_nunits;
    }
}
2973 | |
/* Given an expression EXP with a constant value,
   reduce it to the sum of an assembler symbol and an integer.
   Store them both in the structure *VALUE.
   EXP must be reducible.  */

/* Decomposed constant address: a symbolic base plus a byte offset.  */
class addr_const {
public:
  rtx base;		/* SYMBOL_REF or LABEL_REF for the address.  */
  poly_int64 offset;	/* Byte offset added to BASE.  */
};
2984 | |
/* Worker for the comment above: peel component/array/mem references off
   EXP's operand, accumulating their byte offsets, until a base object is
   reached, then record the base's address RTX and total offset in
   *VALUE.  Aborts on expressions that are not reducible.  */
static void
decode_addr_const (tree exp, class addr_const *value)
{
  tree target = TREE_OPERAND (exp, 0);
  poly_int64 offset = 0;
  rtx x;

  /* Strip handled_component-like wrappers, folding each one's constant
     displacement into OFFSET.  */
  while (1)
    {
      poly_int64 bytepos;
      if (TREE_CODE (target) == COMPONENT_REF
	  && poly_int_tree_p (t: byte_position (TREE_OPERAND (target, 1)),
			      value: &bytepos))
	{
	  offset += bytepos;
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == ARRAY_REF
	       || TREE_CODE (target) == ARRAY_RANGE_REF)
	{
	  /* Truncate big offset.  */
	  offset
	    += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
		* wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
	{
	  offset += mem_ref_offset (target).force_shwi ();
	  target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
	}
      else if (INDIRECT_REF_P (target)
	       && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
	       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
	       == ADDR_EXPR)
	target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
      else
	break;
    }

  /* Map the remaining base object to its address RTX.  */
  switch (TREE_CODE (target))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      x = DECL_RTL (target);
      break;

    case LABEL_DECL:
      x = gen_rtx_MEM (FUNCTION_MODE,
		       gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
      break;

    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case CONSTRUCTOR:
    case INTEGER_CST:
      x = lookup_constant_def (target);
      /* Should have been added by output_addressed_constants.  */
      gcc_assert (x);
      break;

    case INDIRECT_REF:
      /* This deals with absolute addresses.  */
      offset += tree_to_shwi (TREE_OPERAND (target, 0));
      x = gen_rtx_MEM (QImode,
		       gen_rtx_SYMBOL_REF (Pmode, "origin of addresses" ));
      break;

    case COMPOUND_LITERAL_EXPR:
      gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
      x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
      break;

    default:
      gcc_unreachable ();
    }

  /* The RTL is always a MEM wrapping the actual address.  */
  gcc_assert (MEM_P (x));
  x = XEXP (x, 0);

  value->base = x;
  value->offset = offset;
}
3071 | |
/* Hash table mapping constant trees to their descriptors (label + RTL).
   GTY-marked so descriptors survive garbage collection.  */
static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;

static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
3075 | |
/* Constant pool accessor function.  Exposes the constant-descriptor
   hash table to other parts of the compiler.  */

hash_table<tree_descriptor_hasher> *
constant_pool_htab (void)
{
  return const_desc_htab;
}
3083 | |
/* Compute a hash code for a constant expression.  */

hashval_t
tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
{
  /* The hash was precomputed (by const_hash_1) when the descriptor was
     built; just return the cached value.  */
  return ptr->hash;
}
3091 | |
/* Recursively compute a hash value for constant expression EXP, such
   that constants which compare_constant considers equal hash equally.
   The multiplier constants (563, 603, 613, ...) are part of the hash
   definition and must not change independently of each other.  */
static hashval_t
const_hash_1 (const tree exp)
{
  const char *p;
  hashval_t hi;
  int len, i;
  enum tree_code code = TREE_CODE (exp);

  /* Either set P and LEN to the address and len of something to hash and
     exit the switch or return a value.  */

  switch (code)
    {
    case INTEGER_CST:
      p = (char *) &TREE_INT_CST_ELT (exp, 0);
      len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
      break;

    case REAL_CST:
      return real_hash (TREE_REAL_CST_PTR (exp));

    case FIXED_CST:
      return fixed_hash (TREE_FIXED_CST_PTR (exp));

    case STRING_CST:
      p = TREE_STRING_POINTER (exp);
      len = TREE_STRING_LENGTH (exp);
      break;

    case COMPLEX_CST:
      return (const_hash_1 (TREE_REALPART (exp)) * 5
	      + const_hash_1 (TREE_IMAGPART (exp)));

    case VECTOR_CST:
      {
	/* Hash the encoding (patterns and encoded elements), which
	   uniquely determines the vector's contents.  */
	hi = 7 + VECTOR_CST_NPATTERNS (exp);
	hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
	unsigned int count = vector_cst_encoded_nelts (t: exp);
	for (unsigned int i = 0; i < count; ++i)
	  hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
	return hi;
      }

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree value;

	hi = 5 + int_size_in_bytes (TREE_TYPE (exp));

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	  if (value)
	    hi = hi * 603 + const_hash_1 (exp: value);

	return hi;
      }

    case ADDR_EXPR:
      if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
	return const_hash_1 (TREE_OPERAND (exp, 0));

      /* Fallthru.  */
    case FDESC_EXPR:
      {
	class addr_const value;

	decode_addr_const (exp, value: &value);
	switch (GET_CODE (value.base))
	  {
	  case SYMBOL_REF:
	    /* Don't hash the address of the SYMBOL_REF;
	       only use the offset and the symbol name.  */
	    hi = value.offset.coeffs[0];
	    p = XSTR (value.base, 0);
	    for (i = 0; p[i] != 0; i++)
	      hi = ((hi * 613) + (unsigned) (p[i]));
	    break;

	  case LABEL_REF:
	    hi = (value.offset.coeffs[0]
		  + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }
      return hi;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
	      + const_hash_1 (TREE_OPERAND (exp, 1)));

    CASE_CONVERT:
      return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;

    default:
      /* A language specific constant. Just hash the code.  */
      return code;
    }

  /* Compute hashing function.  */
  hi = len;
  for (i = 0; i < len; i++)
    hi = ((hi * 613) + (unsigned) (p[i]));

  return hi;
}
3202 | |
3203 | /* Wrapper of compare_constant, for the htab interface. */ |
3204 | bool |
3205 | tree_descriptor_hasher::equal (constant_descriptor_tree *c1, |
3206 | constant_descriptor_tree *c2) |
3207 | { |
3208 | if (c1->hash != c2->hash) |
3209 | return false; |
3210 | return compare_constant (c1->value, c2->value); |
3211 | } |
3212 | |
/* Compare t1 and t2, and return true only if they are known to result in
   the same bit pattern on output.  */

static bool
compare_constant (const tree t1, const tree t2)
{
  enum tree_code typecode;

  /* Two NULLs are equal; NULL never equals a real constant.  */
  if (t1 == NULL_TREE)
    return t2 == NULL_TREE;
  if (t2 == NULL_TREE)
    return false;

  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  switch (TREE_CODE (t1))
    {
    case INTEGER_CST:
      /* Integer constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return false;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return false;
      return tree_int_cst_equal (t1, t2);

    case REAL_CST:
      /* Real constants are the same only if the same width of type.  In
	 addition to the same width, we need to check whether the modes are the
	 same.  There might be two floating point modes that are the same size
	 but have different representations, such as the PowerPC that has 2
	 different 128-bit floating point types (IBM extended double and IEEE
	 128-bit floating point).  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return false;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return false;
      return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));

    case FIXED_CST:
      /* Fixed constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return false;

      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));

    case STRING_CST:
      /* Strings must match in mode and total object size (which covers
	 any zero padding past the string body) as well as contents.  */
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
	  || int_size_in_bytes (TREE_TYPE (t1))
	     != int_size_in_bytes (TREE_TYPE (t2)))
	return false;

      return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
			   TREE_STRING_LENGTH (t1)));

    case COMPLEX_CST:
      return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
	      && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));

    case VECTOR_CST:
      {
	/* Vectors are equal when their encodings (pattern structure and
	   encoded elements) are equal.  */
	if (VECTOR_CST_NPATTERNS (t1)
	    != VECTOR_CST_NPATTERNS (t2))
	  return false;

	if (VECTOR_CST_NELTS_PER_PATTERN (t1)
	    != VECTOR_CST_NELTS_PER_PATTERN (t2))
	  return false;

	unsigned int count = vector_cst_encoded_nelts (t: t1);
	for (unsigned int i = 0; i < count; ++i)
	  if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
				 VECTOR_CST_ENCODED_ELT (t2, i)))
	    return false;

	return true;
      }

    case CONSTRUCTOR:
      {
	vec<constructor_elt, va_gc> *v1, *v2;
	unsigned HOST_WIDE_INT idx;

	typecode = TREE_CODE (TREE_TYPE (t1));
	if (typecode != TREE_CODE (TREE_TYPE (t2)))
	  return false;

	if (typecode == ARRAY_TYPE)
	  {
	    HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
	    /* For arrays, check that mode, size and storage order match.  */
	    if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
		|| size_1 == -1
		|| size_1 != int_size_in_bytes (TREE_TYPE (t2))
		|| TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
		   != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
	      return false;
	  }
	else
	  {
	    /* For record and union constructors, require exact type
	       equality.  */
	    if (TREE_TYPE (t1) != TREE_TYPE (t2))
	      return false;
	  }

	v1 = CONSTRUCTOR_ELTS (t1);
	v2 = CONSTRUCTOR_ELTS (t2);
	if (vec_safe_length (v: v1) != vec_safe_length (v: v2))
	  return false;

	for (idx = 0; idx < vec_safe_length (v: v1); ++idx)
	  {
	    constructor_elt *c1 = &(*v1)[idx];
	    constructor_elt *c2 = &(*v2)[idx];

	    /* Check that each value is the same...  */
	    if (!compare_constant (t1: c1->value, t2: c2->value))
	      return false;
	    /* ... and that they apply to the same fields!  */
	    if (typecode == ARRAY_TYPE)
	      {
		if (!compare_constant (t1: c1->index, t2: c2->index))
		  return false;
	      }
	    else
	      {
		if (c1->index != c2->index)
		  return false;
	      }
	  }

	return true;
      }

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	class addr_const value1, value2;
	enum rtx_code code;
	bool ret;

	/* Reduce both addresses to (base, offset) and compare those.  */
	decode_addr_const (exp: t1, value: &value1);
	decode_addr_const (exp: t2, value: &value2);

	if (maybe_ne (a: value1.offset, b: value2.offset))
	  return false;

	code = GET_CODE (value1.base);
	if (code != GET_CODE (value2.base))
	  return false;

	switch (code)
	  {
	  case SYMBOL_REF:
	    ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
	    break;

	  case LABEL_REF:
	    ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
		   == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	return ret;
      }

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case RANGE_EXPR:
      return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
	      && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    default:
      /* Unknown language-specific constants never compare equal.  */
      return false;
    }
}
3398 | |
/* Return the section into which constant EXP should be placed.  */

static section *
get_constant_section (tree exp, unsigned int align)
{
  /* Let the target choose, given the constant's relocation needs and
     required alignment.  */
  return targetm.asm_out.select_section (exp,
					 compute_reloc_for_constant (exp),
					 align);
}
3408 | |
3409 | /* Return the size of constant EXP in bytes. */ |
3410 | |
3411 | static HOST_WIDE_INT |
3412 | get_constant_size (tree exp) |
3413 | { |
3414 | HOST_WIDE_INT size; |
3415 | |
3416 | size = int_size_in_bytes (TREE_TYPE (exp)); |
3417 | gcc_checking_assert (size >= 0); |
3418 | gcc_checking_assert (TREE_CODE (exp) != STRING_CST |
3419 | || size >= TREE_STRING_LENGTH (exp)); |
3420 | return size; |
3421 | } |
3422 | |
/* Subroutine of output_constant_def:
   No constant equal to EXP is known to have been output.
   Make a constant descriptor to enter EXP in the hash table.
   Assign the label number and construct RTL to refer to the
   constant's location in memory.
   Caller is responsible for updating the hash table.  */

static struct constant_descriptor_tree *
build_constant_desc (tree exp)
{
  struct constant_descriptor_tree *desc;
  rtx symbol, rtl;
  char label[256];
  int labelno;
  tree decl;

  desc = ggc_alloc<constant_descriptor_tree> ();
  desc->value = exp;

  /* Create a string containing the label name, in LABEL.  */
  labelno = const_labelno++;
  ASM_GENERATE_INTERNAL_LABEL (label, "LC" , labelno);

  /* Construct the VAR_DECL associated with the constant.  */
  decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
		     TREE_TYPE (exp));
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_READONLY (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_ADDRESSABLE (decl) = 1;
  /* We don't set the RTL yet as this would cause varpool to assume that the
     variable is referenced.  Moreover, it would just be dropped in LTO mode.
     Instead we set the flag that will be recognized in make_decl_rtl.  */
  DECL_IN_CONSTANT_POOL (decl) = 1;
  DECL_INITIAL (decl) = desc->value;
  /* ??? targetm.constant_alignment hasn't been updated for vector types on
     most architectures so use DATA_ALIGNMENT as well, except for strings.  */
  if (TREE_CODE (exp) == STRING_CST)
    SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
  else
    {
      align_variable (decl, dont_output_data: 0);
      /* Bump under-aligned pool entries up to mode alignment when the
	 target either has misaligned-move patterns or is slow at
	 unaligned access for this mode.  */
      if (DECL_ALIGN (decl) < GET_MODE_ALIGNMENT (DECL_MODE (decl))
	  && ((optab_handler (op: movmisalign_optab, DECL_MODE (decl))
	       != CODE_FOR_nothing)
	      || targetm.slow_unaligned_access (DECL_MODE (decl),
						DECL_ALIGN (decl))))
	SET_DECL_ALIGN (decl, GET_MODE_ALIGNMENT (DECL_MODE (decl)));
    }

  /* Now construct the SYMBOL_REF and the MEM.  */
  if (use_object_blocks_p ())
    {
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      section *sect = get_constant_section (exp, align);
      symbol = create_block_symbol (ggc_strdup (label),
				    block: get_block_for_section (sect), offset: -1);
    }
  else
    symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
  SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
  SET_SYMBOL_REF_DECL (symbol, decl);
  TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;

  rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
  set_mem_alias_set (rtl, 0);

  /* Putting EXP into the literal pool might have imposed a different
     alignment which should be visible in the RTX as well.  */
  set_mem_align (rtl, DECL_ALIGN (decl));

  /* We cannot share RTX'es in pool entries.
     Mark this piece of RTL as required for unsharing.  */
  RTX_FLAG (rtl, used) = 1;

  /* Set flags or add text to the name to record information, such as
     that it is a local symbol.  If the name is changed, the macro
     ASM_OUTPUT_LABELREF will have to know how to strip this
     information.  This call might invalidate our local variable
     SYMBOL; we can't use it afterward.  */
  targetm.encode_section_info (exp, rtl, true);

  desc->rtl = rtl;

  return desc;
}
3513 | |
3514 | /* Subroutine of output_constant_def and tree_output_constant_def: |
3515 | Add a constant to the hash table that tracks which constants |
3516 | already have labels. */ |
3517 | |
static constant_descriptor_tree *
add_constant_to_table (tree exp, int defer)
{
  /* The hash table methods may call output_constant_def for addressed
     constants, so handle them first.  */
  output_addressed_constants (exp, defer);

  /* Sanity check to catch recursive insertion.  */
  static bool inserting;
  gcc_assert (!inserting);
  inserting = true;

  /* Look up EXP in the table of constant descriptors.  If we didn't
     find it, create a new one.  */
  struct constant_descriptor_tree key;
  key.value = exp;
  key.hash = const_hash_1 (exp);
  constant_descriptor_tree **loc
    = const_desc_htab->find_slot_with_hash (comparable: &key, hash: key.hash, insert: INSERT);

  inserting = false;

  struct constant_descriptor_tree *desc = *loc;
  if (!desc)
    {
      /* First time we have seen this value: build a descriptor (which
	 assigns it a label and RTL) and cache it in the slot.  */
      desc = build_constant_desc (exp);
      desc->hash = key.hash;
      *loc = desc;
    }

  return desc;
}
3550 | |
3551 | /* Return an rtx representing a reference to constant data in memory |
3552 | for the constant expression EXP. |
3553 | |
3554 | If assembler code for such a constant has already been output, |
3555 | return an rtx to refer to it. |
3556 | Otherwise, output such a constant in memory |
3557 | and generate an rtx for it. |
3558 | |
3559 | If DEFER is nonzero, this constant can be deferred and output only |
3560 | if referenced in the function after all optimizations. |
3561 | |
3562 | `const_desc_table' records which constants already have label strings. */ |
3563 | |
3564 | rtx |
3565 | output_constant_def (tree exp, int defer) |
3566 | { |
3567 | struct constant_descriptor_tree *desc = add_constant_to_table (exp, defer); |
3568 | maybe_output_constant_def_contents (desc, defer); |
3569 | return desc->rtl; |
3570 | } |
3571 | |
3572 | /* Subroutine of output_constant_def: Decide whether or not we need to |
3573 | output the constant DESC now, and if so, do it. */ |
3574 | static void |
3575 | maybe_output_constant_def_contents (struct constant_descriptor_tree *desc, |
3576 | int defer) |
3577 | { |
3578 | rtx symbol = XEXP (desc->rtl, 0); |
3579 | tree exp = desc->value; |
3580 | |
3581 | if (flag_syntax_only) |
3582 | return; |
3583 | |
3584 | if (TREE_ASM_WRITTEN (exp)) |
3585 | /* Already output; don't do it again. */ |
3586 | return; |
3587 | |
3588 | /* We can always defer constants as long as the context allows |
3589 | doing so. */ |
3590 | if (defer) |
3591 | { |
3592 | /* Increment n_deferred_constants if it exists. It needs to be at |
3593 | least as large as the number of constants actually referred to |
3594 | by the function. If it's too small we'll stop looking too early |
3595 | and fail to emit constants; if it's too large we'll only look |
3596 | through the entire function when we could have stopped earlier. */ |
3597 | if (cfun) |
3598 | n_deferred_constants++; |
3599 | return; |
3600 | } |
3601 | |
3602 | output_constant_def_contents (symbol); |
3603 | } |
3604 | |
3605 | /* Subroutine of output_constant_def_contents. Output the definition |
3606 | of constant EXP, which is pointed to by label LABEL. ALIGN is the |
3607 | constant's alignment in bits. */ |
3608 | |
3609 | static void |
3610 | assemble_constant_contents (tree exp, const char *label, unsigned int align, |
3611 | bool merge_strings) |
3612 | { |
3613 | HOST_WIDE_INT size; |
3614 | |
3615 | size = get_constant_size (exp); |
3616 | |
3617 | /* Do any machine/system dependent processing of the constant. */ |
3618 | targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size); |
3619 | |
3620 | /* Output the value of EXP. */ |
3621 | output_constant (exp, size, align, false, merge_strings); |
3622 | |
3623 | targetm.asm_out.decl_end (); |
3624 | } |
3625 | |
3626 | /* We must output the constant data referred to by SYMBOL; do so. */ |
3627 | |
static void
output_constant_def_contents (rtx symbol)
{
  tree decl = SYMBOL_REF_DECL (symbol);
  tree exp = DECL_INITIAL (decl);
  bool asan_protected = false;

  /* Make sure any other constants whose addresses appear in EXP
     are assigned label numbers.  */
  output_addressed_constants (exp, 0);

  /* We are no longer deferring this constant.  */
  TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;

  if ((flag_sanitize & SANITIZE_ADDRESS)
      && TREE_CODE (exp) == STRING_CST
      && asan_protect_global (exp))
    {
      /* ASan-protected strings get a red zone appended below, so raise
	 the alignment to a red-zone boundary.  */
      asan_protected = true;
      SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
				 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
    }

  /* If the constant is part of an object block, make sure that the
     decl has been positioned within its block, but do not write out
     its definition yet.  output_object_blocks will do that later.  */
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
    place_block_symbol (symbol);
  else
    {
      /* CONST_DECLs and in-pool variables keep their own alignment;
	 otherwise ask the symbol table for the definition alignment.  */
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      section *sect = get_constant_section (exp, align);
      switch_to_section (sect);
      if (align > BITS_PER_UNIT)
	ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
      assemble_constant_contents (exp, XSTR (symbol, 0), align,
				  merge_strings: (sect->common.flags & SECTION_MERGE)
				  && (sect->common.flags & SECTION_STRINGS));
      if (asan_protected)
	{
	  /* Pad with zeros so the following data starts outside the
	     red zone for this constant.  */
	  HOST_WIDE_INT size = get_constant_size (exp);
	  assemble_zeros (size: asan_red_zone_size (size));
	}
    }
}
3676 | |
3677 | /* Look up EXP in the table of constant descriptors. Return the rtl |
3678 | if it has been emitted, else null. */ |
3679 | |
rtx
lookup_constant_def (tree exp)
{
  struct constant_descriptor_tree key;

  /* Probe the descriptor table without inserting; a miss means EXP has
     not been given a label/RTL yet.  */
  key.value = exp;
  key.hash = const_hash_1 (exp);
  constant_descriptor_tree *desc
    = const_desc_htab->find_with_hash (comparable: &key, hash: key.hash);

  return (desc ? desc->rtl : NULL_RTX);
}
3692 | |
3693 | /* Return a tree representing a reference to constant data in memory |
3694 | for the constant expression EXP. |
3695 | |
3696 | This is the counterpart of output_constant_def at the Tree level. */ |
3697 | |
tree
tree_output_constant_def (tree exp)
{
  /* Always defer here (defer == 1); the decl is handed to the varpool
     below, which takes over responsibility for emitting it.  */
  struct constant_descriptor_tree *desc = add_constant_to_table (exp, defer: 1);
  tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
  varpool_node::finalize_decl (decl);
  return decl;
}
3706 | |
class GTY((chain_next ("%h.next" ), for_user)) constant_descriptor_rtx {
public:
  /* Next entry in the same pool, in offset order.  */
  class constant_descriptor_rtx *next;
  /* MEM referring to this pool entry.  */
  rtx mem;
  /* SYMBOL_REF for this entry's "LC" label.  */
  rtx sym;
  /* The constant rtx itself (a private copy).  */
  rtx constant;
  /* Byte offset of the entry within its pool.  */
  HOST_WIDE_INT offset;
  /* Hash of CONSTANT, as computed by const_rtx_hash.  */
  hashval_t hash;
  /* Mode of the constant.  */
  fixed_size_mode mode;
  /* Required alignment, in bits.  */
  unsigned int align;
  /* Number used to build the entry's internal "LC" label.  */
  int labelno;
  /* 0 = unused, positive = used; a negative value means the entry was
     aliased to the entry whose labelno is ~mark (see
     optimize_constant_pool).  */
  int mark;
};
3720 | |
struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
{
  /* Return the hash value cached in the descriptor.  */
  static hashval_t hash (constant_descriptor_rtx *);
  /* Two descriptors match when mode and constant rtx are equal.  */
  static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
};
3726 | |
3727 | /* Used in the hash tables to avoid outputting the same constant |
3728 | twice. Unlike 'struct constant_descriptor_tree', RTX constants |
3729 | are output once per function, not once per file. */ |
3730 | /* ??? Only a few targets need per-function constant pools. Most |
3731 | can use one per-file pool. Should add a targetm bit to tell the |
3732 | difference. */ |
3733 | |
struct GTY(()) rtx_constant_pool {
  /* Pointers to first and last constant in pool, as ordered by offset.  */
  class constant_descriptor_rtx *first;
  class constant_descriptor_rtx *last;

  /* Hash facility for making memory-constants from constant rtl-expressions.
     It is used on RISC machines where immediate integer arguments and
     constant addresses are restricted so that such constants must be stored
     in memory.  */
  hash_table<const_rtx_desc_hasher> *const_rtx_htab;

  /* Current offset in constant pool, in bytes (does not include any
     machine-specific header).  */
  HOST_WIDE_INT offset;
};
3749 | |
3750 | /* Hash and compare functions for const_rtx_htab. */ |
3751 | |
3752 | hashval_t |
3753 | const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc) |
3754 | { |
3755 | return desc->hash; |
3756 | } |
3757 | |
3758 | bool |
3759 | const_rtx_desc_hasher::equal (constant_descriptor_rtx *x, |
3760 | constant_descriptor_rtx *y) |
3761 | { |
3762 | if (x->mode != y->mode) |
3763 | return false; |
3764 | return rtx_equal_p (x->constant, y->constant); |
3765 | } |
3766 | |
3767 | /* Hash one component of a constant. */ |
3768 | |
static hashval_t
const_rtx_hash_1 (const_rtx x)
{
  unsigned HOST_WIDE_INT hwi;
  machine_mode mode;
  enum rtx_code code;
  hashval_t h;
  int i;

  code = GET_CODE (x);
  mode = GET_MODE (x);
  /* Seed the hash with the rtx code and mode (1048573 is prime).  */
  h = (hashval_t) code * 1048573 + mode;

  switch (code)
    {
    case CONST_INT:
      hwi = INTVAL (x);

    fold_hwi:
      /* Fold HWI into H one hashval_t-sized chunk at a time.  When
	 hashval_t is as wide as HOST_WIDE_INT, N is 1 and the shifting
	 loop never executes.  */
      {
	int shift = sizeof (hashval_t) * CHAR_BIT;
	const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);

	h ^= (hashval_t) hwi;
	for (i = 1; i < n; ++i)
	  {
	    hwi >>= shift;
	    h ^= (hashval_t) hwi;
	  }
      }
      break;

    case CONST_WIDE_INT:
      /* XOR all elements together, then fold like a single HWI.  */
      hwi = 0;
      {
	for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	  hwi ^= CONST_WIDE_INT_ELT (x, i);
	goto fold_hwi;
      }

    case CONST_DOUBLE:
      if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
	{
	  /* A VOIDmode CONST_DOUBLE holds a double-width integer.  */
	  hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
	  goto fold_hwi;
	}
      else
	h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
      break;

    case CONST_FIXED:
      h ^= fixed_hash (CONST_FIXED_VALUE (x));
      break;

    case SYMBOL_REF:
      /* Hash the symbol's name string.  */
      h ^= htab_hash_string (XSTR (x, 0));
      break;

    case LABEL_REF:
      h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
      break;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      /* Mix in the unspec number; operands are hashed separately by
	 the caller's sub-rtx walk.  */
      h = h * 251 + XINT (x, 1);
      break;

    default:
      break;
    }

  return h;
}
3842 | |
3843 | /* Compute a hash value for X, which should be a constant. */ |
3844 | |
static hashval_t
const_rtx_hash (rtx x)
{
  hashval_t h = 0;
  subrtx_iterator::array_type array;
  /* Combine the component hashes of X and every sub-rtx of X
     (509 is prime).  */
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    h = h * 509 + const_rtx_hash_1 (x: *iter);
  return h;
}
3854 | |
3855 | |
3856 | /* Create and return a new rtx constant pool. */ |
3857 | |
static struct rtx_constant_pool *
create_constant_pool (void)
{
  struct rtx_constant_pool *pool;

  /* The pool and its hash table are GC-allocated.  */
  pool = ggc_alloc<rtx_constant_pool> ();
  pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (n: 31);
  pool->first = NULL;
  pool->last = NULL;
  pool->offset = 0;
  return pool;
}
3870 | |
3871 | /* Initialize constant pool hashing for a new function. */ |
3872 | |
3873 | void |
3874 | init_varasm_status (void) |
3875 | { |
3876 | crtl->varasm.pool = create_constant_pool (); |
3877 | crtl->varasm.deferred_constants = 0; |
3878 | } |
3879 | |
3880 | /* Given a MINUS expression, simplify it if both sides |
3881 | include the same symbol. */ |
3882 | |
3883 | rtx |
3884 | simplify_subtraction (rtx x) |
3885 | { |
3886 | rtx r = simplify_rtx (x); |
3887 | return r ? r : x; |
3888 | } |
3889 | |
3890 | /* Given a constant rtx X, make (or find) a memory constant for its value |
3891 | and return a MEM rtx to refer to it in memory. IN_MODE is the mode |
3892 | of X. */ |
3893 | |
rtx
force_const_mem (machine_mode in_mode, rtx x)
{
  class constant_descriptor_rtx *desc, tmp;
  struct rtx_constant_pool *pool;
  char label[256];
  rtx def, symbol;
  hashval_t hash;
  unsigned int align;
  constant_descriptor_rtx **slot;
  fixed_size_mode mode;

  /* We can't force variable-sized objects to memory.  */
  if (!is_a <fixed_size_mode> (m: in_mode, result: &mode))
    return NULL_RTX;

  /* If we're not allowed to drop X into the constant pool, don't.  */
  if (targetm.cannot_force_const_mem (mode, x))
    return NULL_RTX;

  /* Record that this function has used a constant pool entry.  */
  crtl->uses_const_pool = 1;

  /* Decide which pool to use.  */
  pool = (targetm.use_blocks_for_constant_p (mode, x)
	  ? shared_constant_pool
	  : crtl->varasm.pool);

  /* Lookup the value in the hashtable.  */
  tmp.constant = x;
  tmp.mode = mode;
  hash = const_rtx_hash (x);
  slot = pool->const_rtx_htab->find_slot_with_hash (comparable: &tmp, hash, insert: INSERT);
  desc = *slot;

  /* If the constant was already present, return its memory.  */
  if (desc)
    return copy_rtx (desc->mem);

  /* Otherwise, create a new descriptor.  */
  desc = ggc_alloc<constant_descriptor_rtx> ();
  *slot = desc;

  /* Align the location counter as required by EXP's data type.
     VOIDmode constants are given word alignment.  */
  machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
  align = targetm.static_rtx_alignment (align_mode);

  /* Round the pool offset up to an ALIGN-bit boundary (ALIGN is in
     bits, the offset in bytes).  */
  pool->offset += (align / BITS_PER_UNIT) - 1;
  pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);

  desc->next = NULL;
  desc->constant = copy_rtx (tmp.constant);
  desc->offset = pool->offset;
  desc->hash = hash;
  desc->mode = mode;
  desc->align = align;
  desc->labelno = const_labelno;
  desc->mark = 0;

  /* Append the descriptor to the pool's offset-ordered list.  */
  pool->offset += GET_MODE_SIZE (mode);
  if (pool->last)
    pool->last->next = desc;
  else
    pool->first = pool->last = desc;
  pool->last = desc;

  /* Create a string containing the label name, in LABEL.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LC" , const_labelno);
  ++const_labelno;

  /* Construct the SYMBOL_REF.  Make sure to mark it as belonging to
     the constants pool.  */
  if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
    {
      section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
      symbol = create_block_symbol (ggc_strdup (label),
				    block: get_block_for_section (sect), offset: -1);
    }
  else
    symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
  desc->sym = symbol;
  SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
  CONSTANT_POOL_ADDRESS_P (symbol) = 1;
  SET_SYMBOL_REF_CONSTANT (symbol, desc);

  /* Construct the MEM.  */
  desc->mem = def = gen_const_mem (mode, symbol);
  set_mem_align (def, align);

  /* If we're dropping a label to the constant pool, make sure we
     don't delete it.  */
  if (GET_CODE (x) == LABEL_REF)
    LABEL_PRESERVE_P (XEXP (x, 0)) = 1;

  /* Hand out a copy so the cached MEM rtx itself is never shared.  */
  return copy_rtx (def);
}
3990 | |
3991 | /* Given a constant pool SYMBOL_REF, return the corresponding constant. */ |
3992 | |
3993 | rtx |
3994 | get_pool_constant (const_rtx addr) |
3995 | { |
3996 | return SYMBOL_REF_CONSTANT (addr)->constant; |
3997 | } |
3998 | |
3999 | /* Given a constant pool SYMBOL_REF, return the corresponding constant |
4000 | and whether it has been output or not. */ |
4001 | |
4002 | rtx |
4003 | get_pool_constant_mark (rtx addr, bool *pmarked) |
4004 | { |
4005 | class constant_descriptor_rtx *desc; |
4006 | |
4007 | desc = SYMBOL_REF_CONSTANT (addr); |
4008 | *pmarked = (desc->mark != 0); |
4009 | return desc->constant; |
4010 | } |
4011 | |
4012 | /* Similar, return the mode. */ |
4013 | |
4014 | fixed_size_mode |
4015 | get_pool_mode (const_rtx addr) |
4016 | { |
4017 | return SYMBOL_REF_CONSTANT (addr)->mode; |
4018 | } |
4019 | |
4020 | /* Return TRUE if and only if the constant pool has no entries. Note |
4021 | that even entries we might end up choosing not to emit are counted |
4022 | here, so there is the potential for missed optimizations. */ |
4023 | |
4024 | bool |
4025 | constant_pool_empty_p (void) |
4026 | { |
4027 | return crtl->varasm.pool->first == NULL; |
4028 | } |
4029 | |
4030 | /* Worker function for output_constant_pool_1. Emit assembly for X |
4031 | in MODE with known alignment ALIGN. */ |
4032 | |
static void
output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
      {
	gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
	assemble_real (d: *CONST_DOUBLE_REAL_VALUE (x),
		       mode: as_a <scalar_float_mode> (m: mode), align, reverse: false);
	break;
      }

    case MODE_INT:
    case MODE_PARTIAL_INT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      assemble_integer (x, size: GET_MODE_SIZE (mode), align, force: 1);
      break;

    case MODE_VECTOR_BOOL:
      {
	gcc_assert (GET_CODE (x) == CONST_VECTOR);

	/* Pick the smallest integer mode that contains at least one
	   whole element.  Often this is byte_mode and contains more
	   than one element.  */
	unsigned int nelts = GET_MODE_NUNITS (mode);
	unsigned int elt_bits = GET_MODE_PRECISION (mode) / nelts;
	unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
	scalar_int_mode int_mode = int_mode_for_size (size: int_bits, limit: 0).require ();
	unsigned int mask = GET_MODE_MASK (GET_MODE_INNER (mode));

	/* We allow GET_MODE_PRECISION (mode) <= GET_MODE_BITSIZE (mode) but
	   only properly handle cases where the difference is less than a
	   byte.  */
	gcc_assert (GET_MODE_BITSIZE (mode) - GET_MODE_PRECISION (mode) <
		    BITS_PER_UNIT);

	/* Build the constant up one integer at a time.  */
	unsigned int elts_per_int = int_bits / elt_bits;
	for (unsigned int i = 0; i < nelts; i += elts_per_int)
	  {
	    unsigned HOST_WIDE_INT value = 0;
	    unsigned int limit = MIN (nelts - i, elts_per_int);
	    /* Pack up to LIMIT masked elements, lowest-numbered element
	       in the lowest bits.  */
	    for (unsigned int j = 0; j < limit; ++j)
	      {
		auto elt = INTVAL (CONST_VECTOR_ELT (x, i + j));
		value |= (elt & mask) << (j * elt_bits);
	      }
	    /* Recurse to emit the packed integer; only the first chunk
	       gets the full alignment.  */
	    output_constant_pool_2 (mode: int_mode, x: gen_int_mode (value, int_mode),
				    align: i != 0 ? MIN (align, int_bits) : align);
	  }
	break;
      }
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      {
	int i, units;
	scalar_mode submode = GET_MODE_INNER (mode);
	unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));

	gcc_assert (GET_CODE (x) == CONST_VECTOR);
	units = GET_MODE_NUNITS (mode);

	/* Emit each element recursively; only the first element gets
	   the full alignment.  */
	for (i = 0; i < units; i++)
	  {
	    rtx elt = CONST_VECTOR_ELT (x, i);
	    output_constant_pool_2 (mode: submode, x: elt, align: i ? subalign : align);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
4117 | |
4118 | /* Worker function for output_constant_pool. Emit constant DESC, |
4119 | giving it ALIGN bits of alignment. */ |
4120 | |
static void
output_constant_pool_1 (class constant_descriptor_rtx *desc,
			unsigned int align)
{
  rtx x, tmp;

  x = desc->constant;

  /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
     whose CODE_LABEL has been deleted.  This can occur if a jump table
     is eliminated by optimization.  If so, write a constant of zero
     instead.  Note that this can also happen by turning the
     CODE_LABEL into a NOTE.  */
  /* ??? This seems completely and utterly wrong.  Certainly it's
     not true for NOTE_INSN_DELETED_LABEL, but I disbelieve proper
     functioning even with rtx_insn::deleted and friends.  */

  tmp = x;
  switch (GET_CODE (tmp))
    {
    case CONST:
      /* Only a (const (plus (label_ref ...) ...)) form needs the
	 LABEL_REF check below.  */
      if (GET_CODE (XEXP (tmp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
	break;
      tmp = XEXP (XEXP (tmp, 0), 0);
      /* FALLTHRU */

    case LABEL_REF:
      {
	rtx_insn *insn = label_ref_label (ref: tmp);
	gcc_assert (!insn->deleted ());
	gcc_assert (!NOTE_P (insn)
		    || NOTE_KIND (insn) != NOTE_INSN_DELETED);
	break;
      }

    default:
      break;
    }

#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
  /* The target may emit the whole entry itself and branch to DONE.  */
  ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
				 align, desc->labelno, done);
#endif

  assemble_align (align);

  /* Output the label.  */
  targetm.asm_out.internal_label (asm_out_file, "LC" , desc->labelno);

  /* Output the data.
     Pass actual alignment value while emitting string constant to asm code
     as function 'output_constant_pool_1' explicitly passes the alignment as 1
     assuming that the data is already aligned which prevents the generation
     of fix-up table entries.  */
  output_constant_pool_2 (mode: desc->mode, x, align: desc->align);

  /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
     sections have proper size.  */
  if (align > GET_MODE_BITSIZE (mode: desc->mode)
      && in_section
      && (in_section->common.flags & SECTION_MERGE))
    assemble_align (align);

#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
 done:
#endif
  return;
}
4190 | |
4191 | /* Recompute the offsets of entries in POOL, and the overall size of |
4192 | POOL. Do this after calling mark_constant_pool to ensure that we |
4193 | are computing the offset values for the pool which we will actually |
4194 | emit. */ |
4195 | |
static void
recompute_pool_offsets (struct rtx_constant_pool *pool)
{
  class constant_descriptor_rtx *desc;
  pool->offset = 0;

  /* Walk the offset-ordered list, skipping entries that will not be
     emitted (mark == 0).  */
  for (desc = pool->first; desc ; desc = desc->next)
    if (desc->mark)
      {
	/* Recalculate offset: round up to the entry's alignment (ALIGN
	   is in bits, the offset in bytes), then advance past it.  */
	unsigned int align = desc->align;
	pool->offset += (align / BITS_PER_UNIT) - 1;
	pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
	desc->offset = pool->offset;
	pool->offset += GET_MODE_SIZE (mode: desc->mode);
      }
}
4213 | |
4214 | /* Mark all constants that are referenced by SYMBOL_REFs in X. |
4215 | Emit referenced deferred strings. */ |
4216 | |
static void
mark_constants_in_pattern (rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == SYMBOL_REF)
	{
	  if (CONSTANT_POOL_ADDRESS_P (x))
	    {
	      class constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
	      if (desc->mark == 0)
		{
		  desc->mark = 1;
		  /* Also walk the referenced constant itself, so pool
		     entries reachable only through other entries get
		     marked too.  */
		  iter.substitute (x: desc->constant);
		}
	    }
	  else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
	    {
	      /* A deferred tree constant is actually used: emit it now
		 and update the outstanding-deferral count.  */
	      tree decl = SYMBOL_REF_DECL (x);
	      if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
		{
		  n_deferred_constants--;
		  output_constant_def_contents (CONST_CAST_RTX (x));
		}
	    }
	}
    }
}
4247 | |
4248 | /* Look through appropriate parts of INSN, marking all entries in the |
4249 | constant pool which are actually being used. Entries that are only |
4250 | referenced by other constants are also marked as used. Emit |
4251 | deferred strings that are used. */ |
4252 | |
static void
mark_constants (rtx_insn *insn)
{
  if (!INSN_P (insn))
    return;

  /* Insns may appear inside a SEQUENCE.  Only check the patterns of
     insns, not any notes that may be attached.  We don't want to mark
     a constant just because it happens to appear in a REG_EQUIV note.  */
  if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (p: PATTERN (insn)))
    {
      int i, n = seq->len ();
      for (i = 0; i < n; ++i)
	{
	  rtx subinsn = seq->element (index: i);
	  if (INSN_P (subinsn))
	    mark_constants_in_pattern (insn: subinsn);
	}
    }
  else
    mark_constants_in_pattern (insn);
}
4275 | |
4276 | /* Look through the instructions for this function, and mark all the |
4277 | entries in POOL which are actually being used. Emit deferred constants |
4278 | which have indeed been used. */ |
4279 | |
4280 | static void |
4281 | mark_constant_pool (void) |
4282 | { |
4283 | rtx_insn *insn; |
4284 | |
4285 | if (!crtl->uses_const_pool && n_deferred_constants == 0) |
4286 | return; |
4287 | |
4288 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
4289 | mark_constants (insn); |
4290 | } |
4291 | |
4292 | /* Write all the constants in POOL. */ |
4293 | |
static void
output_constant_pool_contents (struct rtx_constant_pool *pool)
{
  class constant_descriptor_rtx *desc;

  for (desc = pool->first; desc ; desc = desc->next)
    if (desc->mark < 0)
      {
#ifdef ASM_OUTPUT_DEF
	/* A negative mark means optimize_constant_pool aliased this
	   entry to the one labeled ~MARK; emit this entry's label as a
	   definition in terms of that label (plus offset).  */
	gcc_checking_assert (TARGET_SUPPORTS_ALIASES);

	const char *name = XSTR (desc->sym, 0);
	char label[256];
	char buffer[256 + 32];
	const char *p;

	ASM_GENERATE_INTERNAL_LABEL (label, "LC" , ~desc->mark);
	p = label;
	if (desc->offset)
	  {
	    sprintf (s: buffer, format: "%s+%ld" , p, (long) (desc->offset));
	    p = buffer;
	  }
	ASM_OUTPUT_DEF (asm_out_file, name, p);
#else
	gcc_unreachable ();
#endif
      }
    else if (desc->mark)
      {
	/* If the constant is part of an object_block, make sure that
	   the constant has been positioned within its block, but do not
	   write out its definition yet.  output_object_blocks will do
	   that later.  */
	if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
	    && SYMBOL_REF_BLOCK (desc->sym))
	  place_block_symbol (desc->sym);
	else
	  {
	    switch_to_section (targetm.asm_out.select_rtx_section
			       (desc->mode, desc->constant, desc->align));
	    output_constant_pool_1 (desc, align: desc->align);
	  }
      }
}
4339 | |
struct constant_descriptor_rtx_data {
  /* The pool entry this record describes (or belongs to, for sub-spans).  */
  constant_descriptor_rtx *desc;
  /* Target-encoded bytes of the span; NULL until filled in by
     optimize_constant_pool.  */
  target_unit *bytes;
  /* Number of bytes in the span.  */
  unsigned short size;
  /* Byte offset of the span within DESC's constant.  */
  unsigned short offset;
  /* Initially a unique insertion index (used as a qsort tie-break);
     later the iterative hash of BYTES.  */
  unsigned int hash;
};
4347 | |
4348 | /* qsort callback to sort constant_descriptor_rtx_data * vector by |
4349 | decreasing size. */ |
4350 | |
4351 | static int |
4352 | constant_descriptor_rtx_data_cmp (const void *p1, const void *p2) |
4353 | { |
4354 | constant_descriptor_rtx_data *const data1 |
4355 | = *(constant_descriptor_rtx_data * const *) p1; |
4356 | constant_descriptor_rtx_data *const data2 |
4357 | = *(constant_descriptor_rtx_data * const *) p2; |
4358 | if (data1->size > data2->size) |
4359 | return -1; |
4360 | if (data1->size < data2->size) |
4361 | return 1; |
4362 | if (data1->hash < data2->hash) |
4363 | return -1; |
4364 | gcc_assert (data1->hash > data2->hash); |
4365 | return 1; |
4366 | } |
4367 | |
struct const_rtx_data_hasher : nofree_ptr_hash<constant_descriptor_rtx_data>
{
  /* Return the precomputed byte-level hash.  */
  static hashval_t hash (constant_descriptor_rtx_data *);
  /* Byte-wise equality, with an alignment-compatibility check.  */
  static bool equal (constant_descriptor_rtx_data *,
		     constant_descriptor_rtx_data *);
};
4374 | |
4375 | /* Hash and compare functions for const_rtx_data_htab. */ |
4376 | |
4377 | hashval_t |
4378 | const_rtx_data_hasher::hash (constant_descriptor_rtx_data *data) |
4379 | { |
4380 | return data->hash; |
4381 | } |
4382 | |
bool
const_rtx_data_hasher::equal (constant_descriptor_rtx_data *x,
			      constant_descriptor_rtx_data *y)
{
  if (x->hash != y->hash || x->size != y->size)
    return false;
  /* Work out the alignment each span is actually guaranteed: a span at
     a non-zero bit offset within its constant is only aligned to the
     lowest set bit of that offset.  */
  unsigned int align1 = x->desc->align;
  unsigned int align2 = y->desc->align;
  unsigned int offset1 = (x->offset * BITS_PER_UNIT) & (align1 - 1);
  unsigned int offset2 = (y->offset * BITS_PER_UNIT) & (align2 - 1);
  if (offset1)
    align1 = least_bit_hwi (x: offset1);
  if (offset2)
    align2 = least_bit_hwi (x: offset2);
  /* The recorded span (X) must guarantee at least the alignment the
     candidate (Y) needs, otherwise it cannot stand in for it.  */
  if (align2 > align1)
    return false;
  /* Finally, the encoded bytes must match exactly.  */
  if (memcmp (s1: x->bytes, s2: y->bytes, n: x->size * sizeof (target_unit)) != 0)
    return false;
  return true;
}
4403 | |
4404 | /* Attempt to optimize constant pool POOL. If it contains both CONST_VECTOR |
4405 | constants and scalar constants with the values of CONST_VECTOR elements, |
4406 | try to alias the scalar constants with the CONST_VECTOR elements. */ |
4407 | |
static void
optimize_constant_pool (struct rtx_constant_pool *pool)
{
  auto_vec<target_unit, 128> buffer;
  auto_vec<constant_descriptor_rtx_data *, 128> vec;
  object_allocator<constant_descriptor_rtx_data>
    data_pool ("constant_descriptor_rtx_data_pool" );
  int idx = 0;
  size_t size = 0;
  /* Collect every marked, non-block entry whose bytes can be encoded
     in the target representation.  */
  for (constant_descriptor_rtx *desc = pool->first; desc; desc = desc->next)
    if (desc->mark > 0
	&& ! (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
	      && SYMBOL_REF_BLOCK (desc->sym)))
      {
	buffer.truncate (size: 0);
	buffer.reserve (nelems: GET_MODE_SIZE (mode: desc->mode));
	if (native_encode_rtx (desc->mode, desc->constant, buffer, 0,
			       GET_MODE_SIZE (mode: desc->mode)))
	  {
	    constant_descriptor_rtx_data *data = data_pool.allocate ();
	    data->desc = desc;
	    data->bytes = NULL;
	    data->size = GET_MODE_SIZE (mode: desc->mode);
	    data->offset = 0;
	    /* HASH temporarily holds a unique index; it becomes a real
	       byte hash below.  */
	    data->hash = idx++;
	    size += data->size;
	    vec.safe_push (obj: data);
	  }
      }
  if (idx)
    {
      /* Sort by decreasing size so larger constants are entered into
	 the hash table before any smaller constants they might
	 contain.  */
      vec.qsort (constant_descriptor_rtx_data_cmp);
      unsigned min_size = vec.last ()->size;
      target_unit *bytes = XNEWVEC (target_unit, size);
      unsigned int i;
      constant_descriptor_rtx_data *data;
      hash_table<const_rtx_data_hasher> * htab
	= new hash_table<const_rtx_data_hasher> (31);
      size = 0;
      FOR_EACH_VEC_ELT (vec, i, data)
	{
	  buffer.truncate (size: 0);
	  native_encode_rtx (data->desc->mode, data->desc->constant,
			     buffer, 0, data->size);
	  memcpy (dest: bytes + size, src: buffer.address (), n: data->size);
	  data->bytes = bytes + size;
	  data->hash = iterative_hash (data->bytes,
				       data->size * sizeof (target_unit), 0);
	  size += data->size;
	  constant_descriptor_rtx_data **slot
	    = htab->find_slot_with_hash (comparable: data, hash: data->hash, insert: INSERT);
	  if (*slot)
	    {
	      /* The same bytes were already recorded: alias this entry
		 to the earlier one via a negative mark (~labelno).  */
	      data->desc->mark = ~(*slot)->desc->labelno;
	      data->desc->offset = (*slot)->offset;
	    }
	  else
	    {
	      unsigned int sz = 1 << floor_log2 (x: data->size);

	      *slot = data;
	      /* Also record every power-of-two-sized sub-span of this
		 constant, so a later, smaller constant can match bytes
		 inside it.  */
	      for (sz >>= 1; sz >= min_size; sz >>= 1)
		for (unsigned off = 0; off + sz <= data->size; off += sz)
		  {
		    constant_descriptor_rtx_data tmp;
		    tmp.desc = data->desc;
		    tmp.bytes = data->bytes + off;
		    tmp.size = sz;
		    tmp.offset = off;
		    tmp.hash = iterative_hash (tmp.bytes,
					       sz * sizeof (target_unit), 0);
		    slot = htab->find_slot_with_hash (comparable: &tmp, hash: tmp.hash, insert: INSERT);
		    if (*slot == NULL)
		      {
			*slot = data_pool.allocate ();
			**slot = tmp;
		      }
		  }
	    }
	}
      delete htab;
      XDELETE (bytes);
    }
  data_pool.release ();
}
4493 | |
/* Mark all constants that are used in the current function, then write
   out the function's private constant pool.  FNNAME and FNDECL identify
   the function, and are only consumed by the optional target macros.  */

static void
output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
		      tree fndecl ATTRIBUTE_UNUSED)
{
  struct rtx_constant_pool *pool = crtl->varasm.pool;

  /* It is possible for gcc to call force_const_mem and then to later
     discard the instructions which refer to the constant.  In such a
     case we do not need to output the constant.  */
  mark_constant_pool ();

  /* Having marked the constant pool entries we'll actually emit, we
     now need to rebuild the offset information, which may have become
     stale.  */
  recompute_pool_offsets (pool);

  /* Targets may bracket the pool with prologue/epilogue assembly.  */
#ifdef ASM_OUTPUT_POOL_PROLOGUE
  ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif

  output_constant_pool_contents (pool);

#ifdef ASM_OUTPUT_POOL_EPILOGUE
  ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif
}
4523 | |
4524 | /* Write the contents of the shared constant pool. */ |
4525 | |
4526 | void |
4527 | output_shared_constant_pool (void) |
4528 | { |
4529 | if (optimize |
4530 | && TARGET_SUPPORTS_ALIASES) |
4531 | optimize_constant_pool (pool: shared_constant_pool); |
4532 | |
4533 | output_constant_pool_contents (pool: shared_constant_pool); |
4534 | } |
4535 | |
4536 | /* Determine what kind of relocations EXP may need. */ |
4537 | |
4538 | int |
4539 | compute_reloc_for_constant (tree exp) |
4540 | { |
4541 | int reloc = 0, reloc2; |
4542 | tree tem; |
4543 | |
4544 | switch (TREE_CODE (exp)) |
4545 | { |
4546 | case ADDR_EXPR: |
4547 | case FDESC_EXPR: |
4548 | /* Go inside any operations that get_inner_reference can handle and see |
4549 | if what's inside is a constant: no need to do anything here for |
4550 | addresses of variables or functions. */ |
4551 | for (tem = TREE_OPERAND (exp, 0); handled_component_p (t: tem); |
4552 | tem = TREE_OPERAND (tem, 0)) |
4553 | ; |
4554 | |
4555 | if (TREE_CODE (tem) == MEM_REF |
4556 | && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR) |
4557 | { |
4558 | reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0)); |
4559 | break; |
4560 | } |
4561 | |
4562 | if (!targetm.binds_local_p (tem)) |
4563 | reloc |= 2; |
4564 | else |
4565 | reloc |= 1; |
4566 | break; |
4567 | |
4568 | case PLUS_EXPR: |
4569 | case POINTER_PLUS_EXPR: |
4570 | reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0)); |
4571 | reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1)); |
4572 | break; |
4573 | |
4574 | case MINUS_EXPR: |
4575 | reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0)); |
4576 | reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1)); |
4577 | /* The difference of two local labels is computable at link time. */ |
4578 | if (reloc == 1 && reloc2 == 1) |
4579 | reloc = 0; |
4580 | else |
4581 | reloc |= reloc2; |
4582 | break; |
4583 | |
4584 | CASE_CONVERT: |
4585 | case VIEW_CONVERT_EXPR: |
4586 | reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0)); |
4587 | break; |
4588 | |
4589 | case CONSTRUCTOR: |
4590 | { |
4591 | unsigned HOST_WIDE_INT idx; |
4592 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem) |
4593 | if (tem != 0) |
4594 | reloc |= compute_reloc_for_constant (exp: tem); |
4595 | } |
4596 | break; |
4597 | |
4598 | default: |
4599 | break; |
4600 | } |
4601 | return reloc; |
4602 | } |
4603 | |
4604 | /* Find all the constants whose addresses are referenced inside of EXP, |
4605 | and make sure assembler code with a label has been output for each one. |
4606 | Indicate whether an ADDR_EXPR has been encountered. */ |
4607 | |
4608 | static void |
4609 | output_addressed_constants (tree exp, int defer) |
4610 | { |
4611 | tree tem; |
4612 | |
4613 | switch (TREE_CODE (exp)) |
4614 | { |
4615 | case ADDR_EXPR: |
4616 | case FDESC_EXPR: |
4617 | /* Go inside any operations that get_inner_reference can handle and see |
4618 | if what's inside is a constant: no need to do anything here for |
4619 | addresses of variables or functions. */ |
4620 | for (tem = TREE_OPERAND (exp, 0); handled_component_p (t: tem); |
4621 | tem = TREE_OPERAND (tem, 0)) |
4622 | ; |
4623 | |
4624 | /* If we have an initialized CONST_DECL, retrieve the initializer. */ |
4625 | if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem)) |
4626 | tem = DECL_INITIAL (tem); |
4627 | |
4628 | if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR) |
4629 | output_constant_def (exp: tem, defer); |
4630 | |
4631 | if (TREE_CODE (tem) == MEM_REF) |
4632 | output_addressed_constants (TREE_OPERAND (tem, 0), defer); |
4633 | break; |
4634 | |
4635 | case PLUS_EXPR: |
4636 | case POINTER_PLUS_EXPR: |
4637 | case MINUS_EXPR: |
4638 | output_addressed_constants (TREE_OPERAND (exp, 1), defer); |
4639 | gcc_fallthrough (); |
4640 | |
4641 | CASE_CONVERT: |
4642 | case VIEW_CONVERT_EXPR: |
4643 | output_addressed_constants (TREE_OPERAND (exp, 0), defer); |
4644 | break; |
4645 | |
4646 | case CONSTRUCTOR: |
4647 | { |
4648 | unsigned HOST_WIDE_INT idx; |
4649 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem) |
4650 | if (tem != 0) |
4651 | output_addressed_constants (exp: tem, defer); |
4652 | } |
4653 | break; |
4654 | |
4655 | default: |
4656 | break; |
4657 | } |
4658 | } |
4659 | |
4660 | /* Whether a constructor CTOR is a valid static constant initializer if all |
4661 | its elements are. This used to be internal to initializer_constant_valid_p |
4662 | and has been exposed to let other functions like categorize_ctor_elements |
4663 | evaluate the property while walking a constructor for other purposes. */ |
4664 | |
4665 | bool |
4666 | constructor_static_from_elts_p (const_tree ctor) |
4667 | { |
4668 | return (TREE_CONSTANT (ctor) |
4669 | && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE |
4670 | || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE |
4671 | || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)); |
4672 | } |
4673 | |
4674 | static tree initializer_constant_valid_p_1 (tree value, tree endtype, |
4675 | tree *cache); |
4676 | |
4677 | /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR, |
4678 | PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE |
4679 | which are valid when ENDTYPE is an integer of any size; in |
4680 | particular, this does not accept a pointer minus a constant. This |
4681 | returns null_pointer_node if the VALUE is an absolute constant |
4682 | which can be used to initialize a static variable. Otherwise it |
4683 | returns NULL. */ |
4684 | |
4685 | static tree |
4686 | narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache) |
4687 | { |
4688 | tree op0, op1; |
4689 | |
4690 | if (!INTEGRAL_TYPE_P (endtype)) |
4691 | return NULL_TREE; |
4692 | |
4693 | op0 = TREE_OPERAND (value, 0); |
4694 | op1 = TREE_OPERAND (value, 1); |
4695 | |
4696 | /* Like STRIP_NOPS except allow the operand mode to widen. This |
4697 | works around a feature of fold that simplifies (int)(p1 - p2) to |
4698 | ((int)p1 - (int)p2) under the theory that the narrower operation |
4699 | is cheaper. */ |
4700 | |
4701 | while (CONVERT_EXPR_P (op0) |
4702 | || TREE_CODE (op0) == NON_LVALUE_EXPR) |
4703 | { |
4704 | tree inner = TREE_OPERAND (op0, 0); |
4705 | if (inner == error_mark_node |
4706 | || ! INTEGRAL_TYPE_P (TREE_TYPE (op0)) |
4707 | || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (op0))) |
4708 | || ! INTEGRAL_TYPE_P (TREE_TYPE (inner)) |
4709 | || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (inner))) |
4710 | || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0))) |
4711 | > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner))))) |
4712 | break; |
4713 | op0 = inner; |
4714 | } |
4715 | |
4716 | while (CONVERT_EXPR_P (op1) |
4717 | || TREE_CODE (op1) == NON_LVALUE_EXPR) |
4718 | { |
4719 | tree inner = TREE_OPERAND (op1, 0); |
4720 | if (inner == error_mark_node |
4721 | || ! INTEGRAL_TYPE_P (TREE_TYPE (op1)) |
4722 | || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (op1))) |
4723 | || ! INTEGRAL_TYPE_P (TREE_TYPE (inner)) |
4724 | || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (inner))) |
4725 | || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1))) |
4726 | > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner))))) |
4727 | break; |
4728 | op1 = inner; |
4729 | } |
4730 | |
4731 | op0 = initializer_constant_valid_p_1 (value: op0, endtype, cache); |
4732 | if (!op0) |
4733 | return NULL_TREE; |
4734 | |
4735 | op1 = initializer_constant_valid_p_1 (value: op1, endtype, |
4736 | cache: cache ? cache + 2 : NULL); |
4737 | /* Both initializers must be known. */ |
4738 | if (op1) |
4739 | { |
4740 | if (op0 == op1 |
4741 | && (op0 == null_pointer_node |
4742 | || TREE_CODE (value) == MINUS_EXPR)) |
4743 | return null_pointer_node; |
4744 | |
4745 | /* Support differences between labels. */ |
4746 | if (TREE_CODE (op0) == LABEL_DECL |
4747 | && TREE_CODE (op1) == LABEL_DECL) |
4748 | return null_pointer_node; |
4749 | |
4750 | if (TREE_CODE (op0) == STRING_CST |
4751 | && TREE_CODE (op1) == STRING_CST |
4752 | && operand_equal_p (op0, op1, flags: 1)) |
4753 | return null_pointer_node; |
4754 | } |
4755 | |
4756 | return NULL_TREE; |
4757 | } |
4758 | |
/* Helper function of initializer_constant_valid_p.
   Return nonzero if VALUE is a valid constant-valued expression
   for use in initializing a static variable; one that can be an
   element of a "constant" initializer.

   Return null_pointer_node if the value is absolute;
   if it is relocatable, return the variable that determines the relocation.
   We assume that VALUE has been folded as much as possible;
   therefore, we do not need to check for such things as
   arithmetic-combinations of integers.

   Use CACHE (pointer to 2 tree values) for caching if non-NULL.
   CACHE[0] is the last VALUE queried, CACHE[1] the memoized result.  */

static tree
initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
{
  tree ret;

  switch (TREE_CODE (value))
    {
    case CONSTRUCTOR:
      if (constructor_static_from_elts_p (ctor: value))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree elt;
	  bool absolute = true;

	  /* Memoized answer for this very constructor?  */
	  if (cache && cache[0] == value)
	    return cache[1];
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
	    {
	      tree reloc;
	      reloc = initializer_constant_valid_p_1 (value: elt, TREE_TYPE (elt),
						      NULL);
	      if (!reloc
		  /* An absolute value is required with reverse SSO.  */
		  || (reloc != null_pointer_node
		      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
		      && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
		{
		  if (cache)
		    {
		      cache[0] = value;
		      cache[1] = NULL_TREE;
		    }
		  return NULL_TREE;
		}
	      if (reloc != null_pointer_node)
		absolute = false;
	    }
	  /* For a non-absolute relocation, there is no single
	     variable that can be "the variable that determines the
	     relocation."  */
	  if (cache)
	    {
	      cache[0] = value;
	      cache[1] = absolute ? null_pointer_node : error_mark_node;
	    }
	  return absolute ? null_pointer_node : error_mark_node;
	}

      return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;

    case INTEGER_CST:
    case VECTOR_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
      /* Plain constants are always absolute.  */
      return null_pointer_node;

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	tree op0 = staticp (TREE_OPERAND (value, 0));
	if (op0)
	  {
	    /* "&(*a).f" is like unto pointer arithmetic.  If "a" turns out
	       to be a constant, this is old-skool offsetof-like nonsense.  */
	    if (TREE_CODE (op0) == INDIRECT_REF
		&& TREE_CONSTANT (TREE_OPERAND (op0, 0)))
	      return null_pointer_node;
	    /* Taking the address of a nested function involves a trampoline,
	       unless we don't need or want one.  */
	    if (TREE_CODE (op0) == FUNCTION_DECL
		&& DECL_STATIC_CHAIN (op0)
		&& !TREE_NO_TRAMPOLINE (value))
	      return NULL_TREE;
	    /* "&{...}" requires a temporary to hold the constructed
	       object.  */
	    if (TREE_CODE (op0) == CONSTRUCTOR)
	      return NULL_TREE;
	  }
	/* Otherwise the static base (or NULL) is the relocation.  */
	return op0;
      }

    case NON_LVALUE_EXPR:
      return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					     endtype, cache);

    case VIEW_CONVERT_EXPR:
      {
	tree src = TREE_OPERAND (value, 0);
	tree src_type = TREE_TYPE (src);
	tree dest_type = TREE_TYPE (value);

	/* Allow view-conversions from aggregate to non-aggregate type only
	   if the bit pattern is fully preserved afterwards; otherwise, the
	   RTL expander won't be able to apply a subsequent transformation
	   to the underlying constructor.  */
	if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
	  {
	    if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
	      return initializer_constant_valid_p_1 (value: src, endtype, cache);
	    else
	      return NULL_TREE;
	  }

	/* Allow all other kinds of view-conversion.  */
	return initializer_constant_valid_p_1 (value: src, endtype, cache);
      }

    CASE_CONVERT:
      {
	tree src = TREE_OPERAND (value, 0);
	tree src_type = TREE_TYPE (src);
	tree dest_type = TREE_TYPE (value);

	/* Allow conversions between pointer types and offset types.  */
	if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
	    || (TREE_CODE (dest_type) == OFFSET_TYPE
		&& TREE_CODE (src_type) == OFFSET_TYPE))
	  return initializer_constant_valid_p_1 (value: src, endtype, cache);

	/* Allow length-preserving conversions between integer types and
	   floating-point types.  */
	if (((INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type))
	     || (SCALAR_FLOAT_TYPE_P (dest_type)
		 && SCALAR_FLOAT_TYPE_P (src_type)))
	    && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
	  return initializer_constant_valid_p_1 (value: src, endtype, cache);

	/* Allow conversions between other integer types only if
	   explicit value.  Don't allow sign-extension to a type larger
	   than word and pointer, there aren't relocations that would
	   allow to sign extend it to a wider type.  */
	if (INTEGRAL_TYPE_P (dest_type)
	    && INTEGRAL_TYPE_P (src_type)
	    && (TYPE_UNSIGNED (src_type)
		|| TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
		|| TYPE_PRECISION (dest_type) <= BITS_PER_WORD
		|| TYPE_PRECISION (dest_type) <= POINTER_SIZE))
	  {
	    tree inner = initializer_constant_valid_p_1 (value: src, endtype, cache);
	    if (inner == null_pointer_node)
	      return null_pointer_node;
	    /* Relocatable or invalid: fall out of the switch and
	       return NULL_TREE below.  */
	    break;
	  }

	/* Allow (int) &foo provided int is as wide as a pointer.  */
	if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
	    && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
	  return initializer_constant_valid_p_1 (value: src, endtype, cache);

	/* Likewise conversions from int to pointers, but also allow
	   conversions from 0.  */
	if ((POINTER_TYPE_P (dest_type)
	     || TREE_CODE (dest_type) == OFFSET_TYPE)
	    && INTEGRAL_TYPE_P (src_type))
	  {
	    if (TREE_CODE (src) == INTEGER_CST
		&& TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
	      return null_pointer_node;
	    if (integer_zerop (src))
	      return null_pointer_node;
	    else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
	      return initializer_constant_valid_p_1 (value: src, endtype, cache);
	  }

	/* Allow conversions to struct or union types if the value
	   inside is okay.  */
	if (TREE_CODE (dest_type) == RECORD_TYPE
	    || TREE_CODE (dest_type) == UNION_TYPE)
	  return initializer_constant_valid_p_1 (value: src, endtype, cache);
      }
      break;

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* Any valid floating-point constants will have been folded by now;
	 with -frounding-math we hit this with addition of two constants.  */
      if (TREE_CODE (endtype) == REAL_TYPE)
	return NULL_TREE;
      if (cache && cache[0] == value)
	return cache[1];
      if (! INTEGRAL_TYPE_P (endtype)
	  || ! INTEGRAL_TYPE_P (TREE_TYPE (value))
	  || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
	{
	  /* NCACHE holds a (value, result) pair per operand.  */
	  tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
	  tree valid0
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					      endtype, cache: ncache);
	  tree valid1
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
					      endtype, cache: ncache + 2);
	  /* If either term is absolute, use the other term's relocation.  */
	  if (valid0 == null_pointer_node)
	    ret = valid1;
	  else if (valid1 == null_pointer_node)
	    ret = valid0;
	  /* Support narrowing pointer differences.  */
	  else
	    ret = narrowing_initializer_constant_valid_p (value, endtype,
							  cache: ncache);
	}
      else
	/* Support narrowing pointer differences.  */
	ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
      if (cache)
	{
	  cache[0] = value;
	  cache[1] = ret;
	}
      return ret;

    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (endtype) == REAL_TYPE)
	return NULL_TREE;
      if (cache && cache[0] == value)
	return cache[1];
      if (! INTEGRAL_TYPE_P (endtype)
	  || ! INTEGRAL_TYPE_P (TREE_TYPE (value))
	  || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
	{
	  /* NCACHE holds a (value, result) pair per operand.  */
	  tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
	  tree valid0
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					      endtype, cache: ncache);
	  tree valid1
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
					      endtype, cache: ncache + 2);
	  /* Win if second argument is absolute.  */
	  if (valid1 == null_pointer_node)
	    ret = valid0;
	  /* Win if both arguments have the same relocation.
	     Then the value is absolute.  */
	  else if (valid0 == valid1 && valid0 != 0)
	    ret = null_pointer_node;
	  /* Since GCC guarantees that string constants are unique in the
	     generated code, a subtraction between two copies of the same
	     constant string is absolute.  */
	  else if (valid0 && TREE_CODE (valid0) == STRING_CST
		   && valid1 && TREE_CODE (valid1) == STRING_CST
		   && operand_equal_p (valid0, valid1, flags: 1))
	    ret = null_pointer_node;
	  /* Support narrowing differences.  */
	  else
	    ret = narrowing_initializer_constant_valid_p (value, endtype,
							  cache: ncache);
	}
      else
	/* Support narrowing differences.  */
	ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
      if (cache)
	{
	  cache[0] = value;
	  cache[1] = ret;
	}
      return ret;

    default:
      break;
    }

  /* Anything not handled above is not a valid static initializer.  */
  return NULL_TREE;
}
5037 | |
5038 | /* Return nonzero if VALUE is a valid constant-valued expression |
5039 | for use in initializing a static variable; one that can be an |
5040 | element of a "constant" initializer. |
5041 | |
5042 | Return null_pointer_node if the value is absolute; |
5043 | if it is relocatable, return the variable that determines the relocation. |
5044 | We assume that VALUE has been folded as much as possible; |
5045 | therefore, we do not need to check for such things as |
5046 | arithmetic-combinations of integers. */ |
5047 | tree |
5048 | initializer_constant_valid_p (tree value, tree endtype, bool reverse) |
5049 | { |
5050 | tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL); |
5051 | |
5052 | /* An absolute value is required with reverse storage order. */ |
5053 | if (reloc |
5054 | && reloc != null_pointer_node |
5055 | && reverse |
5056 | && !AGGREGATE_TYPE_P (endtype) |
5057 | && !VECTOR_TYPE_P (endtype)) |
5058 | reloc = NULL_TREE; |
5059 | |
5060 | return reloc; |
5061 | } |
5062 | |
5063 | /* Return true if VALUE is a valid constant-valued expression |
5064 | for use in initializing a static bit-field; one that can be |
5065 | an element of a "constant" initializer. */ |
5066 | |
5067 | bool |
5068 | initializer_constant_valid_for_bitfield_p (const_tree value) |
5069 | { |
5070 | /* For bitfields we support integer constants or possibly nested aggregates |
5071 | of such. */ |
5072 | switch (TREE_CODE (value)) |
5073 | { |
5074 | case CONSTRUCTOR: |
5075 | { |
5076 | unsigned HOST_WIDE_INT idx; |
5077 | const_tree elt; |
5078 | |
5079 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt) |
5080 | if (!initializer_constant_valid_for_bitfield_p (value: elt)) |
5081 | return false; |
5082 | return true; |
5083 | } |
5084 | |
5085 | case INTEGER_CST: |
5086 | case REAL_CST: |
5087 | return true; |
5088 | |
5089 | case VIEW_CONVERT_EXPR: |
5090 | case NON_LVALUE_EXPR: |
5091 | return |
5092 | initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0)); |
5093 | |
5094 | default: |
5095 | break; |
5096 | } |
5097 | |
5098 | return false; |
5099 | } |
5100 | |
5101 | /* Check if a STRING_CST fits into the field. |
5102 | Tolerate only the case when the NUL termination |
5103 | does not fit into the field. */ |
5104 | |
5105 | static bool |
5106 | check_string_literal (tree string, unsigned HOST_WIDE_INT size) |
5107 | { |
5108 | tree type = TREE_TYPE (string); |
5109 | tree eltype = TREE_TYPE (type); |
5110 | unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype)); |
5111 | unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type)); |
5112 | int len = TREE_STRING_LENGTH (string); |
5113 | |
5114 | if (elts != 1 && elts != 2 && elts != 4) |
5115 | return false; |
5116 | if (len < 0 || len % elts != 0) |
5117 | return false; |
5118 | if (size < (unsigned)len) |
5119 | return false; |
5120 | if (mem_size != size) |
5121 | return false; |
5122 | return true; |
5123 | } |
5124 | |
/* output_constructor outer state of relevance in recursive calls, typically
   for nested aggregate bitfields.  Carries the partially-filled byte of
   the outer constructor across the recursion.  */

struct oc_outer_state {
  unsigned int bit_offset;   /* current position in ...  */
  int byte;                  /* ... the outer byte buffer.  */
};

/* Forward declaration; output_constructor and output_constant are
   mutually recursive.  */
static unsigned HOST_WIDE_INT
output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool,
		    oc_outer_state *);
5136 | |
5137 | /* Output assembler code for constant EXP, with no label. |
5138 | This includes the pseudo-op such as ".int" or ".byte", and a newline. |
5139 | Assumes output_addressed_constants has been done on EXP already. |
5140 | |
5141 | Generate at least SIZE bytes of assembler data, padding at the end |
5142 | with zeros if necessary. SIZE must always be specified. The returned |
5143 | value is the actual number of bytes of assembler data generated, which |
5144 | may be bigger than SIZE if the object contains a variable length field. |
5145 | |
5146 | SIZE is important for structure constructors, |
5147 | since trailing members may have been omitted from the constructor. |
5148 | It is also important for initialization of arrays from string constants |
5149 | since the full length of the string constant might not be wanted. |
5150 | It is also needed for initialization of unions, where the initializer's |
5151 | type is just one member, and that may not be as long as the union. |
5152 | |
5153 | There a case in which we would fail to output exactly SIZE bytes: |
5154 | for a structure constructor that wants to produce more than SIZE bytes. |
5155 | But such constructors will never be generated for any possible input. |
5156 | |
5157 | ALIGN is the alignment of the data in bits. |
5158 | |
5159 | If REVERSE is true, EXP is output in reverse storage order. */ |
5160 | |
5161 | static unsigned HOST_WIDE_INT |
5162 | output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align, |
5163 | bool reverse, bool merge_strings) |
5164 | { |
5165 | enum tree_code code; |
5166 | unsigned HOST_WIDE_INT thissize; |
5167 | rtx cst; |
5168 | |
5169 | if (size == 0 || flag_syntax_only) |
5170 | return size; |
5171 | |
5172 | /* See if we're trying to initialize a pointer in a non-default mode |
5173 | to the address of some declaration somewhere. If the target says |
5174 | the mode is valid for pointers, assume the target has a way of |
5175 | resolving it. */ |
5176 | if (TREE_CODE (exp) == NOP_EXPR |
5177 | && POINTER_TYPE_P (TREE_TYPE (exp)) |
5178 | && targetm.addr_space.valid_pointer_mode |
5179 | (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)), |
5180 | TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))))) |
5181 | { |
5182 | tree saved_type = TREE_TYPE (exp); |
5183 | |
5184 | /* Peel off any intermediate conversions-to-pointer for valid |
5185 | pointer modes. */ |
5186 | while (TREE_CODE (exp) == NOP_EXPR |
5187 | && POINTER_TYPE_P (TREE_TYPE (exp)) |
5188 | && targetm.addr_space.valid_pointer_mode |
5189 | (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)), |
5190 | TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))))) |
5191 | exp = TREE_OPERAND (exp, 0); |
5192 | |
5193 | /* If what we're left with is the address of something, we can |
5194 | convert the address to the final type and output it that |
5195 | way. */ |
5196 | if (TREE_CODE (exp) == ADDR_EXPR) |
5197 | exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0)); |
5198 | /* Likewise for constant ints. */ |
5199 | else if (TREE_CODE (exp) == INTEGER_CST) |
5200 | exp = fold_convert (saved_type, exp); |
5201 | |
5202 | } |
5203 | |
5204 | /* Eliminate any conversions since we'll be outputting the underlying |
5205 | constant. */ |
5206 | while (CONVERT_EXPR_P (exp) |
5207 | || TREE_CODE (exp) == NON_LVALUE_EXPR |
5208 | || TREE_CODE (exp) == VIEW_CONVERT_EXPR) |
5209 | { |
5210 | HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp)); |
5211 | HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))); |
5212 | |
5213 | /* Make sure eliminating the conversion is really a no-op, except with |
5214 | VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and |
5215 | union types to allow for Ada unchecked unions. */ |
5216 | if (type_size > op_size |
5217 | && TREE_CODE (exp) != VIEW_CONVERT_EXPR |
5218 | && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE) |
5219 | /* Keep the conversion. */ |
5220 | break; |
5221 | else |
5222 | exp = TREE_OPERAND (exp, 0); |
5223 | } |
5224 | |
5225 | code = TREE_CODE (TREE_TYPE (exp)); |
5226 | thissize = int_size_in_bytes (TREE_TYPE (exp)); |
5227 | |
5228 | /* Allow a constructor with no elements for any data type. |
5229 | This means to fill the space with zeros. */ |
5230 | if (TREE_CODE (exp) == CONSTRUCTOR |
5231 | && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp))) |
5232 | { |
5233 | assemble_zeros (size); |
5234 | return size; |
5235 | } |
5236 | |
5237 | if (TREE_CODE (exp) == FDESC_EXPR) |
5238 | { |
5239 | #ifdef ASM_OUTPUT_FDESC |
5240 | HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1)); |
5241 | tree decl = TREE_OPERAND (exp, 0); |
5242 | ASM_OUTPUT_FDESC (asm_out_file, decl, part); |
5243 | #else |
5244 | gcc_unreachable (); |
5245 | #endif |
5246 | return size; |
5247 | } |
5248 | |
5249 | /* Now output the underlying data. If we've handling the padding, return. |
5250 | Otherwise, break and ensure SIZE is the size written. */ |
5251 | switch (code) |
5252 | { |
5253 | case BOOLEAN_TYPE: |
5254 | case INTEGER_TYPE: |
5255 | case ENUMERAL_TYPE: |
5256 | case POINTER_TYPE: |
5257 | case REFERENCE_TYPE: |
5258 | case OFFSET_TYPE: |
5259 | case FIXED_POINT_TYPE: |
5260 | case NULLPTR_TYPE: |
5261 | cst = expand_expr (exp, NULL_RTX, VOIDmode, modifier: EXPAND_INITIALIZER); |
5262 | if (reverse) |
5263 | cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst); |
5264 | if (!assemble_integer (x: cst, MIN (size, thissize), align, force: 0)) |
5265 | error ("initializer for integer/fixed-point value is too complicated" ); |
5266 | break; |
5267 | |
5268 | case REAL_TYPE: |
5269 | gcc_assert (size == thissize); |
5270 | if (TREE_CODE (exp) != REAL_CST) |
5271 | error ("initializer for floating value is not a floating constant" ); |
5272 | else |
5273 | assemble_real (TREE_REAL_CST (exp), |
5274 | SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)), |
5275 | align, reverse); |
5276 | break; |
5277 | |
5278 | case COMPLEX_TYPE: |
5279 | output_constant (TREE_REALPART (exp), size: thissize / 2, align, |
5280 | reverse, merge_strings: false); |
5281 | output_constant (TREE_IMAGPART (exp), size: thissize / 2, |
5282 | align: min_align (a: align, BITS_PER_UNIT * (thissize / 2)), |
5283 | reverse, merge_strings: false); |
5284 | break; |
5285 | |
5286 | case BITINT_TYPE: |
5287 | if (TREE_CODE (exp) != INTEGER_CST) |
5288 | error ("initializer for %<_BitInt(%d)%> value is not an integer " |
5289 | "constant" , TYPE_PRECISION (TREE_TYPE (exp))); |
5290 | else |
5291 | { |
5292 | struct bitint_info info; |
5293 | tree type = TREE_TYPE (exp); |
5294 | bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info); |
5295 | gcc_assert (ok); |
5296 | scalar_int_mode limb_mode = as_a <scalar_int_mode> (m: info.limb_mode); |
5297 | if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (mode: limb_mode)) |
5298 | { |
5299 | cst = expand_expr (exp, NULL_RTX, VOIDmode, modifier: EXPAND_INITIALIZER); |
5300 | if (reverse) |
5301 | cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst); |
5302 | if (!assemble_integer (x: cst, MIN (size, thissize), align, force: 0)) |
5303 | error ("initializer for integer/fixed-point value is too " |
5304 | "complicated" ); |
5305 | break; |
5306 | } |
5307 | int prec = GET_MODE_PRECISION (mode: limb_mode); |
5308 | int cnt = CEIL (TYPE_PRECISION (type), prec); |
5309 | tree limb_type = build_nonstandard_integer_type (prec, 1); |
5310 | int elt_size = GET_MODE_SIZE (mode: limb_mode); |
5311 | unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (limb_mode)); |
5312 | thissize = 0; |
5313 | if (prec == HOST_BITS_PER_WIDE_INT) |
5314 | for (int i = 0; i < cnt; i++) |
5315 | { |
5316 | int idx = (info.big_endian ^ reverse) ? cnt - 1 - i : i; |
5317 | tree c; |
5318 | if (idx >= TREE_INT_CST_EXT_NUNITS (exp)) |
5319 | c = build_int_cst (limb_type, |
5320 | tree_int_cst_sgn (exp) < 0 ? -1 : 0); |
5321 | else |
5322 | c = build_int_cst (limb_type, |
5323 | TREE_INT_CST_ELT (exp, idx)); |
5324 | output_constant (exp: c, size: elt_size, align: nalign, reverse, merge_strings: false); |
5325 | thissize += elt_size; |
5326 | } |
5327 | else |
5328 | for (int i = 0; i < cnt; i++) |
5329 | { |
5330 | int idx = (info.big_endian ^ reverse) ? cnt - 1 - i : i; |
5331 | wide_int w = wi::rshift (x: wi::to_wide (t: exp), y: idx * prec, |
5332 | TYPE_SIGN (TREE_TYPE (exp))); |
5333 | tree c = wide_int_to_tree (type: limb_type, |
5334 | cst: wide_int::from (x: w, precision: prec, sgn: UNSIGNED)); |
5335 | output_constant (exp: c, size: elt_size, align: nalign, reverse, merge_strings: false); |
5336 | thissize += elt_size; |
5337 | } |
5338 | } |
5339 | break; |
5340 | |
5341 | case ARRAY_TYPE: |
5342 | case VECTOR_TYPE: |
5343 | switch (TREE_CODE (exp)) |
5344 | { |
5345 | case CONSTRUCTOR: |
5346 | return output_constructor (exp, size, align, reverse, NULL); |
5347 | case STRING_CST: |
5348 | thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp); |
5349 | if (merge_strings |
5350 | && (thissize == 0 |
5351 | || TREE_STRING_POINTER (exp) [thissize - 1] != '\0')) |
5352 | thissize++; |
5353 | gcc_checking_assert (check_string_literal (exp, size)); |
5354 | assemble_string (TREE_STRING_POINTER (exp), size: thissize); |
5355 | break; |
5356 | case VECTOR_CST: |
5357 | { |
5358 | scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); |
5359 | unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner)); |
5360 | int elt_size = GET_MODE_SIZE (mode: inner); |
5361 | output_constant (VECTOR_CST_ELT (exp, 0), size: elt_size, align, |
5362 | reverse, merge_strings: false); |
5363 | thissize = elt_size; |
5364 | /* Static constants must have a fixed size. */ |
5365 | unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant (); |
5366 | for (unsigned int i = 1; i < nunits; i++) |
5367 | { |
5368 | output_constant (VECTOR_CST_ELT (exp, i), size: elt_size, align: nalign, |
5369 | reverse, merge_strings: false); |
5370 | thissize += elt_size; |
5371 | } |
5372 | break; |
5373 | } |
5374 | default: |
5375 | gcc_unreachable (); |
5376 | } |
5377 | break; |
5378 | |
5379 | case RECORD_TYPE: |
5380 | case UNION_TYPE: |
5381 | gcc_assert (TREE_CODE (exp) == CONSTRUCTOR); |
5382 | return output_constructor (exp, size, align, reverse, NULL); |
5383 | |
5384 | case ERROR_MARK: |
5385 | return 0; |
5386 | |
5387 | default: |
5388 | gcc_unreachable (); |
5389 | } |
5390 | |
5391 | if (size > thissize) |
5392 | assemble_zeros (size: size - thissize); |
5393 | |
5394 | return size; |
5395 | } |
5396 | |
/* Subroutine of output_constructor, used for computing the size of
   arrays of unspecified length.  VAL must be a CONSTRUCTOR of an array
   type with an unspecified upper bound, or a STRING_CST.  Return the
   number of bytes the initializer occupies.  */

static unsigned HOST_WIDE_INT
array_size_for_constructor (tree val)
{
  tree max_index;
  unsigned HOST_WIDE_INT cnt;
  tree index, value, tmp;
  offset_int i;

  /* This code used to attempt to handle string constants that are not
     arrays of single-bytes, but nothing else does, so there's no point in
     doing it here.  */
  if (TREE_CODE (val) == STRING_CST)
    return TREE_STRING_LENGTH (val);

  /* Find the largest index initialized by any constructor element.  */
  max_index = NULL_TREE;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value)
    {
      /* A RANGE_EXPR initializes [low, high]; its upper bound is
	 operand 1.  */
      if (TREE_CODE (index) == RANGE_EXPR)
	index = TREE_OPERAND (index, 1);
      if (max_index == NULL_TREE || tree_int_cst_lt (t1: max_index, t2: index))
	max_index = index;
    }

  /* An empty constructor occupies no bytes.  */
  if (max_index == NULL_TREE)
    return 0;

  /* Compute the total number of array elements.  */
  tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
  i = wi::to_offset (t: max_index) - wi::to_offset (t: tmp) + 1;

  /* Multiply by the array element unit size to find number of bytes.  */
  i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));

  /* The result must fit in an unsigned HOST_WIDE_INT.  */
  gcc_assert (wi::fits_uhwi_p (i));
  return i.to_uhwi ();
}
5437 | |
/* Other datastructures + helpers for output_constructor.  */

/* output_constructor local state to support interaction with helpers.
   One instance lives on output_constructor's stack and is passed by
   pointer to each per-element helper.  */

struct oc_local_state {

  /* Received arguments.  */
  tree exp;                     /* Constructor expression.  */
  tree type;                    /* Type of constructor expression.  */
  unsigned HOST_WIDE_INT size;  /* # bytes to output - pad if necessary.  */
  unsigned int align;           /* Known initial alignment.  */
  tree min_index;               /* Lower bound if specified for an array.  */

  /* Output processing state.  */
  HOST_WIDE_INT total_bytes;  /* # bytes output so far / current position.  */
  int byte;                   /* Part of a bitfield byte yet to be output.  */
  int last_relative_index;    /* Implicit or explicit index of the last
				 array element output within a bitfield.  */
  bool byte_buffer_in_use;    /* Whether BYTE is in use.  */
  bool reverse;               /* Whether reverse storage order is in use.  */

  /* Current element.  */
  tree field;   /* Current field decl in a record.  */
  tree val;     /* Current element value.  */
  tree index;   /* Current element index.  */

};
5465 | |
/* Helper for output_constructor.  From the current LOCAL state, output a
   RANGE_EXPR element, i.e. one initializer value replicated over the
   index range [lo_index, hi_index].  */

static void
output_constructor_array_range (oc_local_state *local)
{
  /* Perform the index calculation in modulo arithmetic but
     sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
     but we are using an unsigned sizetype.  */
  unsigned prec = TYPE_PRECISION (sizetype);
  offset_int idx = wi::sext (x: wi::to_offset (TREE_OPERAND (local->index, 0))
			     - wi::to_offset (t: local->min_index), offset: prec);
  tree valtype = TREE_TYPE (local->val);
  /* Byte offset of the first element of the range within the
     constructor.  */
  HOST_WIDE_INT fieldpos
    = (idx * wi::to_offset (TYPE_SIZE_UNIT (valtype))).to_short_addr ();

  /* Advance to offset of this element.  */
  if (fieldpos > local->total_bytes)
    {
      assemble_zeros (size: fieldpos - local->total_bytes);
      local->total_bytes = fieldpos;
    }
  else
    /* Must not go backwards.  */
    gcc_assert (fieldpos == local->total_bytes);

  unsigned HOST_WIDE_INT fieldsize
    = int_size_in_bytes (TREE_TYPE (local->type));

  /* Bounds of the range, both inclusive.  */
  HOST_WIDE_INT lo_index
    = tree_to_shwi (TREE_OPERAND (local->index, 0));
  HOST_WIDE_INT hi_index
    = tree_to_shwi (TREE_OPERAND (local->index, 1));
  HOST_WIDE_INT index;

  unsigned int align2
    = min_align (a: local->align, b: fieldsize * BITS_PER_UNIT);

  /* Emit one copy of the value per index in the range, tracking the
     bytes written.  */
  for (index = lo_index; index <= hi_index; index++)
    {
      /* Output the element's initial value.  */
      if (local->val == NULL_TREE)
	assemble_zeros (size: fieldsize);
      else
	fieldsize = output_constant (exp: local->val, size: fieldsize, align: align2,
				     reverse: local->reverse, merge_strings: false);

      /* Count its size.  */
      local->total_bytes += fieldsize;
    }
}
5517 | |
/* Helper for output_constructor.  From the current LOCAL state, output a
   field element that is not true bitfield or part of an outer one.
   Such an element is known to be byte aligned and a whole number of
   bytes long.  */

static void
output_constructor_regular_field (oc_local_state *local)
{
  /* Field size and position.  Since this structure is static, we know the
     positions are constant.  */
  unsigned HOST_WIDE_INT fieldsize;
  HOST_WIDE_INT fieldpos;

  unsigned int align2;

  /* Output any buffered-up bit-fields preceding this element.  */
  if (local->byte_buffer_in_use)
    {
      assemble_integer (GEN_INT (local->byte), size: 1, BITS_PER_UNIT, force: 1);
      local->total_bytes++;
      local->byte_buffer_in_use = false;
    }

  /* Compute the byte position of the element: from an explicit array
     index, from the FIELD_DECL position, or 0 by default.  */
  if (local->index != NULL_TREE)
    {
      /* Perform the index calculation in modulo arithmetic but
	 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
	 but we are using an unsigned sizetype.  */
      unsigned prec = TYPE_PRECISION (sizetype);
      offset_int idx = wi::sext (x: wi::to_offset (t: local->index)
				 - wi::to_offset (t: local->min_index), offset: prec);
      fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
	.to_short_addr ();
    }
  else if (local->field != NULL_TREE)
    fieldpos = int_byte_position (local->field);
  else
    fieldpos = 0;

  /* Advance to offset of this element.
     Note no alignment needed in an array, since that is guaranteed
     if each element has the proper size.  */
  if (local->field != NULL_TREE || local->index != NULL_TREE)
    {
      if (fieldpos > local->total_bytes)
	{
	  assemble_zeros (size: fieldpos - local->total_bytes);
	  local->total_bytes = fieldpos;
	}
      else
	/* Must not go backwards.  */
	gcc_assert (fieldpos == local->total_bytes);
    }

  /* Find the alignment of this element.  */
  align2 = min_align (a: local->align, BITS_PER_UNIT * fieldpos);

  /* Determine size this element should occupy.  */
  if (local->field)
    {
      fieldsize = 0;

      /* If this is an array with an unspecified upper bound,
	 the initializer determines the size.  */
      /* ??? This ought to only checked if DECL_SIZE_UNIT is NULL,
	 but we cannot do this until the deprecated support for
	 initializing zero-length array members is removed.  */
      if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
	  && (!TYPE_DOMAIN (TREE_TYPE (local->field))
	      || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
	{
	  unsigned HOST_WIDE_INT fldsize
	    = array_size_for_constructor (val: local->val);
	  fieldsize = int_size_in_bytes (TREE_TYPE (local->val));
	  /* In most cases fieldsize == fldsize as the size of the initializer
	     determines how many elements the flexible array member has.  For
	     C++ fldsize can be smaller though, if the last or several last or
	     all initializers of the flexible array member have side-effects
	     and the FE splits them into dynamic initialization.  */
	  gcc_checking_assert (fieldsize >= fldsize);
	  /* Given a non-empty initialization, this field had better
	     be last.  Given a flexible array member, the next field
	     on the chain is a TYPE_DECL of the enclosing struct.  */
	  const_tree next = DECL_CHAIN (local->field);
	  gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
	}
      else
	fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
    }
  else
    fieldsize = int_size_in_bytes (TREE_TYPE (local->type));

  /* Output the element's initial value.  */
  if (local->val == NULL_TREE)
    assemble_zeros (size: fieldsize);
  else
    fieldsize = output_constant (exp: local->val, size: fieldsize, align: align2,
				 reverse: local->reverse, merge_strings: false);

  /* Count its size.  */
  local->total_bytes += fieldsize;
}
5618 | |
/* Helper for output_constructor.  From the LOCAL state, output an element
   that is a true bitfield or part of an outer one.  BIT_OFFSET is the offset
   from the start of a possibly ongoing outer byte buffer.  Partial bytes
   accumulate in LOCAL->byte and are flushed as they fill.  */

static void
output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
{
  /* Bit size of this element.  */
  HOST_WIDE_INT ebitsize
    = (local->field
       ? tree_to_uhwi (DECL_SIZE (local->field))
       : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));

  /* Relative index of this element if this is an array component.  */
  HOST_WIDE_INT relative_index
    = (local->field
       ? 0
       : (local->index
	  ? tree_to_uhwi (local->index) - tree_to_uhwi (local->min_index)
	  : local->last_relative_index + 1));

  /* Bit position of this element from the start of the containing
     constructor.  */
  HOST_WIDE_INT constructor_relative_ebitpos
    = (local->field
       ? int_bit_position (field: local->field)
       : ebitsize * relative_index);

  /* Bit position of this element from the start of a possibly ongoing
     outer byte buffer.  */
  HOST_WIDE_INT byte_relative_ebitpos
    = bit_offset + constructor_relative_ebitpos;

  /* From the start of a possibly ongoing outer byte buffer, offsets to
     the first bit of this element and to the first bit past the end of
     this element.  */
  HOST_WIDE_INT next_offset = byte_relative_ebitpos;
  HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;

  local->last_relative_index = relative_index;

  /* A missing value initializes the bits to zero.  */
  if (local->val == NULL_TREE)
    local->val = integer_zero_node;

  /* Strip wrappers that do not change the value.  */
  while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
	 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
    local->val = TREE_OPERAND (local->val, 0);

  /* Only INTEGER_CST and nested CONSTRUCTOR values are supported here;
     the caller converts other constants (e.g. REAL_CST) beforehand.  */
  if (TREE_CODE (local->val) != INTEGER_CST
      && TREE_CODE (local->val) != CONSTRUCTOR)
    {
      error ("invalid initial value for member %qE" , DECL_NAME (local->field));
      return;
    }

  /* If this field does not start in this (or next) byte, skip some bytes.  */
  if (next_offset / BITS_PER_UNIT != local->total_bytes)
    {
      /* Output remnant of any bit field in previous bytes.  */
      if (local->byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local->byte), size: 1, BITS_PER_UNIT, force: 1);
	  local->total_bytes++;
	  local->byte_buffer_in_use = false;
	}

      /* If still not at proper byte, advance to there.  */
      if (next_offset / BITS_PER_UNIT != local->total_bytes)
	{
	  gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
	  assemble_zeros (size: next_offset / BITS_PER_UNIT - local->total_bytes);
	  local->total_bytes = next_offset / BITS_PER_UNIT;
	}
    }

  /* Set up the buffer if necessary.  */
  if (!local->byte_buffer_in_use)
    {
      local->byte = 0;
      if (ebitsize > 0)
	local->byte_buffer_in_use = true;
    }

  /* If this is nested constructor, recurse passing the bit offset and the
     pending data, then retrieve the new pending data afterwards.  */
  if (TREE_CODE (local->val) == CONSTRUCTOR)
    {
      oc_outer_state temp_state;
      temp_state.bit_offset = next_offset % BITS_PER_UNIT;
      temp_state.byte = local->byte;
      local->total_bytes
	+= output_constructor (local->val, 0, 0, local->reverse, &temp_state);
      local->byte = temp_state.byte;
      return;
    }

  /* Otherwise, we must split the element into pieces that fall within
     separate bytes, and combine each byte with previous or following
     bit-fields.  */
  while (next_offset < end_offset)
    {
      int this_time;
      int shift;
      unsigned HOST_WIDE_INT value;
      HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
      HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;

      /* Advance from byte to byte within this element when necessary.  */
      while (next_byte != local->total_bytes)
	{
	  assemble_integer (GEN_INT (local->byte), size: 1, BITS_PER_UNIT, force: 1);
	  local->total_bytes++;
	  local->byte = 0;
	}

      /* Number of bits we can process at once (all part of the same byte).  */
      this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
      if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  /* For big-endian data, take the most significant bits (of the
	     bits that are significant) first and put them into bytes from
	     the most significant end.  */
	  shift = end_offset - next_offset - this_time;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    {
	      const int end = shift + this_time - 1;
	      shift = end & -HOST_BITS_PER_WIDE_INT;
	      this_time = end - shift + 1;
	    }

	  /* Now get the bits we want to insert.  */
	  value = wi::extract_uhwi (x: wi::to_widest (t: local->val),
				    bitpos: shift, width: this_time);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
	}
      else
	{
	  /* On little-endian machines, take the least significant bits of
	     the value first and pack them starting at the least significant
	     bits of the bytes.  */
	  shift = next_offset - byte_relative_ebitpos;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    this_time
	      = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));

	  /* Now get the bits we want to insert.  */
	  value = wi::extract_uhwi (x: wi::to_widest (t: local->val),
				    bitpos: shift, width: this_time);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= value << next_bit;
	}

      next_offset += this_time;
      local->byte_buffer_in_use = true;
    }
}
5790 | |
/* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
   Generate at least SIZE bytes, padding if necessary.  OUTER designates the
   caller output state of relevance in recursive invocations.  Return the
   number of bytes actually output.  */

static unsigned HOST_WIDE_INT
output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
		    bool reverse, oc_outer_state *outer)
{
  unsigned HOST_WIDE_INT cnt;
  constructor_elt *ce;
  oc_local_state local;

  /* Setup our local state to communicate with helpers.  */
  local.exp = exp;
  local.type = TREE_TYPE (exp);
  local.size = size;
  local.align = align;
  if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
    local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
  else
    local.min_index = integer_zero_node;

  local.total_bytes = 0;
  /* When invoked recursively, continue filling the caller's partial
     byte rather than starting a fresh one.  */
  local.byte_buffer_in_use = outer != NULL;
  local.byte = outer ? outer->byte : 0;
  local.last_relative_index = -1;
  /* The storage order is specified for every aggregate type.  */
  if (AGGREGATE_TYPE_P (local.type))
    local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
  else
    local.reverse = reverse;

  gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);

  /* As CE goes through the elements of the constant, FIELD goes through the
     structure fields if the constant is a structure.  If the constant is a
     union, we override this by getting the field from the TREE_LIST element.
     But the constant could also be an array.  Then FIELD is zero.

     There is always a maximum of one element in the chain LINK for unions
     (even if the initializer in a source program incorrectly contains
     more than one).  */

  if (TREE_CODE (local.type) == RECORD_TYPE)
    local.field = TYPE_FIELDS (local.type);
  else
    local.field = NULL_TREE;

  for (cnt = 0;
       vec_safe_iterate (CONSTRUCTOR_ELTS (exp), ix: cnt, ptr: &ce);
       cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
    {
      local.val = ce->value;
      local.index = NULL_TREE;

      /* The element in a union constructor specifies the proper field
	 or index.  */
      if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
	local.field = ce->index;

      else if (TREE_CODE (local.type) == ARRAY_TYPE)
	local.index = ce->index;

      /* With -fverbose-asm, annotate the output with the field name.  */
      if (local.field && flag_verbose_asm)
	fprintf (stream: asm_out_file, format: "%s %s:\n" ,
		 ASM_COMMENT_START,
		 DECL_NAME (local.field)
		 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
		 : "<anonymous>" );

      /* Eliminate the marker that makes a cast not be an lvalue.  */
      if (local.val != NULL_TREE)
	STRIP_NOPS (local.val);

      /* Output the current element, using the appropriate helper ...  */

      /* For an array slice not part of an outer bitfield.  */
      if (!outer
	  && local.index != NULL_TREE
	  && TREE_CODE (local.index) == RANGE_EXPR)
	output_constructor_array_range (local: &local);

      /* For a field that is neither a true bitfield nor part of an outer one,
	 known to be at least byte aligned and multiple-of-bytes long.  */
      else if (!outer
	       && (local.field == NULL_TREE
		   || !CONSTRUCTOR_BITFIELD_P (local.field)))
	output_constructor_regular_field (local: &local);

      /* For a true bitfield or part of an outer one.  Only INTEGER_CSTs are
	 supported for scalar fields, so we may need to convert first.  */
      else
	{
	  if (TREE_CODE (local.val) == REAL_CST)
	    local.val
	      = fold_unary (VIEW_CONVERT_EXPR,
			    build_nonstandard_integer_type
			    (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
			    local.val);
	  output_constructor_bitfield (local: &local, bit_offset: outer ? outer->bit_offset : 0);
	}
    }

  /* If we are not at toplevel, save the pending data for our caller.
     Otherwise output the pending data and padding zeros as needed.  */
  if (outer)
    outer->byte = local.byte;
  else
    {
      /* Flush any partially-filled bit-field byte.  */
      if (local.byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local.byte), size: 1, BITS_PER_UNIT, force: 1);
	  local.total_bytes++;
	}

      /* Pad with zeros up to the requested SIZE.  */
      if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
	{
	  assemble_zeros (size: local.size - local.total_bytes);
	  local.total_bytes = local.size;
	}
    }

  return local.total_bytes;
}
5915 | |
5916 | /* Mark DECL as weak. */ |
5917 | |
5918 | static void |
5919 | mark_weak (tree decl) |
5920 | { |
5921 | if (DECL_WEAK (decl)) |
5922 | return; |
5923 | |
5924 | struct symtab_node *n = symtab_node::get (decl); |
5925 | if (n && n->refuse_visibility_changes) |
5926 | error ("%qD declared weak after being used" , decl); |
5927 | DECL_WEAK (decl) = 1; |
5928 | |
5929 | if (DECL_RTL_SET_P (decl) |
5930 | && MEM_P (DECL_RTL (decl)) |
5931 | && XEXP (DECL_RTL (decl), 0) |
5932 | && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF) |
5933 | SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1; |
5934 | } |
5935 | |
5936 | /* Merge weak status between NEWDECL and OLDDECL. */ |
5937 | |
5938 | void |
5939 | merge_weak (tree newdecl, tree olddecl) |
5940 | { |
5941 | if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl)) |
5942 | { |
5943 | if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK) |
5944 | { |
5945 | tree *pwd; |
5946 | /* We put the NEWDECL on the weak_decls list at some point |
5947 | and OLDDECL as well. Keep just OLDDECL on the list. */ |
5948 | for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd)) |
5949 | if (TREE_VALUE (*pwd) == newdecl) |
5950 | { |
5951 | *pwd = TREE_CHAIN (*pwd); |
5952 | break; |
5953 | } |
5954 | } |
5955 | return; |
5956 | } |
5957 | |
5958 | if (DECL_WEAK (newdecl)) |
5959 | { |
5960 | tree wd; |
5961 | |
5962 | /* NEWDECL is weak, but OLDDECL is not. */ |
5963 | |
5964 | /* If we already output the OLDDECL, we're in trouble; we can't |
5965 | go back and make it weak. This should never happen in |
5966 | unit-at-a-time compilation. */ |
5967 | gcc_assert (!TREE_ASM_WRITTEN (olddecl)); |
5968 | |
5969 | /* If we've already generated rtl referencing OLDDECL, we may |
5970 | have done so in a way that will not function properly with |
5971 | a weak symbol. Again in unit-at-a-time this should be |
5972 | impossible. */ |
5973 | gcc_assert (!TREE_USED (olddecl) |
5974 | || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl))); |
5975 | |
5976 | /* PR 49899: You cannot convert a static function into a weak, public function. */ |
5977 | if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl)) |
5978 | error ("weak declaration of %q+D being applied to a already " |
5979 | "existing, static definition" , newdecl); |
5980 | |
5981 | if (TARGET_SUPPORTS_WEAK) |
5982 | { |
5983 | /* We put the NEWDECL on the weak_decls list at some point. |
5984 | Replace it with the OLDDECL. */ |
5985 | for (wd = weak_decls; wd; wd = TREE_CHAIN (wd)) |
5986 | if (TREE_VALUE (wd) == newdecl) |
5987 | { |
5988 | TREE_VALUE (wd) = olddecl; |
5989 | break; |
5990 | } |
5991 | /* We may not find the entry on the list. If NEWDECL is a |
5992 | weak alias, then we will have already called |
5993 | globalize_decl to remove the entry; in that case, we do |
5994 | not need to do anything. */ |
5995 | } |
5996 | |
5997 | /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. */ |
5998 | mark_weak (decl: olddecl); |
5999 | } |
6000 | else |
6001 | /* OLDDECL was weak, but NEWDECL was not explicitly marked as |
6002 | weak. Just update NEWDECL to indicate that it's weak too. */ |
6003 | mark_weak (decl: newdecl); |
6004 | } |
6005 | |
/* Declare DECL to be a weak symbol.  Errors out for non-public decls,
   warns when the target lacks weak-symbol support, and records a
   "weak" attribute on the decl.  */

void
declare_weak (tree decl)
{
  /* With -fsyntax-only, TREE_ASM_WRITTEN might be set on certain function
     decls earlier than normally, but as with -fsyntax-only nothing is really
     emitted, there is no harm in marking it weak later.  */
  gcc_assert (TREE_CODE (decl) != FUNCTION_DECL
	      || !TREE_ASM_WRITTEN (decl)
	      || flag_syntax_only);
  /* Weakness only makes sense for externally visible symbols.  */
  if (! TREE_PUBLIC (decl))
    {
      error ("weak declaration of %q+D must be public" , decl);
      return;
    }
  else if (!TARGET_SUPPORTS_WEAK)
    warning (0, "weak declaration of %q+D not supported" , decl);

  mark_weak (decl);
  /* Also record the weakness in DECL_ATTRIBUTES, unless a "weak"
     attribute is already present.  */
  if (!lookup_attribute (attr_name: "weak" , DECL_ATTRIBUTES (decl)))
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("weak" ), NULL, DECL_ATTRIBUTES (decl));
}
6030 | |
/* Emit the target's weak-symbol directive for DECL, dispatching on
   whichever of ASM_WEAKEN_DECL, ASM_WEAKEN_LABEL or
   ASM_OUTPUT_WEAK_ALIAS the configuration defines.  Helper for
   weak_finish.  */

static void
weak_finish_1 (tree decl)
{
#if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
  const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
#endif

  /* Unused decls need no directive.  */
  if (! TREE_USED (decl))
    return;

#ifdef ASM_WEAKEN_DECL
  ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
#else
#ifdef ASM_WEAKEN_LABEL
  ASM_WEAKEN_LABEL (asm_out_file, name);
#else
#ifdef ASM_OUTPUT_WEAK_ALIAS
  {
    /* This configuration can only emit weak aliases, not plain weak
       symbols; warn once per compilation and do nothing.  */
    static bool warn_once = 0;
    if (! warn_once)
      {
	warning (0, "only weak aliases are supported in this configuration" );
	warn_once = 1;
      }
    return;
  }
#endif
#endif
#endif
}
6061 | |
6062 | /* Fiven an assembly name, find the decl it is associated with. */ |
6063 | static tree |
6064 | find_decl (tree target) |
6065 | { |
6066 | symtab_node *node = symtab_node::get_for_asmname (asmname: target); |
6067 | if (node) |
6068 | return node->decl; |
6069 | return NULL_TREE; |
6070 | } |
6071 | |
/* This TREE_LIST contains weakref targets.  TREE_PURPOSE is the alias
   decl; TREE_VALUE identifies its target (see weak_finish).  */

static GTY(()) tree weakref_targets;
6075 | |
/* Emit any pending weak declarations: first resolve the weakref_targets
   list (emitting directives for referenced targets and pruning the
   weak_decls list of duplicates), then emit a weak directive for every
   decl remaining on weak_decls.  */

void
weak_finish (void)
{
  tree t;

  for (t = weakref_targets; t; t = TREE_CHAIN (t))
    {
      tree alias_decl = TREE_PURPOSE (t);
      tree target = ultimate_transparent_alias_target (alias: &TREE_VALUE (t));

      if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
	  || TREE_SYMBOL_REFERENCED (target))
	/* Remove alias_decl from the weak list, but leave entries for
	   the target alone.  */
	target = NULL_TREE;
#ifndef ASM_OUTPUT_WEAKREF
      else if (! TREE_SYMBOL_REFERENCED (target))
	{
	  /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
	     defined, otherwise we and weak_finish_1 would use
	     different macros.  */
# if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
	  ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
# else
	  tree decl = find_decl (target);

	  /* No decl exists for the target; synthesize an artificial
	     external decl so weak_finish_1 has something to emit.  */
	  if (! decl)
	    {
	      decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
				 TREE_CODE (alias_decl), target,
				 TREE_TYPE (alias_decl));

	      DECL_EXTERNAL (decl) = 1;
	      TREE_PUBLIC (decl) = 1;
	      DECL_ARTIFICIAL (decl) = 1;
	      TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
	      TREE_USED (decl) = 1;
	    }

	  weak_finish_1 (decl);
# endif
	}
#endif

      {
	tree *p;
	tree t2;

	/* Remove the alias and the target from the pending weak list
	   so that we do not emit any .weak directives for the former,
	   nor multiple .weak directives for the latter.  */
	for (p = &weak_decls; (t2 = *p) ; )
	  {
	    if (TREE_VALUE (t2) == alias_decl
		|| target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
	      *p = TREE_CHAIN (t2);
	    else
	      p = &TREE_CHAIN (t2);
	  }

	/* Remove other weakrefs to the same target, to speed things up.  */
	for (p = &TREE_CHAIN (t); (t2 = *p) ; )
	  {
	    if (target == ultimate_transparent_alias_target (alias: &TREE_VALUE (t2)))
	      *p = TREE_CHAIN (t2);
	    else
	      p = &TREE_CHAIN (t2);
	  }
      }
    }

  /* Emit a weak directive for each decl still pending.  */
  for (t = weak_decls; t; t = TREE_CHAIN (t))
    {
      tree decl = TREE_VALUE (t);

      weak_finish_1 (decl);
    }
}
6156 | |
6157 | /* Emit the assembly bits to indicate that DECL is globally visible. */ |
6158 | |
static void
globalize_decl (tree decl)
{
  /* Weak symbols are implicitly global, so prefer the weakening
     directive when the target provides one.  */
#if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
  if (DECL_WEAK (decl))
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
      tree *p, t;

#ifdef ASM_WEAKEN_DECL
      ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
#else
      ASM_WEAKEN_LABEL (asm_out_file, name);
#endif

      /* Remove this function from the pending weak list so that
	 we do not emit multiple .weak directives for it.  */
      for (p = &weak_decls; (t = *p) ; )
	{
	  if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
	    *p = TREE_CHAIN (t);
	  else
	    p = &TREE_CHAIN (t);
	}

      /* Remove weakrefs to the same target from the pending weakref
	 list, for the same reason.  */
      for (p = &weakref_targets; (t = *p) ; )
	{
	  if (DECL_ASSEMBLER_NAME (decl)
	      == ultimate_transparent_alias_target (alias: &TREE_VALUE (t)))
	    *p = TREE_CHAIN (t);
	  else
	    p = &TREE_CHAIN (t);
	}

      return;
    }
#endif

  /* Not weak (or no weakening support): plain .globl or equivalent.  */
  targetm.asm_out.globalize_decl_name (asm_out_file, decl);
}
6202 | |
/* Queue of (decl, target) alias pairs whose emission is deferred until
   the target symbol has been written; filled by assemble_alias.  */
vec<alias_pair, va_gc> *alias_pairs;
6204 | |
6205 | /* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF |
6206 | or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose |
6207 | tree node is DECL to have the value of the tree node TARGET. */ |
6208 | |
6209 | void |
6210 | do_assemble_alias (tree decl, tree target) |
6211 | { |
6212 | tree id; |
6213 | |
6214 | /* Emulated TLS had better not get this var. */ |
6215 | gcc_assert (!(!targetm.have_tls |
6216 | && VAR_P (decl) |
6217 | && DECL_THREAD_LOCAL_P (decl))); |
6218 | |
6219 | if (TREE_ASM_WRITTEN (decl)) |
6220 | return; |
6221 | |
6222 | id = DECL_ASSEMBLER_NAME (decl); |
6223 | ultimate_transparent_alias_target (alias: &id); |
6224 | ultimate_transparent_alias_target (alias: &target); |
6225 | |
6226 | /* We must force creation of DECL_RTL for debug info generation, even though |
6227 | we don't use it here. */ |
6228 | make_decl_rtl (decl); |
6229 | |
6230 | TREE_ASM_WRITTEN (decl) = 1; |
6231 | TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1; |
6232 | TREE_ASM_WRITTEN (id) = 1; |
6233 | |
6234 | if (lookup_attribute (attr_name: "weakref" , DECL_ATTRIBUTES (decl))) |
6235 | { |
6236 | if (!TREE_SYMBOL_REFERENCED (target)) |
6237 | weakref_targets = tree_cons (decl, target, weakref_targets); |
6238 | |
6239 | #ifdef ASM_OUTPUT_WEAKREF |
6240 | ASM_OUTPUT_WEAKREF (asm_out_file, decl, |
6241 | IDENTIFIER_POINTER (id), |
6242 | IDENTIFIER_POINTER (target)); |
6243 | #else |
6244 | if (!TARGET_SUPPORTS_WEAK) |
6245 | { |
6246 | error_at (DECL_SOURCE_LOCATION (decl), |
6247 | "%qs is not supported in this configuration" , "weakref " ); |
6248 | return; |
6249 | } |
6250 | #endif |
6251 | return; |
6252 | } |
6253 | |
6254 | #ifdef ASM_OUTPUT_DEF |
6255 | tree orig_decl = decl; |
6256 | |
6257 | /* Make name accessible from other files, if appropriate. */ |
6258 | |
6259 | if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl)) |
6260 | { |
6261 | globalize_decl (decl); |
6262 | maybe_assemble_visibility (decl); |
6263 | } |
6264 | if (TREE_CODE (decl) == FUNCTION_DECL |
6265 | && cgraph_node::get (decl)->ifunc_resolver) |
6266 | { |
6267 | #if defined (ASM_OUTPUT_TYPE_DIRECTIVE) |
6268 | if (targetm.has_ifunc_p ()) |
6269 | ASM_OUTPUT_TYPE_DIRECTIVE |
6270 | (asm_out_file, IDENTIFIER_POINTER (id), |
6271 | IFUNC_ASM_TYPE); |
6272 | else |
6273 | #endif |
6274 | error_at (DECL_SOURCE_LOCATION (decl), |
6275 | "%qs is not supported on this target" , "ifunc" ); |
6276 | } |
6277 | |
6278 | # ifdef ASM_OUTPUT_DEF_FROM_DECLS |
6279 | ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target); |
6280 | # else |
6281 | ASM_OUTPUT_DEF (asm_out_file, |
6282 | IDENTIFIER_POINTER (id), |
6283 | IDENTIFIER_POINTER (target)); |
6284 | # endif |
6285 | /* If symbol aliases aren't actually supported... */ |
6286 | if (!TARGET_SUPPORTS_ALIASES) |
6287 | /* ..., 'ASM_OUTPUT_DEF{,_FROM_DECLS}' better have raised an error. */ |
6288 | gcc_checking_assert (seen_error ()); |
6289 | #elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL) |
6290 | { |
6291 | const char *name; |
6292 | tree *p, t; |
6293 | |
6294 | name = IDENTIFIER_POINTER (id); |
6295 | # ifdef ASM_WEAKEN_DECL |
6296 | ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target)); |
6297 | # else |
6298 | ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target)); |
6299 | # endif |
6300 | /* Remove this function from the pending weak list so that |
6301 | we do not emit multiple .weak directives for it. */ |
6302 | for (p = &weak_decls; (t = *p) ; ) |
6303 | if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)) |
6304 | || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t))) |
6305 | *p = TREE_CHAIN (t); |
6306 | else |
6307 | p = &TREE_CHAIN (t); |
6308 | |
6309 | /* Remove weakrefs to the same target from the pending weakref |
6310 | list, for the same reason. */ |
6311 | for (p = &weakref_targets; (t = *p) ; ) |
6312 | { |
6313 | if (id == ultimate_transparent_alias_target (&TREE_VALUE (t))) |
6314 | *p = TREE_CHAIN (t); |
6315 | else |
6316 | p = &TREE_CHAIN (t); |
6317 | } |
6318 | } |
6319 | #endif |
6320 | } |
6321 | |
6322 | /* Output .symver directive. */ |
6323 | |
void
do_assemble_symver (tree decl, tree target)
{
  /* Resolve transparent aliases so the directive names the real symbols.  */
  tree id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (alias: &id);
  ultimate_transparent_alias_target (alias: &target);
#ifdef ASM_OUTPUT_SYMVER_DIRECTIVE
  ASM_OUTPUT_SYMVER_DIRECTIVE (asm_out_file,
			       IDENTIFIER_POINTER (target),
			       IDENTIFIER_POINTER (id));
#else
  /* Only targets defining ASM_OUTPUT_SYMVER_DIRECTIVE (ELF) can honor
     the symver attribute.  */
  error ("symver is only supported on ELF platforms" );
#endif
}
6338 | |
6339 | /* Emit an assembler directive to make the symbol for DECL an alias to |
6340 | the symbol for TARGET. */ |
6341 | |
void
assemble_alias (tree decl, tree target)
{
  tree target_decl;

  if (lookup_attribute (attr_name: "weakref" , DECL_ATTRIBUTES (decl)))
    {
      /* Weakrefs must be local and must not (transitively) target
	 themselves; diagnose both before proceeding.  */
      tree alias = DECL_ASSEMBLER_NAME (decl);

      ultimate_transparent_alias_target (alias: &target);

      if (alias == target)
	error ("%qs symbol %q+D ultimately targets itself" , "weakref" , decl);
      if (TREE_PUBLIC (decl))
	error ("%qs symbol %q+D must have static linkage" , "weakref" , decl);
    }
  else if (!TARGET_SUPPORTS_ALIASES)
    {
# if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
      error_at (DECL_SOURCE_LOCATION (decl),
		"alias definitions not supported in this configuration" );
      TREE_ASM_WRITTEN (decl) = 1;
      return;
# else
      /* Only weak aliases can be emulated here; anything else is an
	 error (marking the decl written suppresses follow-on output).  */
      if (!DECL_WEAK (decl))
	{
	  /* NB: ifunc_resolver isn't set when an error is detected.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL
	      && lookup_attribute (attr_name: "ifunc" , DECL_ATTRIBUTES (decl)))
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "%qs is not supported in this configuration" , "ifunc" );
	  else
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "only weak aliases are supported in this configuration" );
	  TREE_ASM_WRITTEN (decl) = 1;
	  return;
	}
# endif
      gcc_unreachable ();
    }
  TREE_USED (decl) = 1;

  /* Allow aliases to aliases.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    cgraph_node::get_create (decl)->alias = true;
  else
    varpool_node::get_create (decl)->alias = true;

  /* If the target has already been emitted, we don't have to queue the
     alias.  This saves a tad of memory.  */
  if (symtab->global_info_ready)
    target_decl = find_decl (target);
  else
    target_decl= NULL;
  if ((target_decl && TREE_ASM_WRITTEN (target_decl))
      || symtab->state >= EXPANSION)
    do_assemble_alias (decl, target);
  else
    {
      /* Target not yet emitted: defer via the alias_pairs queue.  */
      alias_pair p = {.decl: decl, .target: target};
      vec_safe_push (v&: alias_pairs, obj: p);
    }
}
6405 | |
6406 | /* Record and output a table of translations from original function |
6407 | to its transaction aware clone. Note that tm_pure functions are |
6408 | considered to be their own clone. */ |
6409 | |
/* Hasher for the original-function -> TM-clone map below; delegates to
   the generic tree_map helpers.  */
struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
{
  static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
  static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }

  /* Keep a cache entry across GC only while the key (the original
     function) is itself still marked live.  */
  static int
  keep_cache_entry (tree_map *&e)
  {
    return ggc_marked_p (e->base.from);
  }
};

/* Map from original function to its transactional clone; created lazily
   by record_tm_clone_pair and drained by finish_tm_clone_pairs.  */
static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
6423 | |
6424 | void |
6425 | record_tm_clone_pair (tree o, tree n) |
6426 | { |
6427 | struct tree_map **slot, *h; |
6428 | |
6429 | if (tm_clone_hash == NULL) |
6430 | tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (n: 32); |
6431 | |
6432 | h = ggc_alloc<tree_map> (); |
6433 | h->hash = htab_hash_pointer (o); |
6434 | h->base.from = o; |
6435 | h->to = n; |
6436 | |
6437 | slot = tm_clone_hash->find_slot_with_hash (comparable: h, hash: h->hash, insert: INSERT); |
6438 | *slot = h; |
6439 | } |
6440 | |
6441 | tree |
6442 | get_tm_clone_pair (tree o) |
6443 | { |
6444 | if (tm_clone_hash) |
6445 | { |
6446 | struct tree_map *h, in; |
6447 | |
6448 | in.base.from = o; |
6449 | in.hash = htab_hash_pointer (o); |
6450 | h = tm_clone_hash->find_with_hash (comparable: &in, hash: in.hash); |
6451 | if (h) |
6452 | return h->to; |
6453 | } |
6454 | return NULL_TREE; |
6455 | } |
6456 | |
/* One row of the .tm_clone_table, in sortable form.  */
struct tm_alias_pair
{
  unsigned int uid;  /* DECL_UID of FROM; stable sort key.  */
  tree from;	     /* Original function.  */
  tree to;	     /* Its transaction-aware clone.  */
};
6463 | |
6464 | |
6465 | /* Dump the actual pairs to the .tm_clone_table section. */ |
6466 | |
static void
dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
{
  unsigned i;
  tm_alias_pair *p;
  /* Switch to the clone-table section lazily, only if we emit a pair.  */
  bool switched = false;

  FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
    {
      tree src = p->from;
      tree dst = p->to;
      struct cgraph_node *src_n = cgraph_node::get (decl: src);
      struct cgraph_node *dst_n = cgraph_node::get (decl: dst);

      /* The function ipa_tm_create_version() marks the clone as needed if
	 the original function was needed.  But we also mark the clone as
	 needed if we ever called the clone indirectly through
	 TM_GETTMCLONE.  If neither of these are true, we didn't generate
	 a clone, and we didn't call it indirectly... no sense keeping it
	 in the clone table.  */
      if (!dst_n || !dst_n->definition)
	continue;

      /* This covers the case where we have optimized the original
	 function away, and only access the transactional clone.  */
      if (!src_n || !src_n->definition)
	continue;

      if (!switched)
	{
	  switch_to_section (targetm.asm_out.tm_clone_table_section ());
	  assemble_align (POINTER_SIZE);
	  switched = true;
	}

      /* Emit the pair as two adjacent pointers: original, then clone.  */
      assemble_integer (XEXP (DECL_RTL (src), 0),
			POINTER_SIZE_UNITS, POINTER_SIZE, force: 1);
      assemble_integer (XEXP (DECL_RTL (dst), 0),
			POINTER_SIZE_UNITS, POINTER_SIZE, force: 1);
    }
}
6508 | |
6509 | /* Provide a default for the tm_clone_table section. */ |
6510 | |
section *
default_clone_table_section (void)
{
  /* NOTE(review): the literal 3 is passed as the RELOC argument to
     get_named_section — presumably "initializer may contain runtime
     relocations"; confirm against get_named_section's contract.  */
  return get_named_section (NULL, name: ".tm_clone_table" , reloc: 3);
}
6516 | |
6517 | /* Helper comparison function for qsorting by the DECL_UID stored in |
6518 | alias_pair->emitted_diags. */ |
6519 | |
6520 | static int |
6521 | tm_alias_pair_cmp (const void *x, const void *y) |
6522 | { |
6523 | const tm_alias_pair *p1 = (const tm_alias_pair *) x; |
6524 | const tm_alias_pair *p2 = (const tm_alias_pair *) y; |
6525 | if (p1->uid < p2->uid) |
6526 | return -1; |
6527 | if (p1->uid > p2->uid) |
6528 | return 1; |
6529 | return 0; |
6530 | } |
6531 | |
void
finish_tm_clone_pairs (void)
{
  vec<tm_alias_pair> tm_alias_pairs = vNULL;

  if (tm_clone_hash == NULL)
    return;

  /* We need a deterministic order for the .tm_clone_table, otherwise
     we will get bootstrap comparison failures, so dump the hash table
     to a vector, sort it, and dump the vector.  */

  /* Dump the hashtable to a vector.  */
  tree_map *map;
  hash_table<tm_clone_hasher>::iterator iter;
  FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
    {
      tm_alias_pair p = {DECL_UID (map->base.from), .from: map->base.from, .to: map->to};
      tm_alias_pairs.safe_push (obj: p);
    }
  /* Sort it.  */
  tm_alias_pairs.qsort (tm_alias_pair_cmp);

  /* Dump it.  */
  dump_tm_clone_pairs (tm_alias_pairs);

  /* The map is one-shot: drop it so later calls are no-ops.  */
  tm_clone_hash->empty ();
  tm_clone_hash = NULL;
  tm_alias_pairs.release ();
}
6562 | |
6563 | |
6564 | /* Emit an assembler directive to set symbol for DECL visibility to |
6565 | the visibility type VIS, which must not be VISIBILITY_DEFAULT. */ |
6566 | |
void
default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
			     int vis ATTRIBUTE_UNUSED)
{
#ifdef HAVE_GAS_HIDDEN
  /* Indexed by symbol_visibility; VISIBILITY_DEFAULT (0) never reaches
     here per this function's contract.  */
  static const char * const visibility_types[] = {
    NULL, "protected" , "hidden" , "internal"
  };

  const char *name, *type;
  tree id;

  /* Emit the directive for the ultimate target of transparent aliases.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (alias: &id);
  name = IDENTIFIER_POINTER (id);

  type = visibility_types[vis];

  fprintf (stream: asm_out_file, format: "\t.%s\t" , type);
  assemble_name (file: asm_out_file, name);
  fprintf (stream: asm_out_file, format: "\n" );
#else
  /* Without GAS visibility support, only warn for user-written decls.  */
  if (!DECL_ARTIFICIAL (decl))
    warning (OPT_Wattributes, "visibility attribute not supported "
	     "in this configuration; ignored" );
#endif
}
6594 | |
6595 | /* A helper function to call assemble_visibility when needed for a decl. */ |
6596 | |
6597 | bool |
6598 | maybe_assemble_visibility (tree decl) |
6599 | { |
6600 | enum symbol_visibility vis = DECL_VISIBILITY (decl); |
6601 | if (vis != VISIBILITY_DEFAULT) |
6602 | { |
6603 | targetm.asm_out.assemble_visibility (decl, vis); |
6604 | return true; |
6605 | } |
6606 | else |
6607 | return false; |
6608 | } |
6609 | |
6610 | /* Returns true if the target configuration supports defining public symbols |
6611 | so that one of them will be chosen at link time instead of generating a |
6612 | multiply-defined symbol error, whether through the use of weak symbols or |
6613 | a target-specific mechanism for having duplicates discarded. */ |
6614 | |
6615 | bool |
6616 | supports_one_only (void) |
6617 | { |
6618 | if (SUPPORTS_ONE_ONLY) |
6619 | return true; |
6620 | if (TARGET_SUPPORTS_WEAK) |
6621 | return true; |
6622 | return false; |
6623 | } |
6624 | |
6625 | /* Set up DECL as a public symbol that can be defined in multiple |
6626 | translation units without generating a linker error. */ |
6627 | |
void
make_decl_one_only (tree decl, tree comdat_group)
{
  struct symtab_node *symbol;
  gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));

  TREE_PUBLIC (decl) = 1;

  if (VAR_P (decl))
    symbol = varpool_node::get_create (decl);
  else
    symbol = cgraph_node::get_create (decl);

  if (SUPPORTS_ONE_ONLY)
    {
      /* Preferred mechanism: put the symbol in a COMDAT group.  */
#ifdef MAKE_DECL_ONE_ONLY
      MAKE_DECL_ONE_ONLY (decl);
#endif
      symbol->set_comdat_group (comdat_group);
    }
  else if (VAR_P (decl)
	   && (DECL_INITIAL (decl) == 0
	       || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
    /* Uninitialized variables can fall back to common linkage.  */
    DECL_COMMON (decl) = 1;
  else
    {
      /* Last resort: weak linkage (the target must support it).  */
      gcc_assert (TARGET_SUPPORTS_WEAK);
      DECL_WEAK (decl) = 1;
    }
}
6658 | |
/* One-time initialization of varasm state: hash tables, the shared
   constant pool, and the standard/noswitch sections the target
   configuration defines.  */
void
init_varasm_once (void)
{
  section_htab = hash_table<section_hasher>::create_ggc (n: 31);
  object_block_htab = hash_table<object_block_hasher>::create_ggc (n: 31);
  const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (n: 1009);

  shared_constant_pool = create_constant_pool ();

  /* Each standard section exists only if the target defines its
     assembler op.  */
#ifdef TEXT_SECTION_ASM_OP
  text_section = get_unnamed_section (flags: SECTION_CODE, callback: output_section_asm_op,
				      TEXT_SECTION_ASM_OP);
#endif

#ifdef DATA_SECTION_ASM_OP
  data_section = get_unnamed_section (flags: SECTION_WRITE, callback: output_section_asm_op,
				      DATA_SECTION_ASM_OP);
#endif

#ifdef SDATA_SECTION_ASM_OP
  sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
				       SDATA_SECTION_ASM_OP);
#endif

#ifdef READONLY_DATA_SECTION_ASM_OP
  readonly_data_section = get_unnamed_section (flags: 0, callback: output_section_asm_op,
					       READONLY_DATA_SECTION_ASM_OP);
#endif

#ifdef CTORS_SECTION_ASM_OP
  ctors_section = get_unnamed_section (0, output_section_asm_op,
				       CTORS_SECTION_ASM_OP);
#endif

#ifdef DTORS_SECTION_ASM_OP
  dtors_section = get_unnamed_section (0, output_section_asm_op,
				       DTORS_SECTION_ASM_OP);
#endif

#ifdef BSS_SECTION_ASM_OP
  bss_section = get_unnamed_section (flags: SECTION_WRITE | SECTION_BSS,
				     callback: output_section_asm_op,
				     BSS_SECTION_ASM_OP);
#endif

#ifdef SBSS_SECTION_ASM_OP
  sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
				      output_section_asm_op,
				      SBSS_SECTION_ASM_OP);
#endif

  /* Noswitch sections emit their objects via callbacks rather than a
     section-switch directive.  */
  tls_comm_section = get_noswitch_section (flags: SECTION_WRITE | SECTION_BSS
					   | SECTION_COMMON, callback: emit_tls_common);
  lcomm_section = get_noswitch_section (flags: SECTION_WRITE | SECTION_BSS
					| SECTION_COMMON, callback: emit_local);
  comm_section = get_noswitch_section (flags: SECTION_WRITE | SECTION_BSS
				       | SECTION_COMMON, callback: emit_common);

#if defined ASM_OUTPUT_ALIGNED_BSS
  bss_noswitch_section = get_noswitch_section (flags: SECTION_WRITE | SECTION_BSS,
					       callback: emit_bss);
#endif

  /* Let the target create or override sections.  */
  targetm.asm_out.init_sections ();

  /* Targets with no read-only data section fall back to text.  */
  if (readonly_data_section == NULL)
    readonly_data_section = text_section;

#ifdef ASM_OUTPUT_EXTERNAL
  pending_assemble_externals_set = new hash_set<tree>;
#endif
}
6731 | |
6732 | /* Determine whether SYMBOL is used in any optimized function. */ |
6733 | |
6734 | static bool |
6735 | have_optimized_refs (struct symtab_node *symbol) |
6736 | { |
6737 | struct ipa_ref *ref; |
6738 | |
6739 | for (int i = 0; symbol->iterate_referring (i, ref); i++) |
6740 | { |
6741 | cgraph_node *cnode = dyn_cast <cgraph_node *> (p: ref->referring); |
6742 | |
6743 | if (cnode && opt_for_fn (cnode->decl, optimize)) |
6744 | return true; |
6745 | } |
6746 | |
6747 | return false; |
6748 | } |
6749 | |
6750 | /* Check if promoting general-dynamic TLS access model to local-dynamic is |
6751 | desirable for DECL. */ |
6752 | |
6753 | static bool |
6754 | optimize_dyn_tls_for_decl_p (const_tree decl) |
6755 | { |
6756 | if (cfun) |
6757 | return optimize; |
6758 | return symtab->state >= IPA && have_optimized_refs (symbol: symtab_node::get (decl)); |
6759 | } |
6760 | |
6761 | |
/* Pick the default TLS access model for DECL: exec models for
   non-shared-library builds, dynamic models otherwise, clamped by
   the -ftls-default= floor.  */
enum tls_model
decl_default_tls_model (const_tree decl)
{
  enum tls_model kind;
  bool is_local;

  is_local = targetm.binds_local_p (decl);
  if (!flag_shlib)
    {
      /* Executable: exec models are always usable.  */
      if (is_local)
	kind = TLS_MODEL_LOCAL_EXEC;
      else
	kind = TLS_MODEL_INITIAL_EXEC;
    }

  /* Local dynamic is inefficient when we're not combining the
     parts of the address.  */
  else if (is_local && optimize_dyn_tls_for_decl_p (decl))
    kind = TLS_MODEL_LOCAL_DYNAMIC;
  else
    kind = TLS_MODEL_GLOBAL_DYNAMIC;
  /* Never pick a model stronger than the user-specified floor.  */
  if (kind < flag_tls_default)
    kind = flag_tls_default;

  return kind;
}
6788 | |
6789 | /* Select a set of attributes for section NAME based on the properties |
6790 | of DECL and whether or not RELOC indicates that DECL's initializer |
6791 | might contain runtime relocations. |
6792 | |
6793 | We make the section read-only and executable for a function decl, |
6794 | read-only for a const data decl, and writable for a non-const data decl. */ |
6795 | |
unsigned int
default_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags;

  /* Derive base flags from the decl when we have one...  */
  if (decl && TREE_CODE (decl) == FUNCTION_DECL)
    flags = SECTION_CODE;
  else if (decl)
    {
      enum section_category category
	= categorize_decl_for_section (decl, reloc);
      if (decl_readonly_section_1 (category))
	flags = 0;
      else if (category == SECCAT_DATA_REL_RO
	       || category == SECCAT_DATA_REL_RO_LOCAL)
	flags = SECTION_WRITE | SECTION_RELRO;
      else
	flags = SECTION_WRITE;
    }
  else
    {
      /* ...otherwise fall back to recognizing well-known section names.  */
      flags = SECTION_WRITE;
      if (strcmp (s1: name, s2: ".data.rel.ro" ) == 0
	  || strcmp (s1: name, s2: ".data.rel.ro.local" ) == 0)
	flags |= SECTION_RELRO;
    }

  if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
    flags |= SECTION_LINKONCE;

  if (strcmp (s1: name, s2: ".vtable_map_vars" ) == 0)
    flags |= SECTION_LINKONCE;

  if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    flags |= SECTION_TLS | SECTION_WRITE;

  /* BSS-style sections, by name.  */
  if (strcmp (s1: name, s2: ".bss" ) == 0
      || startswith (str: name, prefix: ".bss." )
      || startswith (str: name, prefix: ".gnu.linkonce.b." )
      || strcmp (s1: name, s2: ".persistent.bss" ) == 0
      || strcmp (s1: name, s2: ".sbss" ) == 0
      || startswith (str: name, prefix: ".sbss." )
      || startswith (str: name, prefix: ".gnu.linkonce.sb." ))
    flags |= SECTION_BSS;

  /* TLS data and TLS BSS sections, by name.  */
  if (strcmp (s1: name, s2: ".tdata" ) == 0
      || startswith (str: name, prefix: ".tdata." )
      || startswith (str: name, prefix: ".gnu.linkonce.td." ))
    flags |= SECTION_TLS;

  if (strcmp (s1: name, s2: ".tbss" ) == 0
      || startswith (str: name, prefix: ".tbss." )
      || startswith (str: name, prefix: ".gnu.linkonce.tb." ))
    flags |= SECTION_TLS | SECTION_BSS;

  if (strcmp (s1: name, s2: ".noinit" ) == 0)
    flags |= SECTION_WRITE | SECTION_BSS | SECTION_NOTYPE;

  if (strcmp (s1: name, s2: ".persistent" ) == 0)
    flags |= SECTION_WRITE | SECTION_NOTYPE;

  /* Various sections have special ELF types that the assembler will
     assign by default based on the name.  They are neither SHT_PROGBITS
     nor SHT_NOBITS, so when changing sections we don't want to print a
     section type (@progbits or @nobits).  Rather than duplicating the
     assembler's knowledge of what those special name patterns are, just
     let the assembler choose the type if we don't know a specific
     reason to set it to something other than the default.  SHT_PROGBITS
     is the default for sections whose name is not specially known to
     the assembler, so it does no harm to leave the choice to the
     assembler when @progbits is the best thing we know to use.  If
     someone is silly enough to emit code or TLS variables to one of
     these sections, then don't handle them specially.

     default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
     LINKONCE cases when NOTYPE is not set, so leave those to its logic.  */
  if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
      && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
    flags |= SECTION_NOTYPE;

  return flags;
}
6878 | |
6879 | /* Return true if the target supports some form of global BSS, |
6880 | either through bss_noswitch_section, or by selecting a BSS |
6881 | section in TARGET_ASM_SELECT_SECTION. */ |
6882 | |
6883 | bool |
6884 | have_global_bss_p (void) |
6885 | { |
6886 | return bss_noswitch_section || targetm.have_switchable_bss_sections; |
6887 | } |
6888 | |
6889 | /* Output assembly to switch to section NAME with attribute FLAGS. |
6890 | Four variants for common object file formats. */ |
6891 | |
6892 | void |
6893 | default_no_named_section (const char *name ATTRIBUTE_UNUSED, |
6894 | unsigned int flags ATTRIBUTE_UNUSED, |
6895 | tree decl ATTRIBUTE_UNUSED) |
6896 | { |
6897 | /* Some object formats don't support named sections at all. The |
6898 | front-end should already have flagged this as an error. */ |
6899 | gcc_unreachable (); |
6900 | } |
6901 | |
6902 | #ifndef TLS_SECTION_ASM_FLAG |
6903 | #define TLS_SECTION_ASM_FLAG 'T' |
6904 | #endif |
6905 | |
void
default_elf_asm_named_section (const char *name, unsigned int flags,
			       tree decl)
{
  /* Room for "0x%08x" (10 chars + NUL) or the flag letters below.  */
  char flagchars[11], *f = flagchars;
  unsigned int numeric_value = 0;

  /* If we have already declared this section, we can use an
     abbreviated form to switch back to it -- unless this section is
     part of a COMDAT groups or with SHF_GNU_RETAIN or with SHF_LINK_ORDER,
     in which case GAS requires the full declaration every time.  */
  if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
      && !(flags & (SECTION_RETAIN | SECTION_LINK_ORDER))
      && (flags & SECTION_DECLARED))
    {
      fprintf (stream: asm_out_file, format: "\t.section\t%s\n" , name);
      return;
    }

  /* If we have a machine specific flag, then use the numeric value to pass
     this on to GAS.  */
  if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
    snprintf (s: f, maxlen: sizeof (flagchars), format: "0x%08x" , numeric_value);
  else
    {
      /* Translate SECTION_* bits into the GAS flag letters.  */
      if (!(flags & SECTION_DEBUG))
	*f++ = 'a';
#if HAVE_GAS_SECTION_EXCLUDE
      if (flags & SECTION_EXCLUDE)
	*f++ = 'e';
#endif
      if (flags & SECTION_WRITE)
	*f++ = 'w';
      if (flags & SECTION_CODE)
	*f++ = 'x';
      if (flags & SECTION_SMALL)
	*f++ = 's';
      if (flags & SECTION_MERGE)
	*f++ = 'M';
      if (flags & SECTION_STRINGS)
	*f++ = 'S';
      if (flags & SECTION_TLS)
	*f++ = TLS_SECTION_ASM_FLAG;
      if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
	*f++ = 'G';
      if (flags & SECTION_RETAIN)
	*f++ = 'R';
      if (flags & SECTION_LINK_ORDER)
	*f++ = 'o';
#ifdef MACH_DEP_SECTION_ASM_FLAG
      if (flags & SECTION_MACH_DEP)
	*f++ = MACH_DEP_SECTION_ASM_FLAG;
#endif
      *f = '\0';
    }

  fprintf (stream: asm_out_file, format: "\t.section\t%s,\"%s\"" , name, flagchars);

  /* default_section_type_flags (above) knows which flags need special
     handling here, and sets NOTYPE when none of these apply so that the
     assembler's logic for default types can apply to user-chosen
     section names.  */
  if (!(flags & SECTION_NOTYPE))
    {
      const char *type;
      const char *format;

      if (flags & SECTION_BSS)
	type = "nobits" ;
      else
	type = "progbits" ;

      format = ",@%s" ;
      /* On platforms that use "@" as the assembly comment character,
	 use "%" instead.  */
      if (strcmp (ASM_COMMENT_START, s2: "@" ) == 0)
	format = ",%%%s" ;
      fprintf (stream: asm_out_file, format: format, type);

      /* Entity size for mergeable sections (the 'M' flag above).  */
      if (flags & SECTION_ENTSIZE)
	fprintf (stream: asm_out_file, format: ",%d" , flags & SECTION_ENTSIZE);
      if (flags & SECTION_LINK_ORDER)
	{
	  /* For now, only section "__patchable_function_entries"
	     adopts flag SECTION_LINK_ORDER, internal label LPFE*
	     was emitted in default_print_patchable_function_entry,
	     just place it here for linked_to section.  */
	  gcc_assert (!strcmp (name, "__patchable_function_entries" ));
	  fprintf (stream: asm_out_file, format: "," );
	  char buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE" ,
				       current_function_funcdef_no);
	  assemble_name_raw (file: asm_out_file, name: buf);
	}
      /* COMDAT group name: either DECL itself is the identifier, or we
	 use the decl's comdat group.  */
      if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
	{
	  if (TREE_CODE (decl) == IDENTIFIER_NODE)
	    fprintf (stream: asm_out_file, format: ",%s,comdat" , IDENTIFIER_POINTER (decl));
	  else
	    fprintf (stream: asm_out_file, format: ",%s,comdat" ,
		     IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
	}
    }

  putc (c: '\n', stream: asm_out_file);
}
7012 | |
7013 | void |
7014 | default_coff_asm_named_section (const char *name, unsigned int flags, |
7015 | tree decl ATTRIBUTE_UNUSED) |
7016 | { |
7017 | char flagchars[8], *f = flagchars; |
7018 | |
7019 | if (flags & SECTION_WRITE) |
7020 | *f++ = 'w'; |
7021 | if (flags & SECTION_CODE) |
7022 | *f++ = 'x'; |
7023 | *f = '\0'; |
7024 | |
7025 | fprintf (stream: asm_out_file, format: "\t.section\t%s,\"%s\"\n" , name, flagchars); |
7026 | } |
7027 | |
7028 | void |
7029 | default_pe_asm_named_section (const char *name, unsigned int flags, |
7030 | tree decl) |
7031 | { |
7032 | default_coff_asm_named_section (name, flags, decl); |
7033 | |
7034 | if (flags & SECTION_LINKONCE) |
7035 | { |
7036 | /* Functions may have been compiled at various levels of |
7037 | optimization so we can't use `same_size' here. |
7038 | Instead, have the linker pick one. */ |
7039 | fprintf (stream: asm_out_file, format: "\t.linkonce %s\n" , |
7040 | (flags & SECTION_CODE ? "discard" : "same_size" )); |
7041 | } |
7042 | } |
7043 | |
7044 | /* The lame default section selector. */ |
7045 | |
7046 | section * |
7047 | default_select_section (tree decl, int reloc, |
7048 | unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED) |
7049 | { |
7050 | if (DECL_P (decl)) |
7051 | { |
7052 | if (decl_readonly_section (decl, reloc)) |
7053 | return readonly_data_section; |
7054 | } |
7055 | else if (TREE_CODE (decl) == CONSTRUCTOR) |
7056 | { |
7057 | if (! ((flag_pic && reloc) |
7058 | || !TREE_READONLY (decl) |
7059 | || !TREE_CONSTANT (decl))) |
7060 | return readonly_data_section; |
7061 | } |
7062 | else if (TREE_CODE (decl) == STRING_CST) |
7063 | return readonly_data_section; |
7064 | else if (! (flag_pic && reloc)) |
7065 | return readonly_data_section; |
7066 | |
7067 | return data_section; |
7068 | } |
7069 | |
/* Classify DECL (a decl, STRING_CST or CONSTRUCTOR) into one of the
   SECCAT_* categories used to pick an output section.  RELOC describes
   the relocations required by the initializer: bit 0 for local-symbol
   relocations, bit 1 for global ones (see compute_reloc_for_rtx).  */

enum section_category
categorize_decl_for_section (const_tree decl, int reloc)
{
  enum section_category ret;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    return SECCAT_TEXT;
  else if (TREE_CODE (decl) == STRING_CST)
    {
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && asan_protect_global (CONST_CAST_TREE (decl)))
	/* or !flag_merge_constants */
	return SECCAT_RODATA;
      else
	return SECCAT_RODATA_MERGE_STR;
    }
  else if (VAR_P (decl))
    {
      tree d = CONST_CAST_TREE (decl);
      if (bss_initializer_p (decl))
	ret = SECCAT_BSS;
      else if (! TREE_READONLY (decl)
	       || (DECL_INITIAL (decl)
		   && ! TREE_CONSTANT (DECL_INITIAL (decl))))
	{
	  /* Writable data.  Here the reloc_rw_mask is not testing whether
	     the section should be read-only or not, but whether the dynamic
	     link will have to do something.  If so, we wish to segregate
	     the data in order to minimize cache misses inside the dynamic
	     linker.  */
	  if (reloc & targetm.asm_out.reloc_rw_mask ())
	    ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
	  else
	    ret = SECCAT_DATA;
	}
      else if (reloc & targetm.asm_out.reloc_rw_mask ())
	/* Read-only but needing runtime relocation: .data.rel.ro*.  */
	ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
      else if (reloc || (flag_merge_constants < 2 && !DECL_MERGEABLE (decl))
	       || ((flag_sanitize & SANITIZE_ADDRESS)
		   /* PR 81697: for architectures that use section anchors we
		      need to ignore DECL_RTL_SET_P (decl) for string constants
		      inside this asan_protect_global call because otherwise
		      we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
		      section, set DECL_RTL (decl) later on and add DECL to
		      protected globals via successive asan_protect_global
		      calls.  In this scenario we'll end up with wrong
		      alignment of these strings at runtime and possible ASan
		      false positives.  */
		   && asan_protect_global (d, ignore_decl_rtl_set_p: use_object_blocks_p ()
					   && use_blocks_for_decl_p (decl: d))))
	/* C and C++ don't allow different variables to share the same
	   location.  -fmerge-all-constants allows even that (at the
	   expense of not conforming).  */
	ret = SECCAT_RODATA;
      else if (DECL_INITIAL (decl)
	       && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
	ret = SECCAT_RODATA_MERGE_STR_INIT;
      else
	ret = SECCAT_RODATA_MERGE_CONST;
    }
  else if (TREE_CODE (decl) == CONSTRUCTOR)
    {
      if ((reloc & targetm.asm_out.reloc_rw_mask ())
	  || ! TREE_CONSTANT (decl))
	ret = SECCAT_DATA;
      else
	ret = SECCAT_RODATA;
    }
  else
    ret = SECCAT_RODATA;

  /* There are no read-only thread-local sections.  */
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    {
      /* Note that this would be *just* SECCAT_BSS, except that there's
	 no concept of a read-only thread-local-data section.  */
      if (ret == SECCAT_BSS
	  || DECL_INITIAL (decl) == NULL
	  || (flag_zero_initialized_in_bss
	      && initializer_zerop (DECL_INITIAL (decl))))
	ret = SECCAT_TBSS;
      else
	ret = SECCAT_TDATA;
    }

  /* If the target uses small data sections, select it.  */
  else if (targetm.in_small_data_p (decl))
    {
      if (ret == SECCAT_BSS)
	ret = SECCAT_SBSS;
      else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
	ret = SECCAT_SRODATA;
      else
	ret = SECCAT_SDATA;
    }

  return ret;
}
7167 | |
7168 | static bool |
7169 | decl_readonly_section_1 (enum section_category category) |
7170 | { |
7171 | switch (category) |
7172 | { |
7173 | case SECCAT_RODATA: |
7174 | case SECCAT_RODATA_MERGE_STR: |
7175 | case SECCAT_RODATA_MERGE_STR_INIT: |
7176 | case SECCAT_RODATA_MERGE_CONST: |
7177 | case SECCAT_SRODATA: |
7178 | return true; |
7179 | default: |
7180 | return false; |
7181 | } |
7182 | } |
7183 | |
7184 | bool |
7185 | decl_readonly_section (const_tree decl, int reloc) |
7186 | { |
7187 | return decl_readonly_section_1 (category: categorize_decl_for_section (decl, reloc)); |
7188 | } |
7189 | |
/* Select a section based on the above categorization.  Maps each
   SECCAT_* category either to one of the standard section objects or
   to a named ELF section looked up by name.  */

section *
default_elf_select_section (tree decl, int reloc,
			    unsigned HOST_WIDE_INT align)
{
  const char *sname;

  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      /* We're not supposed to be called on FUNCTION_DECLs.  */
      gcc_unreachable ();
    case SECCAT_RODATA:
      return readonly_data_section;
    case SECCAT_RODATA_MERGE_STR:
      return mergeable_string_section (decl, align, flags: 0);
    case SECCAT_RODATA_MERGE_STR_INIT:
      return mergeable_string_section (DECL_INITIAL (decl), align, flags: 0);
    case SECCAT_RODATA_MERGE_CONST:
      return mergeable_constant_section (DECL_MODE (decl), align, flags: 0);
    case SECCAT_SRODATA:
      sname = ".sdata2";
      break;
    case SECCAT_DATA:
      /* Variables with the "persistent" attribute get their own
	 section instead of plain .data.  */
      if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
	{
	  sname = ".persistent";
	  break;
	}
      return data_section;
    case SECCAT_DATA_REL:
      sname = ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      sname = ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      sname = ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      sname = ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      sname = ".sdata";
      break;
    case SECCAT_TDATA:
      sname = ".tdata";
      break;
    case SECCAT_BSS:
      /* Variables with the "noinit" attribute get their own section
	 instead of .bss.  */
      if (DECL_P (decl) && DECL_NOINIT_P (decl))
	{
	  sname = ".noinit";
	  break;
	}
      if (bss_section)
	return bss_section;
      sname = ".bss";
      break;
    case SECCAT_SBSS:
      sname = ".sbss";
      break;
    case SECCAT_TBSS:
      sname = ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  return get_named_section (decl, name: sname, reloc);
}
7261 | |
/* Construct a unique section name based on the decl name and the
   categorization performed above, and record it as DECL's section.
   When the target lacks COMDAT groups, link-once decls instead get a
   ".gnu.linkonce" prefix with abbreviated category names.  */

void
default_unique_section (tree decl, int reloc)
{
  /* We only need to use .gnu.linkonce if we don't have COMDAT groups.  */
  bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
  const char *prefix, *name, *linkonce;
  char *string;
  tree id;

  /* Pick the category-specific section prefix; the short forms are
     used under the .gnu.linkonce scheme.  */
  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      prefix = one_only ? ".t" : ".text";
      break;
    case SECCAT_RODATA:
    case SECCAT_RODATA_MERGE_STR:
    case SECCAT_RODATA_MERGE_STR_INIT:
    case SECCAT_RODATA_MERGE_CONST:
      prefix = one_only ? ".r" : ".rodata";
      break;
    case SECCAT_SRODATA:
      prefix = one_only ? ".s2" : ".sdata2";
      break;
    case SECCAT_DATA:
      prefix = one_only ? ".d" : ".data";
      /* "persistent" variables override the plain data prefix.  */
      if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
	{
	  prefix = one_only ? ".p" : ".persistent";
	  break;
	}
      break;
    case SECCAT_DATA_REL:
      prefix = one_only ? ".d.rel" : ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      prefix = one_only ? ".d.rel.local" : ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      prefix = one_only ? ".s" : ".sdata";
      break;
    case SECCAT_BSS:
      /* "noinit" variables override the plain bss prefix.  */
      if (DECL_P (decl) && DECL_NOINIT_P (decl))
	{
	  prefix = one_only ? ".n" : ".noinit";
	  break;
	}
      prefix = one_only ? ".b" : ".bss";
      break;
    case SECCAT_SBSS:
      prefix = one_only ? ".sb" : ".sbss";
      break;
    case SECCAT_TDATA:
      prefix = one_only ? ".td" : ".tdata";
      break;
    case SECCAT_TBSS:
      prefix = one_only ? ".tb" : ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  /* Use the target-stripped assembler name (following transparent
     aliases) as the unique suffix.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (alias: &id);
  name = IDENTIFIER_POINTER (id);
  name = targetm.strip_name_encoding (name);

  /* If we're using one_only, then there needs to be a .gnu.linkonce
     prefix to the section name.  */
  linkonce = one_only ? ".gnu.linkonce" : "";

  string = ACONCAT ((linkonce, prefix, ".", name, NULL));

  set_decl_section_name (decl, string);
}
7345 | |
7346 | /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */ |
7347 | |
7348 | static int |
7349 | compute_reloc_for_rtx_1 (const_rtx x) |
7350 | { |
7351 | switch (GET_CODE (x)) |
7352 | { |
7353 | case SYMBOL_REF: |
7354 | return SYMBOL_REF_LOCAL_P (x) ? 1 : 2; |
7355 | case LABEL_REF: |
7356 | return 1; |
7357 | default: |
7358 | return 0; |
7359 | } |
7360 | } |
7361 | |
7362 | /* Like compute_reloc_for_constant, except for an RTX. The return value |
7363 | is a mask for which bit 1 indicates a global relocation, and bit 0 |
7364 | indicates a local relocation. Used by default_select_rtx_section |
7365 | and default_elf_select_rtx_section. */ |
7366 | |
7367 | static int |
7368 | compute_reloc_for_rtx (const_rtx x) |
7369 | { |
7370 | switch (GET_CODE (x)) |
7371 | { |
7372 | case SYMBOL_REF: |
7373 | case LABEL_REF: |
7374 | return compute_reloc_for_rtx_1 (x); |
7375 | |
7376 | case CONST: |
7377 | { |
7378 | int reloc = 0; |
7379 | subrtx_iterator::array_type array; |
7380 | FOR_EACH_SUBRTX (iter, array, x, ALL) |
7381 | reloc |= compute_reloc_for_rtx_1 (x: *iter); |
7382 | return reloc; |
7383 | } |
7384 | |
7385 | default: |
7386 | return 0; |
7387 | } |
7388 | } |
7389 | |
7390 | section * |
7391 | default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED, |
7392 | rtx x, |
7393 | unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED) |
7394 | { |
7395 | if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ()) |
7396 | return data_section; |
7397 | else |
7398 | return readonly_data_section; |
7399 | } |
7400 | |
7401 | section * |
7402 | default_elf_select_rtx_section (machine_mode mode, rtx x, |
7403 | unsigned HOST_WIDE_INT align) |
7404 | { |
7405 | int reloc = compute_reloc_for_rtx (x); |
7406 | |
7407 | /* ??? Handle small data here somehow. */ |
7408 | |
7409 | if (reloc & targetm.asm_out.reloc_rw_mask ()) |
7410 | { |
7411 | if (reloc == 1) |
7412 | return get_named_section (NULL, name: ".data.rel.ro.local" , reloc: 1); |
7413 | else |
7414 | return get_named_section (NULL, name: ".data.rel.ro" , reloc: 3); |
7415 | } |
7416 | |
7417 | return mergeable_constant_section (mode, align, flags: 0); |
7418 | } |
7419 | |
7420 | /* Set the generally applicable flags on the SYMBOL_REF for EXP. */ |
7421 | |
7422 | void |
7423 | default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED) |
7424 | { |
7425 | rtx symbol; |
7426 | int flags; |
7427 | |
7428 | /* Careful not to prod global register variables. */ |
7429 | if (!MEM_P (rtl)) |
7430 | return; |
7431 | symbol = XEXP (rtl, 0); |
7432 | if (GET_CODE (symbol) != SYMBOL_REF) |
7433 | return; |
7434 | |
7435 | flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO; |
7436 | if (TREE_CODE (decl) == FUNCTION_DECL) |
7437 | flags |= SYMBOL_FLAG_FUNCTION; |
7438 | if (targetm.binds_local_p (decl)) |
7439 | flags |= SYMBOL_FLAG_LOCAL; |
7440 | if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl)) |
7441 | flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT; |
7442 | else if (targetm.in_small_data_p (decl)) |
7443 | flags |= SYMBOL_FLAG_SMALL; |
7444 | /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names? Without |
7445 | being PUBLIC, the thing *must* be defined in this translation unit. |
7446 | Prevent this buglet from being propagated into rtl code as well. */ |
7447 | if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl)) |
7448 | flags |= SYMBOL_FLAG_EXTERNAL; |
7449 | |
7450 | SYMBOL_REF_FLAGS (symbol) = flags; |
7451 | } |
7452 | |
/* By default, we do nothing for encode_section_info, so we need not
   do anything but discard the '*' marker.  */

const char *
default_strip_name_encoding (const char *str)
{
  if (str[0] == '*')
    str++;
  return str;
}
7461 | |
7462 | #ifdef ASM_OUTPUT_DEF |
7463 | /* The default implementation of TARGET_ASM_OUTPUT_ANCHOR. Define the |
7464 | anchor relative to ".", the current section position. */ |
7465 | |
7466 | void |
7467 | default_asm_output_anchor (rtx symbol) |
7468 | { |
7469 | gcc_checking_assert (TARGET_SUPPORTS_ALIASES); |
7470 | |
7471 | char buffer[100]; |
7472 | |
7473 | sprintf (s: buffer, format: "*. + " HOST_WIDE_INT_PRINT_DEC, |
7474 | SYMBOL_REF_BLOCK_OFFSET (symbol)); |
7475 | ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer); |
7476 | } |
7477 | #endif |
7478 | |
7479 | /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */ |
7480 | |
7481 | bool |
7482 | default_use_anchors_for_symbol_p (const_rtx symbol) |
7483 | { |
7484 | tree decl; |
7485 | section *sect = SYMBOL_REF_BLOCK (symbol)->sect; |
7486 | |
7487 | /* This function should only be called with non-zero SYMBOL_REF_BLOCK, |
7488 | furthermore get_block_for_section should not create object blocks |
7489 | for mergeable sections. */ |
7490 | gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE)); |
7491 | |
7492 | /* Don't use anchors for small data sections. The small data register |
7493 | acts as an anchor for such sections. */ |
7494 | if (sect->common.flags & SECTION_SMALL) |
7495 | return false; |
7496 | |
7497 | decl = SYMBOL_REF_DECL (symbol); |
7498 | if (decl && DECL_P (decl)) |
7499 | { |
7500 | /* Don't use section anchors for decls that might be defined or |
7501 | usurped by other modules. */ |
7502 | if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl)) |
7503 | return false; |
7504 | |
7505 | /* Don't use section anchors for decls that will be placed in a |
7506 | small data section. */ |
7507 | /* ??? Ideally, this check would be redundant with the SECTION_SMALL |
7508 | one above. The problem is that we only use SECTION_SMALL for |
7509 | sections that should be marked as small in the section directive. */ |
7510 | if (targetm.in_small_data_p (decl)) |
7511 | return false; |
7512 | |
7513 | /* Don't use section anchors for decls that won't fit inside a single |
7514 | anchor range to reduce the amount of instructions required to refer |
7515 | to the entire declaration. */ |
7516 | if (DECL_SIZE_UNIT (decl) == NULL_TREE |
7517 | || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)) |
7518 | || (tree_to_uhwi (DECL_SIZE_UNIT (decl)) |
7519 | >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset)) |
7520 | return false; |
7521 | |
7522 | } |
7523 | return true; |
7524 | } |
7525 | |
7526 | /* Return true when RESOLUTION indicate that symbol will be bound to the |
7527 | definition provided by current .o file. */ |
7528 | |
7529 | static bool |
7530 | resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution) |
7531 | { |
7532 | return (resolution == LDPR_PREVAILING_DEF |
7533 | || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP |
7534 | || resolution == LDPR_PREVAILING_DEF_IRONLY); |
7535 | } |
7536 | |
7537 | /* Return true when RESOLUTION indicate that symbol will be bound locally |
7538 | within current executable or DSO. */ |
7539 | |
7540 | static bool |
7541 | resolution_local_p (enum ld_plugin_symbol_resolution resolution) |
7542 | { |
7543 | return (resolution == LDPR_PREVAILING_DEF |
7544 | || resolution == LDPR_PREVAILING_DEF_IRONLY |
7545 | || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP |
7546 | || resolution == LDPR_PREEMPTED_REG |
7547 | || resolution == LDPR_PREEMPTED_IR |
7548 | || resolution == LDPR_RESOLVED_IR |
7549 | || resolution == LDPR_RESOLVED_EXEC); |
7550 | } |
7551 | |
/* Worker for the default_binds_local_p* hooks: return true if EXP binds
   locally.  SHLIB is true when compiling for a shared library;
   WEAK_DOMINATE means a local definition dominates interposition in
   executables; EXTERN_PROTECTED_DATA means protected-visibility data may
   still be non-local.  COMMON_LOCAL_P is true means that the linker can
   guarantee that an uninitialized common symbol in the executable will
   still be defined (through COPY relocation) in the executable.  */

bool
default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
			 bool extern_protected_data, bool common_local_p)
{
  /* A non-decl is an entry in the constant pool.  */
  if (!DECL_P (exp))
    return true;

  /* Weakrefs may not bind locally, even though the weakref itself is always
     static and therefore local.  Similarly, the resolver for ifunc functions
     might resolve to a non-local function.
     FIXME: We can resolve the weakref case more carefully by looking at the
     weakref alias.  */
  if (lookup_attribute (attr_name: "weakref", DECL_ATTRIBUTES (exp))
      || (!targetm.ifunc_ref_local_ok ()
	  && TREE_CODE (exp) == FUNCTION_DECL
	  && cgraph_node::get (decl: exp)
	  && cgraph_node::get (decl: exp)->ifunc_resolver))
    return false;

  /* Static variables are always local.  */
  if (! TREE_PUBLIC (exp))
    return true;

  /* With resolution file in hand, take look into resolutions.
     We can't just return true for resolved_locally symbols,
     because dynamic linking might overwrite symbols
     in shared libraries.  */
  bool resolved_locally = false;

  /* An uninitialized COMMON symbol; its definition may be merged with
     one from another TU at link time.  */
  bool uninited_common = (DECL_COMMON (exp)
			  && (DECL_INITIAL (exp) == NULL
			      || (!in_lto_p
				  && DECL_INITIAL (exp) == error_mark_node)));

  /* A non-external variable is defined locally only if it isn't
     uninitialized COMMON variable or common_local_p is true.  */
  bool defined_locally = (!DECL_EXTERNAL (exp)
			  && (!uninited_common || common_local_p));
  if (symtab_node *node = symtab_node::get (decl: exp))
    {
      /* Refine using linker-plugin resolution info when available.  */
      if (node->in_other_partition)
	defined_locally = true;
      if (node->can_be_discarded_p ())
	;
      else if (resolution_to_local_definition_p (resolution: node->resolution))
	defined_locally = resolved_locally = true;
      else if (resolution_local_p (resolution: node->resolution))
	resolved_locally = true;
    }
  if (defined_locally && weak_dominate && !shlib)
    resolved_locally = true;

  /* Undefined weak symbols are never defined locally.  */
  if (DECL_WEAK (exp) && !defined_locally)
    return false;

  /* A symbol is local if the user has said explicitly that it will be,
     or if we have a definition for the symbol.  We cannot infer visibility
     for undefined symbols.  */
  if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
      && (TREE_CODE (exp) == FUNCTION_DECL
	  || !extern_protected_data
	  || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
      && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
    return true;

  /* If PIC, then assume that any global name can be overridden by
     symbols resolved from other modules.  */
  if (shlib)
    return false;

  /* Variables defined outside this object might not be local.  */
  if (DECL_EXTERNAL (exp) && !resolved_locally)
    return false;

  /* Non-dominant weak symbols are not defined locally.  */
  if (DECL_WEAK (exp) && !resolved_locally)
    return false;

  /* Uninitialized COMMON variable may be unified with symbols
     resolved from other modules.  */
  if (uninited_common && !resolved_locally)
    return false;

  /* Otherwise we're left with initialized (or non-common) global data
     which is of necessity defined locally.  */
  return true;
}
7645 | |
7646 | /* Assume ELF-ish defaults, since that's pretty much the most liberal |
7647 | wrt cross-module name binding. */ |
7648 | |
7649 | bool |
7650 | default_binds_local_p (const_tree exp) |
7651 | { |
7652 | return default_binds_local_p_3 (exp, flag_shlib != 0, weak_dominate: true, extern_protected_data: false, common_local_p: false); |
7653 | } |
7654 | |
7655 | /* Similar to default_binds_local_p, but common symbol may be local and |
7656 | extern protected data is non-local. */ |
7657 | |
7658 | bool |
7659 | default_binds_local_p_2 (const_tree exp) |
7660 | { |
7661 | return default_binds_local_p_3 (exp, flag_shlib != 0, weak_dominate: true, extern_protected_data: true, |
7662 | common_local_p: !flag_pic); |
7663 | } |
7664 | |
7665 | bool |
7666 | default_binds_local_p_1 (const_tree exp, int shlib) |
7667 | { |
7668 | return default_binds_local_p_3 (exp, shlib: shlib != 0, weak_dominate: false, extern_protected_data: false, common_local_p: false); |
7669 | } |
7670 | |
7671 | /* Return true when references to DECL must bind to current definition in |
7672 | final executable. |
7673 | |
7674 | The condition is usually equivalent to whether the function binds to the |
7675 | current module (shared library or executable), that is to binds_local_p. |
7676 | We use this fact to avoid need for another target hook and implement |
7677 | the logic using binds_local_p and just special cases where |
7678 | decl_binds_to_current_def_p is stronger than binds_local_p. In particular |
7679 | the weak definitions (that can be overwritten at linktime by other |
7680 | definition from different object file) and when resolution info is available |
7681 | we simply use the knowledge passed to us by linker plugin. */ |
7682 | bool |
7683 | decl_binds_to_current_def_p (const_tree decl) |
7684 | { |
7685 | gcc_assert (DECL_P (decl)); |
7686 | if (!targetm.binds_local_p (decl)) |
7687 | return false; |
7688 | if (!TREE_PUBLIC (decl)) |
7689 | return true; |
7690 | |
7691 | /* When resolution is available, just use it. */ |
7692 | if (symtab_node *node = symtab_node::get (decl)) |
7693 | { |
7694 | if (node->resolution != LDPR_UNKNOWN |
7695 | && !node->can_be_discarded_p ()) |
7696 | return resolution_to_local_definition_p (resolution: node->resolution); |
7697 | } |
7698 | |
7699 | /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks |
7700 | binds locally but still can be overwritten), DECL_COMMON (can be merged |
7701 | with a non-common definition somewhere in the same module) or |
7702 | DECL_EXTERNAL. |
7703 | This rely on fact that binds_local_p behave as decl_replaceable_p |
7704 | for all other declaration types. */ |
7705 | if (DECL_WEAK (decl)) |
7706 | return false; |
7707 | if (DECL_COMMON (decl) |
7708 | && (DECL_INITIAL (decl) == NULL |
7709 | || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node))) |
7710 | return false; |
7711 | if (DECL_EXTERNAL (decl)) |
7712 | return false; |
7713 | return true; |
7714 | } |
7715 | |
7716 | /* A replaceable function or variable is one which may be replaced |
7717 | at link-time with an entirely different definition, provided that the |
7718 | replacement has the same type. For example, functions declared |
7719 | with __attribute__((weak)) on most systems are replaceable. |
7720 | If SEMANTIC_INTERPOSITION_P is false allow interposition only on |
7721 | symbols explicitly declared weak. |
7722 | |
7723 | COMDAT functions are not replaceable, since all definitions of the |
7724 | function must be equivalent. It is important that COMDAT functions |
7725 | not be treated as replaceable so that use of C++ template |
7726 | instantiations is not penalized. */ |
7727 | |
7728 | bool |
7729 | decl_replaceable_p (tree decl, bool semantic_interposition_p) |
7730 | { |
7731 | gcc_assert (DECL_P (decl)); |
7732 | if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl)) |
7733 | return false; |
7734 | if (!semantic_interposition_p |
7735 | && !DECL_WEAK (decl)) |
7736 | return false; |
7737 | return !decl_binds_to_current_def_p (decl); |
7738 | } |
7739 | |
7740 | /* Default function to output code that will globalize a label. A |
7741 | target must define GLOBAL_ASM_OP or provide its own function to |
7742 | globalize a label. */ |
7743 | #ifdef GLOBAL_ASM_OP |
7744 | void |
7745 | default_globalize_label (FILE * stream, const char *name) |
7746 | { |
7747 | fputs (GLOBAL_ASM_OP, stream: stream); |
7748 | assemble_name (file: stream, name); |
7749 | putc (c: '\n', stream: stream); |
7750 | } |
7751 | #endif /* GLOBAL_ASM_OP */ |
7752 | |
7753 | /* Default function to output code that will globalize a declaration. */ |
7754 | void |
7755 | default_globalize_decl_name (FILE * stream, tree decl) |
7756 | { |
7757 | const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0); |
7758 | targetm.asm_out.globalize_label (stream, name); |
7759 | } |
7760 | |
/* Default function to output a label for unwind information.  The
   default is to do nothing.  A target that needs nonlocal labels for
   unwind information must provide its own function to do this.  */
void
default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
			   tree decl ATTRIBUTE_UNUSED,
			   int for_eh ATTRIBUTE_UNUSED,
			   int empty ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: the default hook emits no unwind label.  */
}
7771 | |
/* Default function to output a label to divide up the exception table.
   The default is to do nothing.  A target that needs/wants to divide
   up the table must provide its own function to do this.  */
void
default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: the default hook emits no divider label.  */
}
7779 | |
/* This is how to output an internal numbered label where PREFIX is
   the class of label and LABELNO is the number within the class.
   BUF must be large enough for the generated label (the caller sizes
   it; see default_internal_label's "strlen (prefix) + 40" pattern).  */

void
default_generate_internal_label (char *buf, const char *prefix,
				 unsigned long labelno)
{
  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
}
7789 | |
7790 | /* This is how to output an internal numbered label where PREFIX is |
7791 | the class of label and LABELNO is the number within the class. */ |
7792 | |
7793 | void |
7794 | default_internal_label (FILE *stream, const char *prefix, |
7795 | unsigned long labelno) |
7796 | { |
7797 | char *const buf = (char *) alloca (40 + strlen (prefix)); |
7798 | ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno); |
7799 | ASM_OUTPUT_INTERNAL_LABEL (stream, buf); |
7800 | } |
7801 | |
7802 | |
/* The default implementation of ASM_DECLARE_CONSTANT_NAME: just emit
   the label, ignoring the constant's value and size.  */

void
default_asm_declare_constant_name (FILE *file, const char *name,
				   const_tree exp ATTRIBUTE_UNUSED,
				   HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  assemble_label (file, name);
}
7812 | |
7813 | /* This is the default behavior at the beginning of a file. It's |
7814 | controlled by two other target-hook toggles. */ |
7815 | void |
7816 | default_file_start (void) |
7817 | { |
7818 | if (targetm.asm_file_start_app_off |
7819 | && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm)) |
7820 | fputs (ASM_APP_OFF, stream: asm_out_file); |
7821 | |
7822 | if (targetm.asm_file_start_file_directive) |
7823 | { |
7824 | /* LTO produced units have no meaningful main_input_filename. */ |
7825 | if (in_lto_p) |
7826 | output_file_directive (asm_out_file, "<artificial>" ); |
7827 | else |
7828 | output_file_directive (asm_out_file, main_input_filename); |
7829 | } |
7830 | } |
7831 | |
7832 | /* This is a generic routine suitable for use as TARGET_ASM_FILE_END |
7833 | which emits a special section directive used to indicate whether or |
7834 | not this object file needs an executable stack. This is primarily |
7835 | a GNU extension to ELF but could be used on other targets. */ |
7836 | |
7837 | int trampolines_created; |
7838 | |
7839 | void |
7840 | file_end_indicate_exec_stack (void) |
7841 | { |
7842 | unsigned int flags = SECTION_DEBUG; |
7843 | if (trampolines_created) |
7844 | flags |= SECTION_CODE; |
7845 | |
7846 | switch_to_section (get_section (name: ".note.GNU-stack" , flags, NULL)); |
7847 | } |
7848 | |
7849 | /* Emit a special section directive to indicate that this object file |
7850 | was compiled with -fsplit-stack. This is used to let the linker |
7851 | detect calls between split-stack code and non-split-stack code, so |
7852 | that it can modify the split-stack code to allocate a sufficiently |
7853 | large stack. We emit another special section if there are any |
7854 | functions in this file which have the no_split_stack attribute, to |
7855 | prevent the linker from warning about being unable to convert the |
7856 | functions if they call non-split-stack code. */ |
7857 | |
7858 | void |
7859 | file_end_indicate_split_stack (void) |
7860 | { |
7861 | if (flag_split_stack) |
7862 | { |
7863 | switch_to_section (get_section (name: ".note.GNU-split-stack" , flags: SECTION_DEBUG, |
7864 | NULL)); |
7865 | if (saw_no_split_stack) |
7866 | switch_to_section (get_section (name: ".note.GNU-no-split-stack" , |
7867 | flags: SECTION_DEBUG, NULL)); |
7868 | } |
7869 | } |
7870 | |
7871 | /* Output DIRECTIVE (a C string) followed by a newline. This is used as |
7872 | a get_unnamed_section callback. */ |
7873 | |
7874 | void |
7875 | output_section_asm_op (const char *directive) |
7876 | { |
7877 | fprintf (stream: asm_out_file, format: "%s\n" , directive); |
7878 | } |
7879 | |
/* Emit assembly code to switch to section NEW_SECTION.  Do nothing if
   the current section is NEW_SECTION.  DECL, when given, is the decl
   being emitted; it is used to reconcile the section's SECTION_RETAIN
   flag with the decl's "retain" attribute.  */

void
switch_to_section (section *new_section, tree decl)
{
  bool retain_p;
  /* Note: retain_p is assigned inside the condition; the mismatch test
     compares the decl's "retain" attribute against the section's
     SECTION_RETAIN flag.  */
  if ((new_section->common.flags & SECTION_NAMED)
      && decl != nullptr
      && DECL_P (decl)
      && ((retain_p = !!lookup_attribute (attr_name: "retain",
					  DECL_ATTRIBUTES (decl)))
	  != !!(new_section->common.flags & SECTION_RETAIN)))
    {
      /* If the SECTION_RETAIN bit doesn't match, switch to a new
	 section.  */
      tree used_decl, no_used_decl;

      if (retain_p)
	{
	  /* Upgrade the section to retained; the previously recorded
	     decl is the one without the attribute.  */
	  new_section->common.flags |= SECTION_RETAIN;
	  used_decl = decl;
	  no_used_decl = new_section->named.decl;
	}
      else
	{
	  /* Downgrade: drop retain and force the section directive to
	     be re-emitted (clear SECTION_DECLARED).  */
	  new_section->common.flags &= ~(SECTION_RETAIN
					 | SECTION_DECLARED);
	  used_decl = new_section->named.decl;
	  no_used_decl = decl;
	}
      if (no_used_decl != used_decl)
	{
	  warning (OPT_Wattributes,
		   "%+qD without %<retain%> attribute and %qD with "
		   "%<retain%> attribute are placed in a section with "
		   "the same name", no_used_decl, used_decl);
	  inform (DECL_SOURCE_LOCATION (used_decl),
		  "%qD was declared here", used_decl);
	}
    }
  else if (in_section == new_section)
    return;

  in_section = new_section;

  /* Emit the actual section directive, dispatching on section style.  */
  switch (SECTION_STYLE (new_section))
    {
    case SECTION_NAMED:
      targetm.asm_out.named_section (new_section->named.name,
				     new_section->named.common.flags,
				     new_section->named.decl);
      break;

    case SECTION_UNNAMED:
      new_section->unnamed.callback (new_section->unnamed.data);
      break;

    case SECTION_NOSWITCH:
      /* NOSWITCH sections (e.g. comm) are never switched to.  */
      gcc_unreachable ();
      break;
    }

  new_section->common.flags |= SECTION_DECLARED;
}
7945 | |
7946 | /* If block symbol SYMBOL has not yet been assigned an offset, place |
7947 | it at the end of its block. */ |
7948 | |
void
place_block_symbol (rtx symbol)
{
  unsigned HOST_WIDE_INT size, mask, offset;
  class constant_descriptor_rtx *desc;
  unsigned int alignment;
  struct object_block *block;
  tree decl;

  gcc_assert (SYMBOL_REF_BLOCK (symbol));
  /* A nonnegative block offset means the symbol was already placed.  */
  if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
    return;

  /* Work out the symbol's size and alignment.  */
  if (CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* RTL constant-pool entry: size/alignment come from its mode.  */
      desc = SYMBOL_REF_CONSTANT (symbol);
      alignment = desc->align;
      size = GET_MODE_SIZE (mode: desc->mode);
    }
  else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* Tree constant-pool entry: size/alignment come from the decl.  */
      decl = SYMBOL_REF_DECL (symbol);
      gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
      alignment = DECL_ALIGN (decl);
      size = get_constant_size (DECL_INITIAL (decl));
      /* ASan-protected string constants get a trailing red zone, which
	 also raises the required alignment.  */
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	  && asan_protect_global (DECL_INITIAL (decl)))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }
  else
    {
      /* Ordinary variable.  */
      struct symtab_node *snode;
      decl = SYMBOL_REF_DECL (symbol);

      snode = symtab_node::get (decl);
      if (snode->alias)
	{
	  /* An alias takes the offset of its ultimate target; place the
	     target first (recursively) and copy its offset.  */
	  rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);

	  gcc_assert (MEM_P (target)
		      && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
		      && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
	  target = XEXP (target, 0);
	  place_block_symbol (symbol: target);
	  SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
	  return;
	}
      alignment = get_variable_align (decl);
      size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
      /* As above, ASan protection appends a red zone.  */
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && asan_protect_global (decl))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }

  /* Calculate the object's offset from the start of the block: round
     the current block size up to the symbol's alignment.  */
  block = SYMBOL_REF_BLOCK (symbol);
  mask = alignment / BITS_PER_UNIT - 1;
  offset = (block->size + mask) & ~mask;
  SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;

  /* Record the block's new alignment and size.  */
  block->alignment = MAX (block->alignment, alignment);
  block->size = offset + size;

  vec_safe_push (v&: block->objects, obj: symbol);
}
8025 | |
8026 | /* Return the anchor that should be used to address byte offset OFFSET |
8027 | from the first object in BLOCK. MODEL is the TLS model used |
8028 | to access it. */ |
8029 | |
rtx
get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
		    enum tls_model model)
{
  char label[100];
  unsigned int begin, middle, end;
  unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
  rtx anchor;

  /* Work out the anchor's offset.  Use an offset of 0 for the first
     anchor so that we don't pessimize the case where we take the address
     of a variable at the beginning of the block.  This is particularly
     useful when a block has only one variable assigned to it.

     We try to place anchors RANGE bytes apart, so there can then be
     anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
     a ptr_mode offset.  With some target settings, the lowest such
     anchor might be out of range for the lowest ptr_mode offset;
     likewise the highest anchor for the highest offset.  Use anchors
     at the extreme ends of the ptr_mode range in such cases.

     All arithmetic uses unsigned integers in order to avoid
     signed overflow.  */
  max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
  min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
  range = max_offset - min_offset + 1;
  if (range == 0)
    /* RANGE wrapped to zero: the target allows the full ptr_mode span,
       so a single anchor at offset 0 reaches everything.  */
    offset = 0;
  else
    {
      /* BIAS is half the ptr_mode range, i.e. the magnitude limit for
	 negative anchor offsets (positive ones stop at BIAS - 1).  */
      bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (mode: ptr_mode) - 1);
      if (offset < 0)
	{
	  /* Round the anchor towards zero to a multiple of RANGE,
	     clamping at the most negative representable offset.  */
	  delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
	  delta -= delta % range;
	  if (delta > bias)
	    delta = bias;
	  offset = (HOST_WIDE_INT) (-delta);
	}
      else
	{
	  /* Likewise for nonnegative offsets, clamping at the most
	     positive representable offset.  */
	  delta = (unsigned HOST_WIDE_INT) offset - min_offset;
	  delta -= delta % range;
	  if (delta > bias - 1)
	    delta = bias - 1;
	  offset = (HOST_WIDE_INT) delta;
	}
    }

  /* Do a binary search to see if there's already an anchor we can use.
     Set BEGIN to the new anchor's index if not.  The anchors vector is
     kept sorted by block offset, then by TLS model.  */
  begin = 0;
  end = vec_safe_length (v: block->anchors);
  while (begin != end)
    {
      middle = (end + begin) / 2;
      anchor = (*block->anchors)[middle];
      if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
	end = middle;
      else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
	begin = middle + 1;
      else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
	end = middle;
      else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
	begin = middle + 1;
      else
	return anchor;
    }

  /* Create a new anchor with a unique label.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR" , anchor_labelno++);
  anchor = create_block_symbol (ggc_strdup (label), block, offset);
  SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
  SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;

  /* Insert it at index BEGIN, preserving the sort order checked above.  */
  vec_safe_insert (v&: block->anchors, ix: begin, obj: anchor);
  return anchor;
}
8109 | |
8110 | /* Output the objects in BLOCK. */ |
8111 | |
static void
output_object_block (struct object_block *block)
{
  class constant_descriptor_rtx *desc;
  unsigned int i;
  HOST_WIDE_INT offset;
  tree decl;
  rtx symbol;

  /* Nothing to emit for an empty block.  */
  if (!block->objects)
    return;

  /* Switch to the section and make sure that the first byte is
     suitably aligned.  */
  /* Special case VTV comdat sections similar to assemble_variable.  */
  if (SECTION_STYLE (block->sect) == SECTION_NAMED
      && block->sect->named.name
      && (strcmp (s1: block->sect->named.name, s2: ".vtable_map_vars" ) == 0))
    handle_vtv_comdat_section (block->sect, block->sect->named.decl);
  else
    switch_to_section (new_section: block->sect, SYMBOL_REF_DECL ((*block->objects)[0]));

  gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
  assemble_align (align: block->alignment);

  /* Define the values of all anchors relative to the current section
     position.  */
  FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
    targetm.asm_out.output_anchor (symbol);

  /* Output the objects themselves.  OFFSET tracks the current position
     within the block; each object was assigned its offset by
     place_block_symbol.  */
  offset = 0;
  FOR_EACH_VEC_ELT (*block->objects, i, symbol)
    {
      /* Move to the object's offset, padding with zeros if necessary.  */
      assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
      offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
      if (CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* RTL constant-pool entry.  */
	  desc = SYMBOL_REF_CONSTANT (symbol);
	  /* Pass 1 for align as we have already laid out everything in the block.
	     So aligning shouldn't be necessary.  */
	  output_constant_pool_1 (desc, align: 1);
	  offset += GET_MODE_SIZE (mode: desc->mode);
	}
      else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* Tree constant-pool entry.  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
				      DECL_ALIGN (decl), merge_strings: false);

	  size = get_constant_size (DECL_INITIAL (decl));
	  offset += size;
	  /* Emit the ASan red zone that place_block_symbol accounted
	     for when sizing this entry.  */
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	      && asan_protect_global (DECL_INITIAL (decl)))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
      else
	{
	  /* Ordinary variable.  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_variable_contents (decl, XSTR (symbol, 0), dont_output_data: false, merge_strings: false);
	  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
	  offset += size;
	  /* Likewise, emit the red zone for ASan-protected variables.  */
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && asan_protect_global (decl))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
    }
}
8192 | |
8193 | /* A callback for qsort to compare object_blocks. */ |
8194 | |
8195 | static int |
8196 | output_object_block_compare (const void *x, const void *y) |
8197 | { |
8198 | object_block *p1 = *(object_block * const*)x; |
8199 | object_block *p2 = *(object_block * const*)y; |
8200 | |
8201 | if (p1->sect->common.flags & SECTION_NAMED |
8202 | && !(p2->sect->common.flags & SECTION_NAMED)) |
8203 | return 1; |
8204 | |
8205 | if (!(p1->sect->common.flags & SECTION_NAMED) |
8206 | && p2->sect->common.flags & SECTION_NAMED) |
8207 | return -1; |
8208 | |
8209 | if (p1->sect->common.flags & SECTION_NAMED |
8210 | && p2->sect->common.flags & SECTION_NAMED) |
8211 | return strcmp (s1: p1->sect->named.name, s2: p2->sect->named.name); |
8212 | |
8213 | unsigned f1 = p1->sect->common.flags; |
8214 | unsigned f2 = p2->sect->common.flags; |
8215 | if (f1 == f2) |
8216 | return 0; |
8217 | return f1 < f2 ? -1 : 1; |
8218 | } |
8219 | |
8220 | /* Output the definitions of all object_blocks. */ |
8221 | |
8222 | void |
8223 | output_object_blocks (void) |
8224 | { |
8225 | vec<object_block *, va_heap> v; |
8226 | v.create (nelems: object_block_htab->elements ()); |
8227 | object_block *obj; |
8228 | hash_table<object_block_hasher>::iterator hi; |
8229 | |
8230 | FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi) |
8231 | v.quick_push (obj); |
8232 | |
8233 | /* Sort them in order to output them in a deterministic manner, |
8234 | otherwise we may get .rodata sections in different orders with |
8235 | and without -g. */ |
8236 | v.qsort (output_object_block_compare); |
8237 | unsigned i; |
8238 | FOR_EACH_VEC_ELT (v, i, obj) |
8239 | output_object_block (block: obj); |
8240 | |
8241 | v.release (); |
8242 | } |
8243 | |
8244 | /* This function provides a possible implementation of the |
8245 | TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered |
8246 | by -frecord-gcc-switches it creates a new mergeable, string section in the |
8247 | assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which |
8248 | contains the switches in ASCII format. |
8249 | |
8250 | FIXME: This code does not correctly handle double quote characters |
8251 | that appear inside strings, (it strips them rather than preserving them). |
8252 | FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h will not emit NUL |
8253 | characters - instead it treats them as sub-string separators. Since |
8254 | we want to emit NUL strings terminators into the object file we have to use |
8255 | ASM_OUTPUT_SKIP. */ |
8256 | |
8257 | void |
8258 | elf_record_gcc_switches (const char *options) |
8259 | { |
8260 | section *sec = get_section (name: targetm.asm_out.record_gcc_switches_section, |
8261 | flags: SECTION_DEBUG | SECTION_MERGE |
8262 | | SECTION_STRINGS | (SECTION_ENTSIZE & 1), NULL); |
8263 | switch_to_section (new_section: sec); |
8264 | ASM_OUTPUT_ASCII (asm_out_file, options, strlen (options) + 1); |
8265 | } |
8266 | |
8267 | /* Emit text to declare externally defined symbols. It is needed to |
8268 | properly support non-default visibility. */ |
8269 | void |
8270 | default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED, |
8271 | tree decl, |
8272 | const char *name ATTRIBUTE_UNUSED) |
8273 | { |
8274 | /* We output the name if and only if TREE_SYMBOL_REFERENCED is |
8275 | set in order to avoid putting out names that are never really |
8276 | used. Always output visibility specified in the source. */ |
8277 | if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)) |
8278 | && (DECL_VISIBILITY_SPECIFIED (decl) |
8279 | || targetm.binds_local_p (decl))) |
8280 | maybe_assemble_visibility (decl); |
8281 | } |
8282 | |
8283 | /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */ |
8284 | |
void
default_asm_output_source_filename (FILE *file, const char *name)
{
#ifdef ASM_OUTPUT_SOURCE_FILENAME
  /* Targets that define the macro get full control over the output.  */
  ASM_OUTPUT_SOURCE_FILENAME (file, name);
#else
  /* Otherwise emit a standard ".file" directive with the quoted name.  */
  fprintf (stream: file, format: "\t.file\t" );
  output_quoted_string (file, name);
  putc (c: '\n', stream: file);
#endif
}
8296 | |
8297 | /* Output a file name in the form wanted by System V. */ |
8298 | |
8299 | void |
8300 | output_file_directive (FILE *asm_file, const char *input_name) |
8301 | { |
8302 | int len; |
8303 | const char *na; |
8304 | |
8305 | if (input_name == NULL) |
8306 | input_name = "<stdin>" ; |
8307 | else |
8308 | input_name = remap_debug_filename (input_name); |
8309 | |
8310 | len = strlen (s: input_name); |
8311 | na = input_name + len; |
8312 | |
8313 | /* NA gets INPUT_NAME sans directory names. */ |
8314 | while (na > input_name) |
8315 | { |
8316 | if (IS_DIR_SEPARATOR (na[-1])) |
8317 | break; |
8318 | na--; |
8319 | } |
8320 | |
8321 | targetm.asm_out.output_source_filename (asm_file, na); |
8322 | } |
8323 | |
8324 | /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression |
8325 | EXP. */ |
rtx
make_debug_expr_from_rtl (const_rtx exp)
{
  tree ddecl = make_node (DEBUG_EXPR_DECL), type;
  machine_mode mode = GET_MODE (exp);
  rtx dval;

  DECL_ARTIFICIAL (ddecl) = 1;
  /* Prefer the tree type already attached to the register or memory
     expression, when one exists.  */
  if (REG_P (exp) && REG_EXPR (exp))
    type = TREE_TYPE (REG_EXPR (exp));
  else if (MEM_P (exp) && MEM_EXPR (exp))
    type = TREE_TYPE (MEM_EXPR (exp));
  else
    type = NULL_TREE;
  /* Only use that type if its mode matches EXP's mode; otherwise fall
     back to a language-provided type for MODE.  */
  if (type && TYPE_MODE (type) == mode)
    TREE_TYPE (ddecl) = type;
  else
    TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
  SET_DECL_MODE (ddecl, mode);
  /* Create the DEBUG_EXPR rtx and link it with the decl both ways.  */
  dval = gen_rtx_DEBUG_EXPR (mode);
  DEBUG_EXPR_TREE_DECL (dval) = ddecl;
  SET_DECL_RTL (ddecl, dval);
  return dval;
}
8350 | |
8351 | #ifdef ELF_ASCII_ESCAPES |
8352 | /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */ |
8353 | |
8354 | void |
8355 | default_elf_asm_output_limited_string (FILE *f, const char *s) |
8356 | { |
8357 | int escape; |
8358 | unsigned char c; |
8359 | |
8360 | fputs (STRING_ASM_OP, stream: f); |
8361 | putc (c: '"', stream: f); |
8362 | while (*s != '\0') |
8363 | { |
8364 | c = *s; |
8365 | escape = ELF_ASCII_ESCAPES[c]; |
8366 | switch (escape) |
8367 | { |
8368 | case 0: |
8369 | putc (c: c, stream: f); |
8370 | break; |
8371 | case 1: |
8372 | putc (c: '\\', stream: f); |
8373 | putc (c: '0'+((c>>6)&7), stream: f); |
8374 | putc (c: '0'+((c>>3)&7), stream: f); |
8375 | putc (c: '0'+(c&7), stream: f); |
8376 | break; |
8377 | default: |
8378 | putc (c: '\\', stream: f); |
8379 | putc (c: escape, stream: f); |
8380 | break; |
8381 | } |
8382 | s++; |
8383 | } |
8384 | putc (c: '\"', stream: f); |
8385 | putc (c: '\n', stream: f); |
8386 | } |
8387 | |
8388 | /* Default ASM_OUTPUT_ASCII for ELF targets. */ |
8389 | |
void
default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
{
  const char *limit = s + len;
  /* Cached position of the next NUL at or after S, so the inner scan
     below is not repeated for every byte.  */
  const char *last_null = NULL;
  /* Number of output characters emitted so far on the current
     .ascii line.  */
  unsigned bytes_in_chunk = 0;
  unsigned char c;
  int escape;

  for (; s < limit; s++)
    {
      const char *p;

      /* Keep .ascii lines to a readable length.  */
      if (bytes_in_chunk >= 60)
	{
	  putc (c: '\"', stream: f);
	  putc (c: '\n', stream: f);
	  bytes_in_chunk = 0;
	}

      /* Find the next NUL (or the end of the buffer), reusing the
	 cached result when S has not passed it yet.  */
      if (s > last_null)
	{
	  for (p = s; p < limit && *p != '\0'; p++)
	    continue;
	  last_null = p;
	}
      else
	p = last_null;

      /* A short NUL-terminated run can be emitted as a .string
	 directive, which supplies the terminator implicitly.  */
      if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
	{
	  if (bytes_in_chunk > 0)
	    {
	      putc (c: '\"', stream: f);
	      putc (c: '\n', stream: f);
	      bytes_in_chunk = 0;
	    }

	  default_elf_asm_output_limited_string (f, s);
	  /* Skip past the run; the loop increment consumes the NUL.  */
	  s = p;
	}
      else
	{
	  /* Otherwise emit the byte inside an .ascii chunk, escaping
	     as needed.  */
	  if (bytes_in_chunk == 0)
	    fputs (ASCII_DATA_ASM_OP "\"" , stream: f);

	  c = *s;
	  escape = ELF_ASCII_ESCAPES[c];
	  switch (escape)
	    {
	    case 0:
	      /* Safe to emit literally.  */
	      putc (c: c, stream: f);
	      bytes_in_chunk++;
	      break;
	    case 1:
	      /* Three-digit octal escape.  */
	      putc (c: '\\', stream: f);
	      putc (c: '0'+((c>>6)&7), stream: f);
	      putc (c: '0'+((c>>3)&7), stream: f);
	      putc (c: '0'+(c&7), stream: f);
	      bytes_in_chunk += 4;
	      break;
	    default:
	      /* Short escape such as \n.  */
	      putc (c: '\\', stream: f);
	      putc (c: escape, stream: f);
	      bytes_in_chunk += 2;
	      break;
	    }

	}
    }

  /* Close any .ascii line still open.  */
  if (bytes_in_chunk > 0)
    {
      putc (c: '\"', stream: f);
      putc (c: '\n', stream: f);
    }
}
8467 | #endif |
8468 | |
/* Lazily-created caches for the default-priority ".init_array" and
   ".fini_array" sections; see
   get_elf_initfini_array_priority_section.  */
static GTY(()) section *elf_init_array_section;
static GTY(()) section *elf_fini_array_section;
8471 | |
8472 | static section * |
8473 | get_elf_initfini_array_priority_section (int priority, |
8474 | bool constructor_p) |
8475 | { |
8476 | section *sec; |
8477 | if (priority != DEFAULT_INIT_PRIORITY) |
8478 | { |
8479 | char buf[18]; |
8480 | sprintf (s: buf, format: "%s.%.5u" , |
8481 | constructor_p ? ".init_array" : ".fini_array" , |
8482 | priority); |
8483 | sec = get_section (name: buf, flags: SECTION_WRITE | SECTION_NOTYPE, NULL_TREE); |
8484 | } |
8485 | else |
8486 | { |
8487 | if (constructor_p) |
8488 | { |
8489 | if (elf_init_array_section == NULL) |
8490 | elf_init_array_section |
8491 | = get_section (name: ".init_array" , |
8492 | flags: SECTION_WRITE | SECTION_NOTYPE, NULL_TREE); |
8493 | sec = elf_init_array_section; |
8494 | } |
8495 | else |
8496 | { |
8497 | if (elf_fini_array_section == NULL) |
8498 | elf_fini_array_section |
8499 | = get_section (name: ".fini_array" , |
8500 | flags: SECTION_WRITE | SECTION_NOTYPE, NULL_TREE); |
8501 | sec = elf_fini_array_section; |
8502 | } |
8503 | } |
8504 | return sec; |
8505 | } |
8506 | |
8507 | /* Use .init_array section for constructors. */ |
8508 | |
8509 | void |
8510 | default_elf_init_array_asm_out_constructor (rtx symbol, int priority) |
8511 | { |
8512 | section *sec = get_elf_initfini_array_priority_section (priority, |
8513 | constructor_p: true); |
8514 | assemble_addr_to_section (symbol, sec); |
8515 | } |
8516 | |
8517 | /* Use .fini_array section for destructors. */ |
8518 | |
8519 | void |
8520 | default_elf_fini_array_asm_out_destructor (rtx symbol, int priority) |
8521 | { |
8522 | section *sec = get_elf_initfini_array_priority_section (priority, |
8523 | constructor_p: false); |
8524 | assemble_addr_to_section (symbol, sec); |
8525 | } |
8526 | |
8527 | /* Default TARGET_ASM_OUTPUT_IDENT hook. |
8528 | |
8529 | This is a bit of a cheat. The real default is a no-op, but this |
8530 | hook is the default for all targets with a .ident directive. */ |
8531 | |
void
default_asm_output_ident_directive (const char *ident_str)
{
  const char *ident_asm_op = "\t.ident\t" ;

  /* If we are still in the front end, do not write out the string
     to asm_out_file.  Instead, add a fake top-level asm statement.
     This allows the front ends to use this hook without actually
     writing to asm_out_file, to handle #ident or Pragma Ident.  */
  if (symtab->state == PARSING)
    {
      /* Build the full directive text, e.g. "\t.ident\t\"...\"\n", and
	 queue it as a top-level asm.  */
      char *buf = ACONCAT ((ident_asm_op, "\"" , ident_str, "\"\n" , NULL));
      symtab->finalize_toplevel_asm (asm_str: build_string (strlen (s: buf), buf));
    }
  else
    /* Past parsing: emit the directive directly.  */
    fprintf (stream: asm_out_file, format: "%s\"%s\"\n" , ident_asm_op, ident_str);
}
8549 | |
8550 | /* Switch to a COMDAT section with COMDAT name of decl. |
8551 | |
8552 | FIXME: resolve_unique_section needs to deal better with |
8553 | decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once |
8554 | that is fixed, this if-else statement can be replaced with |
8555 | a single call to "switch_to_section (sect)". */ |
8556 | |
void
switch_to_comdat_section (section *sect, tree decl)
{
#if defined (OBJECT_FORMAT_ELF)
  /* On ELF, re-emit the named section with SECTION_LINKONCE added so
     the target hook produces a comdat group for DECL.  */
  targetm.asm_out.named_section (sect->named.name,
				 sect->named.common.flags
				 | SECTION_LINKONCE,
				 decl);
  in_section = sect;
#else
  /* Neither OBJECT_FORMAT_PE, nor OBJECT_FORMAT_COFF is set here.
     Therefore the following check is used.
     In case a the target is PE or COFF a comdat group section
     is created, e.g. .vtable_map_vars$foo.  The linker places
     everything in .vtable_map_vars at the end.

     A fix could be made in
     gcc/config/i386/winnt.cc: i386_pe_unique_section.  */
  if (TARGET_PECOFF)
    {
      char *name;

      /* Build "<section>$<comdat-name>"; DECL may already be the
	 identifier, otherwise use its comdat group's identifier.  */
      if (TREE_CODE (decl) == IDENTIFIER_NODE)
	name = ACONCAT ((sect->named.name, "$" ,
			 IDENTIFIER_POINTER (decl), NULL));
      else
	name = ACONCAT ((sect->named.name, "$" ,
			 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)),
			 NULL));

      targetm.asm_out.named_section (name,
				     sect->named.common.flags
				     | SECTION_LINKONCE,
				     decl);
      in_section = sect;
    }
  else
    /* Other object formats: a plain section switch suffices.  */
    switch_to_section (sect);
#endif
}
8597 | |
8598 | /* This function ensures that vtable_map variables are not only |
8599 | in the comdat section, but that each variable has its own unique |
8600 | comdat name. Without this the variables end up in the same section |
8601 | with a single comdat name. */ |
8602 | |
static void
handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED)
{
  /* Use DECL_NAME as the comdat key so each vtable map variable gets
     its own unique comdat group rather than sharing one.  */
  switch_to_comdat_section(sect, DECL_NAME (decl));
}
8608 | |
8609 | #include "gt-varasm.h" |
8610 | |